Merge pull request #1010 from talentedmrjones/rename-workspace-flag

Rename workspace to project including flag, references, and tests
Sam Alba 2021-09-23 16:01:26 -07:00 committed by GitHub
commit 9c32c8a564
27 changed files with 228 additions and 228 deletions
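For scripts and CI pipelines built against the old CLI, the main user-visible change is the flag rename. A minimal before/after sketch, using a placeholder project directory and invocations mirrored from the test updates in this commit:

# Before: the project directory was selected via the workspace flag (-w / --workspace).
dagger -w ./myproject up
url=$(dagger -w ./myproject query -f text url)

# After: the flag is --project, with no single-letter shorthand (see the root command diff below).
dagger --project ./myproject up
url=$(dagger --project ./myproject query -f text url)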

View File

@ -13,37 +13,37 @@ import (
"go.dagger.io/dagger/state"
)
func CurrentWorkspace(ctx context.Context) *state.Workspace {
func CurrentProject(ctx context.Context) *state.Project {
lg := log.Ctx(ctx)
if workspacePath := viper.GetString("workspace"); workspacePath != "" {
workspace, err := state.Open(ctx, workspacePath)
if projectPath := viper.GetString("project"); projectPath != "" {
project, err := state.Open(ctx, projectPath)
if err != nil {
lg.
Fatal().
Err(err).
Str("path", workspacePath).
Msg("failed to open workspace")
Str("path", projectPath).
Msg("failed to open project")
}
return workspace
return project
}
workspace, err := state.Current(ctx)
project, err := state.Current(ctx)
if err != nil {
lg.
Fatal().
Err(err).
Msg("failed to determine current workspace")
Msg("failed to determine current project")
}
return workspace
return project
}
func CurrentEnvironmentState(ctx context.Context, workspace *state.Workspace) *state.State {
func CurrentEnvironmentState(ctx context.Context, project *state.Project) *state.State {
lg := log.Ctx(ctx)
environmentName := viper.GetString("environment")
if environmentName != "" {
st, err := workspace.Get(ctx, environmentName)
st, err := project.Get(ctx, environmentName)
if err != nil {
lg.
Fatal().
@ -53,7 +53,7 @@ func CurrentEnvironmentState(ctx context.Context, workspace *state.Workspace) *s
return st
}
environments, err := workspace.List(ctx)
environments, err := project.List(ctx)
if err != nil {
lg.
Fatal().
@ -76,7 +76,7 @@ func CurrentEnvironmentState(ctx context.Context, workspace *state.Workspace) *s
Fatal().
Err(err).
Strs("environments", envNames).
Msg("multiple environments available in the workspace, select one with `--environment`")
Msg("multiple environments available in the project, select one with `--environment`")
}
return environments[0]
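Environment selection itself is untouched by the rename: when a project contains more than one environment, commands still need an explicit `--environment`/`-e`. A short usage sketch with a placeholder environment name, mirroring the tutorial tests further down:

# Select one of several environments in the project explicitly:
dagger --project "$DAGGER_SANDBOX" -e multibucket up
dagger --project "$DAGGER_SANDBOX" -e multibucket query -f text site.netlify.deployUrl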

View File

@ -32,9 +32,9 @@ func commandName(cmd *cobra.Command) string {
return strings.Join(parts, " ")
}
// TrackWorkspaceCommand is like TrackCommand but includes workspace and
// TrackProjectCommand is like TrackCommand but includes project and
// optionally environment metadata.
func TrackWorkspaceCommand(ctx context.Context, cmd *cobra.Command, w *state.Workspace, env *state.State, props ...*telemetry.Property) chan struct{} {
func TrackProjectCommand(ctx context.Context, cmd *cobra.Command, w *state.Project, env *state.State, props ...*telemetry.Property) chan struct{} {
props = append([]*telemetry.Property{
{
// Hash the repository URL for privacy
@ -42,8 +42,8 @@ func TrackWorkspaceCommand(ctx context.Context, cmd *cobra.Command, w *state.Wor
Value: hash(gitRepoURL(w.Path)),
},
{
// The workspace path might contain the username (e.g. /home/user/workspace), so we hash it for privacy.
Name: "workspace_path_hash",
// The project path might contain the username (e.g. /home/user/project), so we hash it for privacy.
Name: "project_path_hash",
Value: hash(w.Path),
},
}, props...)

View File

@ -34,14 +34,14 @@ var editCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st)
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
data, err := yaml.Marshal(st)
if err != nil {
@ -92,7 +92,7 @@ var editCmd = &cobra.Command{
lg.Fatal().Err(err).Str("environment", st.Name).Msg("invalid input")
}
if err := workspace.Save(ctx, st); err != nil {
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Msg("failed to save state")
}
},

View File

@ -12,7 +12,7 @@ import (
var initCmd = &cobra.Command{
Use: "init",
Short: "Initialize a new empty workspace",
Short: "Initialize a new empty project",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
@ -25,7 +25,7 @@ var initCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
dir := viper.GetString("workspace")
dir := viper.GetString("project")
if dir == "" {
cwd, err := os.Getwd()
if err != nil {
@ -37,12 +37,12 @@ var initCmd = &cobra.Command{
dir = cwd
}
workspace, err := state.Init(ctx, dir)
project, err := state.Init(ctx, dir)
if err != nil {
lg.Fatal().Err(err).Msg("failed to initialize workspace")
lg.Fatal().Err(err).Msg("failed to initialize project")
}
<-common.TrackWorkspaceCommand(ctx, cmd, workspace, nil)
<-common.TrackProjectCommand(ctx, cmd, project, nil)
},
}

View File

@ -37,11 +37,11 @@ var dirCmd = &cobra.Command{
lg.Fatal().Err(err).Str("path", args[1]).Msg("dir doesn't exist")
}
workspace := common.CurrentWorkspace(ctx)
if !strings.HasPrefix(p, workspace.Path) {
lg.Fatal().Err(err).Str("path", args[1]).Msg("dir is outside the workspace")
project := common.CurrentProject(ctx)
if !strings.HasPrefix(p, project.Path) {
lg.Fatal().Err(err).Str("path", args[1]).Msg("dir is outside the project")
}
p, err = filepath.Rel(workspace.Path, p)
p, err = filepath.Rel(project.Path, p)
if err != nil {
lg.Fatal().Err(err).Str("path", args[1]).Msg("unable to resolve path")
}

View File

@ -32,14 +32,14 @@ var listCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st)
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
c := common.NewClient(ctx)
err := c.Do(ctx, st, func(ctx context.Context, env *environment.Environment, s solver.Solver) error {

View File

@ -40,14 +40,14 @@ func init() {
func updateEnvironmentInput(ctx context.Context, cmd *cobra.Command, target string, input state.Input) {
lg := *log.Ctx(ctx)
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st, &telemetry.Property{
doneCh := common.TrackProjectCommand(ctx, cmd, project, st, &telemetry.Property{
Name: "input_target",
Value: target,
})
@ -71,7 +71,7 @@ func updateEnvironmentInput(ctx context.Context, cmd *cobra.Command, target stri
lg.Fatal().Err(err).Msg("invalid input")
}
if err := workspace.Save(ctx, st); err != nil {
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Msg("cannot update environment")
}
}

View File

@ -22,11 +22,11 @@ var unsetCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
st.RemoveInputs(args[0])
if err := workspace.Save(ctx, st); err != nil {
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Str("environment", st.Name).Msg("cannot update environment")
}
lg.Info().Str("environment", st.Name).Msg("updated environment")

View File

@ -29,10 +29,10 @@ var listCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, nil)
project := common.CurrentProject(ctx)
doneCh := common.TrackProjectCommand(ctx, cmd, project, nil)
environments, err := workspace.List(ctx)
environments, err := project.List(ctx)
if err != nil {
lg.
Fatal().

View File

@ -24,11 +24,11 @@ const tmpBasePath = "./cue.mod/tmp"
type file struct {
require []*require
workspacePath string
projectPath string
}
func readPath(workspacePath string) (*file, error) {
p := path.Join(workspacePath, filePath)
func readPath(projectPath string) (*file, error) {
p := path.Join(projectPath, filePath)
f, err := os.Open(p)
if err != nil {
@ -47,7 +47,7 @@ func readPath(workspacePath string) (*file, error) {
return nil, err
}
modFile.workspacePath = workspacePath
modFile.projectPath = projectPath
return modFile, nil
}
@ -102,7 +102,7 @@ func nonEmptyLines(b []byte) []string {
func (f *file) processRequire(req *require, upgrade bool) (bool, error) {
var isNew bool
tmpPath := path.Join(f.workspacePath, tmpBasePath, req.repo)
tmpPath := path.Join(f.projectPath, tmpBasePath, req.repo)
if err := os.MkdirAll(tmpPath, 0755); err != nil {
return false, fmt.Errorf("error creating tmp dir for cloning package")
}
@ -117,7 +117,7 @@ func (f *file) processRequire(req *require, upgrade bool) (bool, error) {
}
existing := f.search(req)
destPath := path.Join(f.workspacePath, destBasePath)
destPath := path.Join(f.projectPath, destBasePath)
// requirement is new, so we should move the files and add it to the mod file
if existing == nil {
@ -167,7 +167,7 @@ func (f *file) processRequire(req *require, upgrade bool) (bool, error) {
}
func (f *file) write() error {
return ioutil.WriteFile(path.Join(f.workspacePath, filePath), f.contents().Bytes(), 0600)
return ioutil.WriteFile(path.Join(f.projectPath, filePath), f.contents().Bytes(), 0600)
}
func (f *file) contents() *bytes.Buffer {

View File

@ -25,15 +25,15 @@ var getCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st, &telemetry.Property{
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
doneCh := common.TrackProjectCommand(ctx, cmd, project, st, &telemetry.Property{
Name: "packages",
Value: args,
})
// read mod file in the current dir
modFile, err := readPath(workspace.Path)
modFile, err := readPath(project.Path)
if err != nil {
lg.Fatal().Err(err).Msg("error loading module file")
}

View File

@ -23,7 +23,7 @@ var newCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
project := common.CurrentProject(ctx)
if viper.GetString("environment") != "" {
lg.
@ -32,7 +32,7 @@ var newCmd = &cobra.Command{
}
name := args[0]
st, err := workspace.Create(ctx, name, state.Plan{
st, err := project.Create(ctx, name, state.Plan{
Package: viper.GetString("package"),
})
@ -40,7 +40,7 @@ var newCmd = &cobra.Command{
lg.Fatal().Err(err).Msg("failed to create environment")
}
<-common.TrackWorkspaceCommand(ctx, cmd, workspace, st)
<-common.TrackProjectCommand(ctx, cmd, project, st)
},
}

View File

@ -31,14 +31,14 @@ var listCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st)
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
cl := common.NewClient(ctx)
err := cl.Do(ctx, st, func(ctx context.Context, env *environment.Environment, s solver.Solver) error {

View File

@ -27,8 +27,8 @@ var queryCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
state := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
state := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", state.Name).
@ -40,7 +40,7 @@ var queryCmd = &cobra.Command{
cuePath = cue.ParsePath(args[0])
}
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, state)
doneCh := common.TrackProjectCommand(ctx, cmd, project, state)
cueVal := compiler.NewValue()

View File

@ -34,7 +34,7 @@ func init() {
"External cache sources (eg. user/app:cache, type=local,src=path/to/dir)")
rootCmd.PersistentFlags().StringP("environment", "e", "", "Select an environment")
rootCmd.PersistentFlags().StringP("workspace", "w", "", "Specify a workspace (defaults to current git repository)")
rootCmd.PersistentFlags().String("project", "", "Specify a project directory (defaults to current)")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, _ []string) {
lg := logger.New()
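The registration above also drops the -w shorthand: the old StringP("workspace", "w", ...) becomes a plain String("project", ...), so only the long form remains. The BATS helpers later in this diff likewise switch the DAGGER_WORKSPACE environment variable to DAGGER_PROJECT; a hedged sketch of a sandbox setup after this change (temp-directory naming copied from those helpers, and assuming the environment variable keeps being honored the way the helpers rely on):

# Create a throwaway project and point dagger at it.
DAGGER_PROJECT="$(mktemp -d -t dagger-project-XXXXXX)"
export DAGGER_PROJECT

dagger init --project "$DAGGER_PROJECT"
dagger --project "$DAGGER_PROJECT" list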

View File

@ -34,14 +34,14 @@ var upCmd = &cobra.Command{
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
workspace := common.CurrentWorkspace(ctx)
st := common.CurrentEnvironmentState(ctx, workspace)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackWorkspaceCommand(ctx, cmd, workspace, st)
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
cl := common.NewClient(ctx)
@ -56,7 +56,7 @@ var upCmd = &cobra.Command{
}
st.Computed = env.Computed().JSON().PrettyString()
if err := workspace.Save(ctx, st); err != nil {
if err := project.Save(ctx, st); err != nil {
return err
}

View File

@ -15,13 +15,13 @@ setup() {
"$DAGGER_SANDBOX"/import-tutorial-key.sh
# Collect url
dagger -w "$DAGGER_SANDBOX" up
url=$(dagger -w "$DAGGER_SANDBOX" query -f text url)
dagger --project "$DAGGER_SANDBOX" up
url=$(dagger --project "$DAGGER_SANDBOX" query -f text url)
# More commands
dagger -w "$DAGGER_SANDBOX" list
dagger --project "$DAGGER_SANDBOX" list
ls -l "$DAGGER_SANDBOX"/s3
dagger -w "$DAGGER_SANDBOX" input list
dagger --project "$DAGGER_SANDBOX" input list
# Check output
run curl "$url"
@ -33,25 +33,25 @@ setup() {
# Follow tutorial
mkdir -p "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_WORKSPACE"/multibucket/source.cue "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_WORKSPACE"/multibucket/yarn.cue "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_WORKSPACE"/multibucket/netlify.cue "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_PROJECT"/multibucket/source.cue "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_PROJECT"/multibucket/yarn.cue "$DAGGER_SANDBOX"/multibucket
cp "$DAGGER_PROJECT"/multibucket/netlify.cue "$DAGGER_SANDBOX"/multibucket
dagger -w "$DAGGER_SANDBOX" doc alpha.dagger.io/netlify
dagger -w "$DAGGER_SANDBOX" doc alpha.dagger.io/js/yarn
dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/netlify
dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/js/yarn
# Initialize new env
dagger -w "$DAGGER_SANDBOX" new 'multibucket' -p "$DAGGER_SANDBOX"/multibucket
dagger --project "$DAGGER_SANDBOX" new 'multibucket' -p "$DAGGER_SANDBOX"/multibucket
# Copy corresponding env
cp -r "$DAGGER_WORKSPACE"/.dagger/env/multibucket "$DAGGER_SANDBOX"/.dagger/env/
cp -r "$DAGGER_PROJECT"/.dagger/env/multibucket "$DAGGER_SANDBOX"/.dagger/env/
# Add missing src input
dagger -w "$DAGGER_SANDBOX" -e multibucket input dir src "$DAGGER_SANDBOX"
dagger --project "$DAGGER_SANDBOX" -e multibucket input dir src "$DAGGER_SANDBOX"
# Run test
dagger -w "$DAGGER_SANDBOX" -e multibucket up
url=$(dagger -w "$DAGGER_SANDBOX" -e multibucket query -f text site.netlify.deployUrl)
dagger --project "$DAGGER_SANDBOX" -e multibucket up
url=$(dagger --project "$DAGGER_SANDBOX" -e multibucket query -f text site.netlify.deployUrl)
# Check output
run curl "$url"
@ -63,19 +63,19 @@ setup() {
# Follow tutorial
mkdir -p "$DAGGER_SANDBOX"/gcpcloudrun
cp "$DAGGER_WORKSPACE"/gcpcloudrun/source.cue "$DAGGER_SANDBOX"/gcpcloudrun
cp "$DAGGER_PROJECT"/gcpcloudrun/source.cue "$DAGGER_SANDBOX"/gcpcloudrun
# Initialize new env
dagger -w "$DAGGER_SANDBOX" new 'gcpcloudrun' -p "$DAGGER_SANDBOX"/gcpcloudrun
dagger --project "$DAGGER_SANDBOX" new 'gcpcloudrun' -p "$DAGGER_SANDBOX"/gcpcloudrun
# Copy corresponding env
cp -r "$DAGGER_WORKSPACE"/.dagger/env/gcpcloudrun "$DAGGER_SANDBOX"/.dagger/env/
cp -r "$DAGGER_PROJECT"/.dagger/env/gcpcloudrun "$DAGGER_SANDBOX"/.dagger/env/
# Add missing src input
dagger -w "$DAGGER_SANDBOX" -e gcpcloudrun input dir src "$DAGGER_SANDBOX"
dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun input dir src "$DAGGER_SANDBOX"
# Run test
run dagger -w "$DAGGER_SANDBOX" -e gcpcloudrun up
run dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun up
assert_success
}
@ -88,10 +88,10 @@ setup() {
# copy_to_sandbox kube-kind-basic kube-kind
# # Add kubeconfig
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-basic input text kubeconfig -f "$HOME"/.kube/config
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-basic input text kubeconfig -f "$HOME"/.kube/config
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-basic up
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-basic up
# # Check deployment
# kubectl describe deployment todoapp | grep 'True'
@ -105,10 +105,10 @@ setup() {
# copy_to_sandbox kube-kind-deployment kube-kind
# # Add kubeconfig
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-deployment input text kubeconfig -f "$HOME"/.kube/config
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-deployment input text kubeconfig -f "$HOME"/.kube/config
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-deployment up
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-deployment up
# # Check deployment
# kubectl describe deployment todoapp | grep 'True'
@ -122,10 +122,10 @@ setup() {
# copy_to_sandbox kube-kind-cue-manifest kube-kind
# # Add kubeconfig
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-cue-manifest input text kubeconfig -f "$HOME"/.kube/config
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-cue-manifest input text kubeconfig -f "$HOME"/.kube/config
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-kind-cue-manifest up
# dagger --project "$DAGGER_SANDBOX" -e kube-kind-cue-manifest up
# # Check deployment
# kubectl describe deployment todoapp | grep 'True'
@ -142,20 +142,20 @@ setup() {
# copy_to_sandbox kube-aws-basic kube-aws
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-aws-basic up
# dagger --project "$DAGGER_SANDBOX" -e kube-aws-basic up
# #################### DEPLOYMENT ####################
# # Copy deployment to sandbox
# copy_to_sandbox kube-aws-deployment kube-aws
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-aws-deployment up
# dagger --project "$DAGGER_SANDBOX" -e kube-aws-deployment up
# #################### CUE MANIFEST ####################
# # Copy deployment to sandbox
# copy_to_sandbox kube-aws-cue-manifest kube-aws
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-aws-cue-manifest up
# dagger --project "$DAGGER_SANDBOX" -e kube-aws-cue-manifest up
}
@test "doc-1007-kube-gcp" {
@ -165,20 +165,20 @@ setup() {
# copy_to_sandbox kube-gcp-basic kube-gcp
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-gcp-basic up
# dagger --project "$DAGGER_SANDBOX" -e kube-gcp-basic up
# #################### DEPLOYMENT ####################
# # Copy deployment to sandbox
# copy_to_sandbox kube-gcp-deployment kube-gcp
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-gcp-deployment up
# dagger --project "$DAGGER_SANDBOX" -e kube-gcp-deployment up
# #################### CUE MANIFEST ####################
# # Copy deployment to sandbox
# copy_to_sandbox kube-gcp-cue-manifest kube-gcp
# # Up deployment
# dagger -w "$DAGGER_SANDBOX" -e kube-gcp-cue-manifest up
# dagger --project "$DAGGER_SANDBOX" -e kube-gcp-cue-manifest up
}
@test "doc-1008-aws-cloudformation" {
@ -188,96 +188,96 @@ setup() {
### Create a basic plan
## Construct
mkdir -p "$DAGGER_SANDBOX"/cloudformation
cp "$DAGGER_WORKSPACE"/cloudformation/template.cue "$DAGGER_SANDBOX"/cloudformation
cp "$DAGGER_PROJECT"/cloudformation/template.cue "$DAGGER_SANDBOX"/cloudformation
# Cloudformation relay
dagger -w "$DAGGER_SANDBOX" doc alpha.dagger.io/aws/cloudformation
cp "$DAGGER_WORKSPACE"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/aws/cloudformation
cp "$DAGGER_PROJECT"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
# Initialize new env
dagger -w "$DAGGER_SANDBOX" new 'cloudformation' -p "$DAGGER_SANDBOX"/cloudformation
dagger --project "$DAGGER_SANDBOX" new 'cloudformation' -p "$DAGGER_SANDBOX"/cloudformation
# Finish template setup
cp "$DAGGER_WORKSPACE"/cloudformation/source-end.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_PROJECT"/cloudformation/source-end.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
# Copy corresponding env
cp -r "$DAGGER_WORKSPACE"/.dagger/env/cloudformation "$DAGGER_SANDBOX"/.dagger/env/
cp -r "$DAGGER_PROJECT"/.dagger/env/cloudformation "$DAGGER_SANDBOX"/.dagger/env/
# Run test
dagger -w "$DAGGER_SANDBOX" -e cloudformation up
stackName=$(dagger -w "$DAGGER_SANDBOX" -e cloudformation query cfnStackName -f text)
dagger --project "$DAGGER_SANDBOX" -e cloudformation up
stackName=$(dagger --project "$DAGGER_SANDBOX" -e cloudformation query cfnStackName -f text)
## Cleanup
# Place back empty source
cp "$DAGGER_WORKSPACE"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_WORKSPACE"/cloudformation/deletion.cue "$DAGGER_SANDBOX"/cloudformation/deletion.cue
cp "$DAGGER_PROJECT"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_PROJECT"/cloudformation/deletion.cue "$DAGGER_SANDBOX"/cloudformation/deletion.cue
# Prepare and run cloudformation cleanup
dagger -w "$DAGGER_SANDBOX" -e cloudformation input text stackRemoval.stackName "$stackName"
dagger -w "$DAGGER_SANDBOX" -e cloudformation up
dagger --project "$DAGGER_SANDBOX" -e cloudformation input text stackRemoval.stackName "$stackName"
dagger --project "$DAGGER_SANDBOX" -e cloudformation up
### Template part
## Create convert.cue
cp "$DAGGER_WORKSPACE"/cloudformation/template/convert.cue "$DAGGER_SANDBOX"/cloudformation/convert.cue
cp "$DAGGER_PROJECT"/cloudformation/template/convert.cue "$DAGGER_SANDBOX"/cloudformation/convert.cue
rm "$DAGGER_SANDBOX"/cloudformation/source.cue "$DAGGER_SANDBOX"/cloudformation/deletion.cue
## Retrieve Unmarshalled JSON
dagger -w "$DAGGER_SANDBOX" query -e cloudformation s3Template
dagger --project "$DAGGER_SANDBOX" query -e cloudformation s3Template
## Remove convert.cue
rm "$DAGGER_SANDBOX"/cloudformation/convert.cue
## Store the output
cp "$DAGGER_WORKSPACE"/cloudformation/template/template-begin.cue "$DAGGER_SANDBOX"/cloudformation/template.cue
cp "$DAGGER_PROJECT"/cloudformation/template/template-begin.cue "$DAGGER_SANDBOX"/cloudformation/template.cue
# Inspect conf
dagger -w "$DAGGER_SANDBOX" query -e cloudformation template -f text
dagger --project "$DAGGER_SANDBOX" query -e cloudformation template -f text
cp "$DAGGER_WORKSPACE"/cloudformation/template/deployment.cue "$DAGGER_SANDBOX"/cloudformation/deployment.cue
cp "$DAGGER_WORKSPACE"/cloudformation/template/template-end.cue "$DAGGER_SANDBOX"/cloudformation/template.cue
cp "$DAGGER_WORKSPACE"/cloudformation/source-end.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_PROJECT"/cloudformation/template/deployment.cue "$DAGGER_SANDBOX"/cloudformation/deployment.cue
cp "$DAGGER_PROJECT"/cloudformation/template/template-end.cue "$DAGGER_SANDBOX"/cloudformation/template.cue
cp "$DAGGER_PROJECT"/cloudformation/source-end.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
# Deploy again
dagger -w "$DAGGER_SANDBOX" -e cloudformation query template -f text
dagger -w "$DAGGER_SANDBOX" -e cloudformation up
dagger -w "$DAGGER_SANDBOX" -e cloudformation output list
dagger --project "$DAGGER_SANDBOX" -e cloudformation query template -f text
dagger --project "$DAGGER_SANDBOX" -e cloudformation up
dagger --project "$DAGGER_SANDBOX" -e cloudformation output list
## Cleanup again
stackName=$(dagger -w "$DAGGER_SANDBOX" -e cloudformation query cfnStackName -f text)
stackName=$(dagger --project "$DAGGER_SANDBOX" -e cloudformation query cfnStackName -f text)
rm -rf "$DAGGER_SANDBOX"/cloudformation/*
# Place back empty source
cp "$DAGGER_WORKSPACE"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_WORKSPACE"/cloudformation/deletion.cue "$DAGGER_SANDBOX"/cloudformation/deletion.cue
cp "$DAGGER_PROJECT"/cloudformation/source-begin.cue "$DAGGER_SANDBOX"/cloudformation/source.cue
cp "$DAGGER_PROJECT"/cloudformation/deletion.cue "$DAGGER_SANDBOX"/cloudformation/deletion.cue
# Prepare and run cloudformation cleanup
dagger -w "$DAGGER_SANDBOX" -e cloudformation input text stackRemoval.stackName "$stackName"
dagger -w "$DAGGER_SANDBOX" -e cloudformation up
dagger --project "$DAGGER_SANDBOX" -e cloudformation input text stackRemoval.stackName "$stackName"
dagger --project "$DAGGER_SANDBOX" -e cloudformation up
}
@test "doc-1010-dev-cue-package" {
# Initializing workspace
mkdir -p "$DAGGER_SANDBOX"/workspace
# Initializing project
mkdir -p "$DAGGER_SANDBOX"/project
# Writing package
# dagger init # The sandbox is already init
mkdir -p "$DAGGER_SANDBOX"/cue.mod/pkg/github.com/tjovicic/gcpcloudrun
cp "$DAGGER_WORKSPACE"/dev-cue-package/source.cue "$DAGGER_SANDBOX"/cue.mod/pkg/github.com/tjovicic/gcpcloudrun/source.cue
cp "$DAGGER_WORKSPACE"/dev-cue-package/script.sh "$DAGGER_SANDBOX"/workspace/script.sh
cp "$DAGGER_PROJECT"/dev-cue-package/source.cue "$DAGGER_SANDBOX"/cue.mod/pkg/github.com/tjovicic/gcpcloudrun/source.cue
cp "$DAGGER_PROJECT"/dev-cue-package/script.sh "$DAGGER_SANDBOX"/project/script.sh
# We remove the last line of the script, as bats cannot expand `dagger`
# to the dagger() bats helper func inside bash files
sed '$d' <"$DAGGER_SANDBOX"/workspace/script.sh >"$DAGGER_SANDBOX"/tmpFile
mv "$DAGGER_SANDBOX"/tmpFile "$DAGGER_SANDBOX"/workspace/script.sh
sed '$d' <"$DAGGER_SANDBOX"/project/script.sh >"$DAGGER_SANDBOX"/tmpFile
mv "$DAGGER_SANDBOX"/tmpFile "$DAGGER_SANDBOX"/project/script.sh
chmod +x "$DAGGER_SANDBOX"/workspace/script.sh
"$DAGGER_SANDBOX"/workspace/script.sh
chmod +x "$DAGGER_SANDBOX"/project/script.sh
"$DAGGER_SANDBOX"/project/script.sh
# Sync file from documentation
rsync -a test "$DAGGER_SANDBOX"
# Command removed from script.sh above
dagger -w "$DAGGER_SANDBOX" new staging -p "$DAGGER_SANDBOX"/test
run dagger up -w "$DAGGER_SANDBOX" -e staging
dagger --project "$DAGGER_SANDBOX" new staging -p "$DAGGER_SANDBOX"/test
run dagger up --project "$DAGGER_SANDBOX" -e staging
assert_output --partial "input=run.gcpConfig.serviceKey"
# Clean script.sh output

View File

@ -10,19 +10,19 @@ common_setup() {
# otherwise infinite recursion when DAGGER_BINARY is not set.
export DAGGER="${DAGGER_BINARY:-$(bash -c 'command -v dagger')}"
# Set the workspace to the universe directory (so tests can run from anywhere)
# Set the project to the universe directory (so tests can run from anywhere)
UNIVERSE="$( cd "$( dirname "$BATS_TEST_FILENAME" )" >/dev/null 2>&1 && pwd )"
DAGGER_WORKSPACE="$UNIVERSE"
export DAGGER_WORKSPACE
DAGGER_PROJECT="$UNIVERSE"
export DAGGER_PROJECT
# Force pretty printing for error reporting
DAGGER_LOG_FORMAT="pretty"
export DAGGER_LOG_FORMAT
# Sandbox workspace.
DAGGER_SANDBOX="$(mktemp -d -t dagger-workspace-XXXXXX)"
# Sandbox project.
DAGGER_SANDBOX="$(mktemp -d -t dagger-project-XXXXXX)"
export DAGGER_SANDBOX
dagger init -w "$DAGGER_SANDBOX"
dagger init --project "$DAGGER_SANDBOX"
# allows the use of `sops`
SOPS_AGE_KEY_FILE=~/.config/dagger/keys.txt
@ -43,28 +43,28 @@ setup_example_sandbox() {
}
# copy an environment from the current workspace to the sandbox.
# copy an environment from the current project to the sandbox.
#
# this is needed if the test requires altering inputs without dirtying the
# current environment.
# Usage:
# copy_to_sandbox myenv
# dagger input secret -w "$DAGGER_SANDBOX" -e myenv "temporary change"
# dagger up -w "$DAGGER_SANDBOX" -e myenv
# dagger up --project "$DAGGER_SANDBOX" -e myenv
#
# To use the testdata directory in tests, add the package name as the second argument
# Usage:
# copy_to_sandbox myenv mypackage
copy_to_sandbox() {
local name="$1"
local source="$DAGGER_WORKSPACE"/.dagger/env/"$name"
local source="$DAGGER_PROJECT"/.dagger/env/"$name"
local target="$DAGGER_SANDBOX"/.dagger/env/"$name"
cp -a "$source" "$target"
if [ -d "$2" ]; then
local package="$2"
local source_package="$DAGGER_WORKSPACE"/"$package"
local source_package="$DAGGER_PROJECT"/"$package"
local target_package="$DAGGER_SANDBOX"/
cp -a "$source_package" "$target_package"

View File

@ -108,10 +108,10 @@ func (dir dirInput) Compile(_ string, state *State) (*compiler.Value, error) {
p := dir.Path
if !filepath.IsAbs(p) {
p = filepath.Clean(path.Join(state.Workspace, dir.Path))
p = filepath.Clean(path.Join(state.Project, dir.Path))
}
if !strings.HasPrefix(p, state.Workspace) {
return nil, fmt.Errorf("%q is outside the workspace", dir.Path)
if !strings.HasPrefix(p, state.Project) {
return nil, fmt.Errorf("%q is outside the project", dir.Path)
}
llb := fmt.Sprintf(

View File

@ -32,11 +32,11 @@ const (
computedFile = "computed.json"
)
type Workspace struct {
type Project struct {
Path string
}
func Init(ctx context.Context, dir string) (*Workspace, error) {
func Init(ctx context.Context, dir string) (*Project, error) {
root, err := filepath.Abs(dir)
if err != nil {
return nil, err
@ -57,12 +57,12 @@ func Init(ctx context.Context, dir string) (*Workspace, error) {
return nil, err
}
return &Workspace{
return &Project{
Path: root,
}, nil
}
func Open(ctx context.Context, dir string) (*Workspace, error) {
func Open(ctx context.Context, dir string) (*Project, error) {
_, err := os.Stat(path.Join(dir, daggerDir))
if err != nil {
if errors.Is(err, os.ErrNotExist) {
@ -76,12 +76,12 @@ func Open(ctx context.Context, dir string) (*Workspace, error) {
return nil, err
}
return &Workspace{
return &Project{
Path: root,
}, nil
}
func Current(ctx context.Context) (*Workspace, error) {
func Current(ctx context.Context) (*Project, error) {
current, err := os.Getwd()
if err != nil {
return nil, err
@ -103,11 +103,11 @@ func Current(ctx context.Context) (*Workspace, error) {
return nil, ErrNotInit
}
func (w *Workspace) envPath(name string) string {
func (w *Project) envPath(name string) string {
return path.Join(w.Path, daggerDir, envDir, name)
}
func (w *Workspace) List(ctx context.Context) ([]*State, error) {
func (w *Project) List(ctx context.Context) ([]*State, error) {
var (
environments = []*State{}
err error
@ -139,7 +139,7 @@ func (w *Workspace) List(ctx context.Context) ([]*State, error) {
return environments, nil
}
func (w *Workspace) Get(ctx context.Context, name string) (*State, error) {
func (w *Project) Get(ctx context.Context, name string) (*State, error) {
envPath, err := filepath.Abs(w.envPath(name))
if err != nil {
return nil, err
@ -179,7 +179,7 @@ func (w *Workspace) Get(ctx context.Context, name string) (*State, error) {
st.Plan.Module = planRelPath
}
}
st.Workspace = w.Path
st.Project = w.Path
computed, err := os.ReadFile(path.Join(envPath, stateDir, computedFile))
if err == nil {
@ -189,7 +189,7 @@ func (w *Workspace) Get(ctx context.Context, name string) (*State, error) {
return &st, nil
}
func (w *Workspace) Save(ctx context.Context, st *State) error {
func (w *Project) Save(ctx context.Context, st *State) error {
data, err := yaml.Marshal(st)
if err != nil {
return err
@ -234,7 +234,7 @@ func (w *Workspace) Save(ctx context.Context, st *State) error {
return nil
}
func (w *Workspace) Create(ctx context.Context, name string, plan Plan) (*State, error) {
func (w *Project) Create(ctx context.Context, name string, plan Plan) (*State, error) {
if _, err := w.Get(ctx, name); err == nil {
return nil, ErrExist
}
@ -257,8 +257,8 @@ func (w *Workspace) Create(ctx context.Context, name string, plan Plan) (*State,
manifestPath := path.Join(envPath, manifestFile)
st := &State{
Path: envPath,
Workspace: w.Path,
Path: envPath,
Project: w.Path,
Plan: Plan{
Package: pkg,
},
@ -293,7 +293,7 @@ func (w *Workspace) Create(ctx context.Context, name string, plan Plan) (*State,
return st, nil
}
func (w *Workspace) cleanPackageName(ctx context.Context, pkg string) (string, error) {
func (w *Project) cleanPackageName(ctx context.Context, pkg string) (string, error) {
lg := log.
Ctx(ctx).
With().
@ -325,7 +325,7 @@ func (w *Workspace) cleanPackageName(ctx context.Context, pkg string) (string, e
}
if !strings.HasPrefix(p, w.Path) {
lg.Fatal().Err(err).Msg("package is outside the workspace")
lg.Fatal().Err(err).Msg("package is outside the project")
return "", err
}

View File

@ -12,7 +12,7 @@ import (
"gopkg.in/yaml.v3"
)
func TestWorkspace(t *testing.T) {
func TestProject(t *testing.T) {
ctx := context.TODO()
keychain.EnsureDefaultKey(ctx)
@ -25,39 +25,39 @@ func TestWorkspace(t *testing.T) {
require.ErrorIs(t, ErrNotInit, err)
// Init
workspace, err := Init(ctx, root)
project, err := Init(ctx, root)
require.NoError(t, err)
require.Equal(t, root, workspace.Path)
require.Equal(t, root, project.Path)
// Create
st, err := workspace.Create(ctx, "test", Plan{
st, err := project.Create(ctx, "test", Plan{
Module: ".",
})
require.NoError(t, err)
require.Equal(t, "test", st.Name)
// Open
workspace, err = Open(ctx, root)
project, err = Open(ctx, root)
require.NoError(t, err)
require.Equal(t, root, workspace.Path)
require.Equal(t, root, project.Path)
// List
envs, err := workspace.List(ctx)
envs, err := project.List(ctx)
require.NoError(t, err)
require.Len(t, envs, 1)
require.Equal(t, "test", envs[0].Name)
// Get
env, err := workspace.Get(ctx, "test")
env, err := project.Get(ctx, "test")
require.NoError(t, err)
require.Equal(t, "test", env.Name)
// Save
require.NoError(t, env.SetInput("foo", TextInput("bar")))
require.NoError(t, workspace.Save(ctx, env))
workspace, err = Open(ctx, root)
require.NoError(t, project.Save(ctx, env))
project, err = Open(ctx, root)
require.NoError(t, err)
env, err = workspace.Get(ctx, "test")
env, err = project.Get(ctx, "test")
require.NoError(t, err)
require.Contains(t, env.Inputs, "foo")
}
@ -77,28 +77,28 @@ func TestEncryption(t *testing.T) {
root, err := os.MkdirTemp(os.TempDir(), "dagger-*")
require.NoError(t, err)
workspace, err := Init(ctx, root)
project, err := Init(ctx, root)
require.NoError(t, err)
_, err = workspace.Create(ctx, "test", Plan{
_, err = project.Create(ctx, "test", Plan{
Module: ".",
})
require.NoError(t, err)
// Set a plaintext input, make sure it is not encrypted
st, err := workspace.Get(ctx, "test")
st, err := project.Get(ctx, "test")
require.NoError(t, err)
require.NoError(t, st.SetInput("plain", TextInput("plain")))
require.NoError(t, workspace.Save(ctx, st))
require.NoError(t, project.Save(ctx, st))
o := readManifest(st)
require.Contains(t, o.Inputs, "plain")
require.Equal(t, "plain", string(*o.Inputs["plain"].Text))
// Set a secret input, make sure it's encrypted
st, err = workspace.Get(ctx, "test")
st, err = project.Get(ctx, "test")
require.NoError(t, err)
require.NoError(t, st.SetInput("secret", SecretInput("secret")))
require.NoError(t, workspace.Save(ctx, st))
require.NoError(t, project.Save(ctx, st))
o = readManifest(st)
require.Contains(t, o.Inputs, "secret")
secretValue := string(*o.Inputs["secret"].Secret)
@ -106,10 +106,10 @@ func TestEncryption(t *testing.T) {
require.True(t, strings.HasPrefix(secretValue, "ENC["))
// Change another input, make sure our secret didn't change
st, err = workspace.Get(ctx, "test")
st, err = project.Get(ctx, "test")
require.NoError(t, err)
require.NoError(t, st.SetInput("plain", TextInput("different")))
require.NoError(t, workspace.Save(ctx, st))
require.NoError(t, project.Save(ctx, st))
o = readManifest(st)
require.Contains(t, o.Inputs, "plain")
require.Equal(t, "different", string(*o.Inputs["plain"].Text))

View File

@ -13,8 +13,8 @@ type State struct {
// State path
Path string `yaml:"-"`
// Workspace path
Workspace string `yaml:"-"`
// Project path
Project string `yaml:"-"`
// Plan
Plan Plan `yaml:"plan,omitempty"`
@ -33,7 +33,7 @@ type State struct {
// Cue module containing the environment plan
func (s *State) CompilePlan(ctx context.Context) (*compiler.Value, error) {
w := s.Workspace
w := s.Project
// FIXME: backward compatibility
if mod := s.Plan.Module; mod != "" {
w = path.Join(w, mod)
@ -44,7 +44,7 @@ func (s *State) CompilePlan(ctx context.Context) (*compiler.Value, error) {
// However:
// 1) As of right now, there's no way to update universe through the
// CLI, so we are lazily updating on `dagger up` using the embedded `universe`
// 2) For backward compatibility: if the workspace was `dagger
// 2) For backward compatibility: if the project was `dagger
// init`-ed before we added support for vendoring universe, it might not
// contain a `cue.mod`.
if err := vendorUniverse(ctx, w); err != nil {
@ -83,11 +83,11 @@ func (s *State) CompileInputs() (*compiler.Value, error) {
// VendorUniverse vendors the latest (built-in) version of the universe into the
// environment's `cue.mod`.
// FIXME: This has nothing to do in `State` and should be tied to a `Workspace`.
// FIXME: This has nothing to do in `State` and should be tied to a `Project`.
// However, since environments could point to different modules before, we have
// to handle vendoring on a per environment basis.
func (s *State) VendorUniverse(ctx context.Context) error {
w := s.Workspace
w := s.Project
// FIXME: backward compatibility
if mod := s.Plan.Module; mod != "" {
w = path.Join(w, mod)

View File

@ -7,19 +7,19 @@ common_setup() {
# otherwise infinite recursion when DAGGER_BINARY is not set.
export DAGGER="${DAGGER_BINARY:-$(bash -c 'command -v dagger')}"
# Set the workspace to the universe directory (so tests can run from anywhere)
# Set the project to the universe directory (so tests can run from anywhere)
UNIVERSE="$( cd "$( dirname "$BATS_TEST_FILENAME" )" >/dev/null 2>&1 && pwd )"
DAGGER_WORKSPACE="$UNIVERSE"
export DAGGER_WORKSPACE
DAGGER_PROJECT="$UNIVERSE"
export DAGGER_PROJECT
# Force pretty printing for error reporting
DAGGER_LOG_FORMAT="pretty"
export DAGGER_LOG_FORMAT
# Sandbox workspace.
DAGGER_SANDBOX="$(mktemp -d -t dagger-workspace-XXXXXX)"
# Sandbox project.
DAGGER_SANDBOX="$(mktemp -d -t dagger-project-XXXXXX)"
export DAGGER_SANDBOX
dagger init -w "$DAGGER_SANDBOX"
dagger init --project "$DAGGER_SANDBOX"
# allows the use of `sops`
SOPS_AGE_KEY_FILE=~/.config/dagger/keys.txt
@ -31,28 +31,28 @@ dagger() {
"${DAGGER}" "$@"
}
# copy an environment from the current workspace to the sandbox.
# copy an environment from the current project to the sandbox.
#
# this is needed if the test requires altering inputs without dirtying the
# current environment.
# Usage:
# copy_to_sandbox myenv
# dagger input secret -w "$DAGGER_SANDBOX" -e myenv "temporary change"
# dagger up -w "$DAGGER_SANDBOX" -e myenv
# dagger up --project "$DAGGER_SANDBOX" -e myenv
#
# To use the testdata directory in tests, add the package name as the second argument
# Usage:
# copy_to_sandbox myenv mypackage
copy_to_sandbox() {
local name="$1"
local source="$DAGGER_WORKSPACE"/.dagger/env/"$name"
local source="$DAGGER_PROJECT"/.dagger/env/"$name"
local target="$DAGGER_SANDBOX"/.dagger/env/"$name"
cp -a "$source" "$target"
if [ -d "$2" ]; then
local package="$2"
local source_package="$DAGGER_WORKSPACE"/"$package"
local source_package="$DAGGER_PROJECT"/"$package"
local target_package="$DAGGER_SANDBOX"/
cp -a "$source_package" "$target_package"

View File

@ -132,12 +132,12 @@ setup() {
copy_to_sandbox kubernetes-deployment kubernetes
# Set kubeconfig
dagger -w "$DAGGER_SANDBOX" -e kubernetes-deployment input text TestKubeconfig -f "$HOME"/.kube/config
dagger --project "$DAGGER_SANDBOX" -e kubernetes-deployment input text TestKubeconfig -f "$HOME"/.kube/config
dagger -w "$DAGGER_SANDBOX" -e kubernetes-deployment up
dagger --project "$DAGGER_SANDBOX" -e kubernetes-deployment up
# Unset kubeconfig
dagger -w "$DAGGER_SANDBOX" -e kubernetes-deployment input unset TestKubeconfig
dagger --project "$DAGGER_SANDBOX" -e kubernetes-deployment input unset TestKubeconfig
}
@test "kubernetes: kustomize" {
@ -151,12 +151,12 @@ setup() {
copy_to_sandbox kubernetes-helm kubernetes
# Set kubeconfig
dagger -w "$DAGGER_SANDBOX" -e kubernetes-helm input text TestKubeconfig -f "$HOME"/.kube/config
dagger --project "$DAGGER_SANDBOX" -e kubernetes-helm input text TestKubeconfig -f "$HOME"/.kube/config
dagger -w "$DAGGER_SANDBOX" -e kubernetes-helm up
dagger --project "$DAGGER_SANDBOX" -e kubernetes-helm up
# Unset kubeconfig
dagger -w "$DAGGER_SANDBOX" -e kubernetes-helm input unset TestKubeconfig
dagger --project "$DAGGER_SANDBOX" -e kubernetes-helm input unset TestKubeconfig
}
@test "google cloud: gcr" {
@ -194,23 +194,23 @@ setup() {
copy_to_sandbox terraform terraform
# Add the var and try again
run dagger -w "$DAGGER_SANDBOX" -e terraform input text TestTerraform.apply.tfvars.input "42"
run dagger -w "$DAGGER_SANDBOX" -e terraform up
run dagger --project "$DAGGER_SANDBOX" -e terraform input text TestTerraform.apply.tfvars.input "42"
run dagger --project "$DAGGER_SANDBOX" -e terraform up
assert_success
# ensure the tfvar was passed correctly
run dagger -w "$DAGGER_SANDBOX" query -e terraform TestTerraform.apply.output.input.value -f text
run dagger --project "$DAGGER_SANDBOX" query -e terraform TestTerraform.apply.output.input.value -f text
assert_success
assert_output "42"
# ensure the random value is always the same
# this proves we're effectively using the s3 backend
run dagger -w "$DAGGER_SANDBOX" query -e terraform TestTerraform.apply.output.random.value -f json
run dagger --project "$DAGGER_SANDBOX" query -e terraform TestTerraform.apply.output.random.value -f json
assert_success
assert_output "36"
# Unset input
run dagger -w "$DAGGER_SANDBOX" -e terraform input unset TestTerraform.apply.tfvars.input
run dagger --project "$DAGGER_SANDBOX" -e terraform input unset TestTerraform.apply.tfvars.input
assert_success
}

View File

@ -42,7 +42,7 @@ setup() {
@test "dagger new: modules" {
"$DAGGER" init
cp -a "$TESTDIR"/cli/input/simple/* "$DAGGER_WORKSPACE"
cp -a "$TESTDIR"/cli/input/simple/* "$DAGGER_PROJECT"
"$DAGGER" new "a"
"$DAGGER" new "b"
@ -61,7 +61,7 @@ setup() {
assert_success
assert_output "b"
# run ls -la "$DAGGER_WORKSPACE"
# run ls -la "$DAGGER_PROJECT"
# assert_failure
}
@ -70,7 +70,7 @@ setup() {
@test "dagger new: packages" {
"$DAGGER" init
cp -a "$TESTDIR"/cli/packages/* "$DAGGER_WORKSPACE"
cp -a "$TESTDIR"/cli/packages/* "$DAGGER_PROJECT"
"$DAGGER" new "a" --package alpha.dagger.io/test/a
"$DAGGER" new "b" --package alpha.dagger.io/test/b
@ -293,13 +293,13 @@ setup() {
dagger_new_with_plan input "$TESTDIR"/cli/input/artifact
# input dir outside the workspace
# input dir outside the project
run "$DAGGER" input -e "input" dir "source" /tmp
assert_failure
# input dir inside the workspace
cp -R "$TESTDIR"/cli/input/artifact/testdata/ "$DAGGER_WORKSPACE"/testdata
"$DAGGER" input -e "input" dir "source" "$DAGGER_WORKSPACE"/testdata
# input dir inside the project
cp -R "$TESTDIR"/cli/input/artifact/testdata/ "$DAGGER_PROJECT"/testdata
"$DAGGER" input -e "input" dir "source" "$DAGGER_PROJECT"/testdata
"$DAGGER" up -e "input"
run "$DAGGER" -l error query -e "input"
assert_success
@ -331,8 +331,8 @@ setup() {
run [ -d "$TESTDIR"/cli/input/ignore/testdata/.dagger ]
assert_success
cp -R "$TESTDIR"/cli/input/ignore/testdata/ "$DAGGER_WORKSPACE"/testdata
"$DAGGER" input -e "input" dir "source" "$DAGGER_WORKSPACE"/testdata
cp -R "$TESTDIR"/cli/input/ignore/testdata/ "$DAGGER_PROJECT"/testdata
"$DAGGER" input -e "input" dir "source" "$DAGGER_PROJECT"/testdata
"$DAGGER" up -e "input"
assert_success
}

View File

@ -11,7 +11,7 @@ setup() {
# new-style tests: use 'dagger up'
#
# For new tests, please adopt new-style.
# NOTE: you will need to 'unset DAGGER_WORKSPACE'
# NOTE: you will need to 'unset DAGGER_PROJECT'
# at the beginning of each new-style test.
@test "core: inputs & outputs" {
@ -26,7 +26,7 @@ setup() {
assert_output --partial 'dir'
# Set dir input
dagger -e test-core input dir dir "$DAGGER_WORKSPACE"
dagger -e test-core input dir dir "$DAGGER_PROJECT"
# Set text input
dagger -e test-core input text name Bob
@ -160,5 +160,5 @@ setup() {
}
@test "compute: exclude" {
"$DAGGER" up -w "$TESTDIR"/compute/exclude
"$DAGGER" up --project "$TESTDIR"/compute/exclude
}

View File

@ -10,8 +10,8 @@ common_setup() {
DAGGER_LOG_FORMAT="pretty"
export DAGGER_LOG_FORMAT
DAGGER_WORKSPACE="$(mktemp -d -t dagger-workspace-XXXXXX)"
export DAGGER_WORKSPACE
DAGGER_PROJECT="$(mktemp -d -t dagger-project-XXXXXX)"
export DAGGER_PROJECT
SOPS_AGE_KEY_FILE=~/.config/dagger/keys.txt
export SOPS_AGE_KEY_FILE
@ -21,7 +21,7 @@ dagger_new_with_plan() {
local name="$1"
local sourcePlan="$2"
cp -a "$sourcePlan"/* "$DAGGER_WORKSPACE"
cp -a "$sourcePlan"/* "$DAGGER_PROJECT"
"$DAGGER" new "$name"
}
@ -29,8 +29,8 @@ dagger_new_with_plan() {
dagger_new_with_env() {
local sourcePlan="$1"
"$DAGGER" init -w "$DAGGER_WORKSPACE"
rsync -av "$sourcePlan"/ "$DAGGER_WORKSPACE"
"$DAGGER" init --project "$DAGGER_PROJECT"
rsync -av "$sourcePlan"/ "$DAGGER_PROJECT"
}
# dagger helper to execute the right binary