Merge pull request #1639 from jlongtine/europa-default

Clean up non-Europa commands and code
Joel Longtine 2022-02-23 12:31:47 -07:00 committed by GitHub
commit 344998c904
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
635 changed files with 494 additions and 23157 deletions

View File

@ -1,2 +0,0 @@
# dagger state
state/**

View File

@ -1,61 +0,0 @@
// A dagger workflow to develop dagger
package main
import (
"alpha.dagger.io/dagger"
"alpha.dagger.io/os"
"alpha.dagger.io/go"
)
// Dagger source code
source: dagger.#Artifact @dagger(input)
test: {
// Go unit tests
unit: {
logs: (os.#File & {
from: build.ctr
path: "/test.log"
read: format: "string"
}).read.data
}
// Full suite of bats integration tests
integration: {
// FIXME
}
}
// Build the dagger binaries
build: {
ctr: go.#Container & {
"source": source
setup: [
"apk add --no-cache file",
]
command: """
go test -v ./... > /test.log
go build -o /binaries/ ./cmd/... > /build.log
"""
}
binaries: os.#Dir & {
from: ctr
path: "/binaries"
}
logs: (os.#File & {
from: ctr
path: "/build.log"
read: format: "string"
}).read.data
}
// Execute `dagger help`
usage: os.#Container & {
command: "dagger help"
let binpath = "/usr/local/dagger/bin"
mount: "\(binpath)": from: build.binaries
shell: search: "\(binpath)": true
}

View File

@ -1,26 +0,0 @@
name: dev
inputs:
source:
dir:
path: .
include: []
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1ez5phfa2l6e9fqt4nnem734jajg4nlcefhv3ydeltzjlele47ags34868h
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSA4c0xSL1hJZmtsejN0dGpM
OCtpNFdLK0ZUdlc2YXBQV1RaazFpcERSOG5BClZzZGR3YllQODk5dGp5akpxVkhM
YUFxODJVeG1XbHl1WEVmNkMwREdldkkKLS0tIGNDVzNkak9UMnVyQkRHN3Q0TjZz
TWhsS0NUcU5ac0YrQjdmM0dlcEdRYWMKqUO/A83KsECVW+hSgVLTkIuK11VX3T77
fBSXL9D+riW9Q/TCKP0uMVulGnqZF4OoHAzLCug7aV6AEszpeSWyjg==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2021-05-25T00:31:59Z"
mac: ENC[AES256_GCM,data:CMrCKRLDHc/o18zlVc/nwin1d14y2ruhk4i3mL/jtzDVeXDusJpelcgYAsNvKdhXU1v/gl1P4fINzQ4zc6t3krWW80UQFXBSZ1c2EJThKzVQfKFOwr3Fg3FyTm3zeX9Lk4e8z1SFmBj08k3jxr1xtdKwPKLmYN8e37cOy0bT97c=,iv:ffvYUpDxFLCyCynEVkFkBHCsp4HzLFcabiSjuCM0r40=,tag:ZwjW3qB8XcIvF+GoHjsbaQ==,type:str]
pgp: []
encrypted_suffix: secret
version: 3.7.1

View File

@ -1,2 +0,0 @@
# dagger state
state/**

View File

@ -1 +0,0 @@
module: ""

View File

@ -1,5 +0,0 @@
# generated by dagger
dagger.lock
alpha.dagger.io
dagger.io
universe.dagger.io

View File

@ -1,30 +0,0 @@
package main
import (
"alpha.dagger.io/dagger"
"alpha.dagger.io/js/yarn"
"alpha.dagger.io/netlify"
)
// dagger repository
repository: dagger.#Artifact @dagger(input)
// Build the docs website
docs: yarn.#Package & {
source: repository
cwd: "website/"
buildDir: "website/build"
env: {
OAUTH_ENABLE: "true"
REACT_APP_OAUTH_SCOPE: "user:email"
REACT_APP_GITHUB_AUTHORIZE_URI: "https://github.com/login/oauth/authorize?client_id=${REACT_APP_CLIENT_ID}&scope=${REACT_APP_OAUTH_SCOPE}&allow_signup=false"
REACT_APP_DAGGER_SITE_URI: "https://dagger.io"
REACT_APP_API_PROXY_ENABLE: "true"
}
}
// Deploy the docs website
site: netlify.#Site & {
name: string | *"docs-dagger-io" @dagger(input)
contents: docs.build
}

View File

@ -1,39 +0,0 @@
plan:
module: .dagger/env/docs/plan
name: docs
inputs:
docs.env.REACT_APP_CLIENT_ID:
text: 1a6d02cb59199eb205ef
docs.env.REACT_APP_CLIENT_SECRET:
text: 47a1a67abc8d3fe9edb829a6c7775fbad47f47bb
repository:
dir:
path: .
exclude:
- '**/node_modules'
- cmd/dagger/dagger
- cmd/dagger/dagger-debug
site.account.name:
text: blocklayer
site.account.token:
secret: ENC[AES256_GCM,data:jPJ8N6cAmtYnQh2SyhM9bQGfkhz777S4fyPDm2YhujwgXH6EogN2Uyw6Ew==,iv:gDchoJYLdQ8IPxrUUIsQ9s2f12JOhh7p573DwOIV2zE=,tag:okatHyjVGPGNOt+aw4iUHg==,type:str]
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBaZVVuYThsYk9YcWhYb0g4
TDhZQ21YN0FCRExKV2tqbVVUNTg5MWRUVUJzCndjc28yVUZEK3ZoQzVDQkk4emZQ
WGx2bUxSZlBENXlibzQ1MVkvc2I2MVkKLS0tIFJKS0ZENFhuVmdUM2h1a2xrK01a
ejRKQXNFZzBTbUxpc05acnkxQ2U2UkEKX1byNj64xOiRGAJ9lwh55d/mlasI3H6H
b+o3HbXSbV0G0UwQxEOisntR6o27ry/l12ai/sOQ4f9MXm6FRw2XTg==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2021-06-18T14:46:35Z"
mac: ENC[AES256_GCM,data:RX4rZSP2sMqSGfbSkpK1rAqLcpYHv99jUvhGpQvM9Sjxki4LskxQP51BuZj7eVD0u0uqBMObatyZvofTybld6tlrgQeddIS4SINRJNsC+P7dzaiOEePZX/oCCMIQCnO8+OLO7z6DNdy1IMxhuCZWeGHDJt1ritLds3ACAjtFrGo=,iv:AI8tdLTFnFV919ypY0RKVaDNfwEN6b5hZuH4DvlhdAk=,tag:izIPuozRiYpQ50hi4yLyCw==,type:str]
pgp: []
encrypted_suffix: secret
version: 3.7.1

View File

@ -18,66 +18,66 @@ on:
- 'Makefile'
- '.github/workflows/test-docs.yml'
# jobs:
# test-docs:
# name: "Test Docs"
# runs-on: ubuntu-latest
# timeout-minutes: 15
# steps:
# - name: "Check out"
# uses: actions/checkout@v2
# - name: "Setup Go"
# uses: actions/setup-go@v1
# with:
# go-version: 1.16
# - name: "Install CUE"
# run: |
# # Cue
# export CUE_VERSION="$(grep cue ./go.mod | cut -d' ' -f2 | head -1 | sed -E 's/\.[[:digit:]]\.[[:alnum:]]+-[[:alnum:]]+$//')"
# export CUE_TARBALL="cue_${CUE_VERSION}_linux_amd64.tar.gz"
# echo "Installing cue version $CUE_VERSION"
# curl -L https://github.com/cue-lang/cue/releases/download/${CUE_VERSION}/${CUE_TARBALL} | sudo tar zxf - -C /usr/local/bin
# - name: "Run local registry"
# run: |
# docker run -d -p 5000:5000 --name registry registry:2
# # TODO: DISABLED for CI deadlock debugging
# # - name: Generate KiND config
# # run: |
# # echo 'kind: Cluster
# # apiVersion: kind.x-k8s.io/v1alpha4
# # containerdConfigPatches:
# # - |-
# # [plugins."io.containerd.grpc.v1.cri".registry.mirrors."localhost:5000"]
# # endpoint = ["http://registry:5000"]' > ./kind-config.yaml
# # cat ./kind-config.yaml
# # - name: "Setup KiND"
# # uses: helm/kind-action@v1.2.0
# # with:
# # config: "./kind-config.yaml"
# # - name: Connect local registry to KiND
# # run: |
# # docker network connect kind registry
# - name: "Import Dagger private key"
# env:
# DAGGER_AGE_KEY: ${{ secrets.DAGGER_AGE_KEY }}
# run: |
# mkdir -p ~/.config/dagger
# echo "$DAGGER_AGE_KEY" > ~/.config/dagger/keys.txt
# - name: "Expose GitHub Runtime"
# uses: crazy-max/ghaction-github-runtime@v1
# - name: Test
# # TODO: https://github.com/dagger/dagger/pull/1341
# # env:
# # DAGGER_CACHE_TO: "type=gha,mode=max,scope=test-docs"
# # DAGGER_CACHE_FROM: "type=gha,mode=max,scope=test-docs"
# run: |
# make doc-test

View File

@ -25,66 +25,6 @@ on:
- '.github/workflows/test-universe.yml'
jobs:
universe-tests:
name: "Universe Tests"
runs-on: ubuntu-latest
timeout-minutes: 30
services:
localstack:
image: localstack/localstack:0.12.16
env:
SERVICES: s3, ecr
LOCALSTACK_API_KEY: ${{ secrets.LOCALSTACK_API_KEY }}
ports:
- 4566:4566
- 4571:4571
- 4510:4510
options: >-
--health-cmd "curl -f http://localhost:4566/health"
--health-start-period 5s
--health-timeout 5s
--health-interval 5s
--health-retries 10
steps:
- name: "Check out"
uses: actions/checkout@v2
- name: "Setup Go"
uses: actions/setup-go@v1
with:
go-version: 1.16
- name: "Setup KiND"
uses: helm/kind-action@v1.2.0
- name: "Import Dagger private key"
env:
DAGGER_AGE_KEY: ${{ secrets.DAGGER_AGE_KEY }}
run: |
mkdir -p ~/.config/dagger
echo "$DAGGER_AGE_KEY" > ~/.config/dagger/keys.txt
- name: "Provision Localstack AWS resources"
env:
AWS_ACCESS_KEY_ID: test
AWS_SECRET_ACCESS_KEY: test
AWS_DEFAULT_REGION: us-east-2
run: |
aws --endpoint-url=http://localhost:4566 s3 mb s3://dagger-ci
aws --endpoint-url=http://localhost:4566 ecr create-repository --repository-name dagger-ci
- name: "Expose GitHub Runtime"
uses: crazy-max/ghaction-github-runtime@v1
- name: Test
# TODO: https://github.com/dagger/dagger/pull/1341
# env:
# DAGGER_CACHE_TO: "type=gha,mode=max,scope=test-universe"
# DAGGER_CACHE_FROM: "type=gha,mode=max,scope=test-universe"
run: |
make universe-test
universe-europa-tests:
name: "Universe Tests - Europa"
runs-on: ubuntu-latest
@ -118,4 +58,4 @@ jobs:
DAGGER_CACHE_TO: "type=gha,mode=max,scope=test-universe"
DAGGER_CACHE_FROM: "type=gha,mode=max,scope=test-universe"
run: |
-make europa-universe-test
+make universe-test

View File

@ -59,13 +59,13 @@ core-integration: dagger-debug # Run core integration tests
yarn --cwd "./tests" install yarn --cwd "./tests" install
DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./tests" test DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./tests" test
# .PHONY: universe-test
# universe-test: dagger-debug # Run universe tests
# yarn --cwd "./universe" install
# DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./universe" test
.PHONY: universe-test
universe-test: dagger-debug # Run universe tests
yarn --cwd "./universe" install
DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./universe" test
.PHONY: europa-universe-test
europa-universe-test: dagger-debug # Run Europa universe tests
yarn --cwd "./pkg/universe.dagger.io" install yarn --cwd "./pkg/universe.dagger.io" install
DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./pkg/universe.dagger.io" test DAGGER_BINARY="$(shell pwd)/cmd/dagger/dagger-debug" yarn --cwd "./pkg/universe.dagger.io" test

View File

@ -12,78 +12,8 @@ import (
"go.dagger.io/dagger/client" "go.dagger.io/dagger/client"
"go.dagger.io/dagger/compiler" "go.dagger.io/dagger/compiler"
"go.dagger.io/dagger/plancontext" "go.dagger.io/dagger/plancontext"
"go.dagger.io/dagger/state"
) )
func CurrentProject(ctx context.Context) *state.Project {
lg := log.Ctx(ctx)
if projectPath := viper.GetString("project"); projectPath != "" {
project, err := state.Open(ctx, projectPath)
if err != nil {
lg.
Fatal().
Err(err).
Str("path", projectPath).
Msg("failed to open project")
}
return project
}
project, err := state.Current(ctx)
if err != nil {
lg.
Fatal().
Err(err).
Msg("failed to determine current project")
}
return project
}
func CurrentEnvironmentState(ctx context.Context, project *state.Project) *state.State {
lg := log.Ctx(ctx)
environmentName := viper.GetString("environment")
if environmentName != "" {
st, err := project.Get(ctx, environmentName)
if err != nil {
lg.
Fatal().
Err(err).
Msg("failed to load environment")
}
return st
}
environments, err := project.List(ctx)
if err != nil {
lg.
Fatal().
Err(err).
Msg("failed to list environments")
}
if len(environments) == 0 {
lg.
Fatal().
Msg("no environments")
}
if len(environments) > 1 {
envNames := []string{}
for _, e := range environments {
envNames = append(envNames, e.Name)
}
lg.
Fatal().
Err(err).
Strs("environments", envNames).
Msg("multiple environments available in the project, select one with `--environment`")
}
return environments[0]
}
// FormatValue returns the String representation of the cue value
func FormatValue(val *compiler.Value) string {
switch {

View File

@ -2,13 +2,9 @@ package common
import (
"context"
"crypto/sha256"
"fmt"
"strings"
"github.com/go-git/go-git/v5"
"github.com/spf13/cobra"
"go.dagger.io/dagger/state"
"go.dagger.io/dagger/telemetry"
)
@ -34,54 +30,54 @@ func commandName(cmd *cobra.Command) string {
// TrackProjectCommand is like TrackCommand but includes project and
// optionally environment metadata.
// func TrackProjectCommand(ctx context.Context, cmd *cobra.Command, w *state.Project, env *state.State, props ...*telemetry.Property) chan struct{} {
// props = append([]*telemetry.Property{
// {
// // Hash the repository URL for privacy
// Name: "git_repository_hash",
// Value: hash(gitRepoURL(w.Path)),
// },
// {
// // The project path might contain the username (e.g. /home/user/project), so we hash it for privacy.
// Name: "project_path_hash",
// Value: hash(w.Path),
// },
// }, props...)
// if env != nil {
// props = append([]*telemetry.Property{
// {
// Name: "environment_name",
// Value: env.Name,
// },
// }, props...)
// }
// return TrackCommand(ctx, cmd, props...)
// }
// hash returns the sha256 digest of the string
// func hash(s string) string {
// return fmt.Sprintf("%x", sha256.Sum256([]byte(s)))
// }
// // gitRepoURL returns the git repository remote, if any.
// func gitRepoURL(path string) string {
// repo, err := git.PlainOpenWithOptions(path, &git.PlainOpenOptions{
// DetectDotGit: true,
// })
// if err != nil {
// return ""
// }
// origin, err := repo.Remote("origin")
// if err != nil {
// return ""
// }
// if urls := origin.Config().URLs; len(urls) > 0 {
// return urls[0]
// }
// return ""
// }

View File

@ -1,236 +0,0 @@
package cmd
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
"strings"
"cuelang.org/go/cue"
"github.com/containerd/containerd/platforms"
specs "github.com/opencontainers/image-spec/specs-go/v1"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/compiler"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/plancontext"
"go.dagger.io/dagger/solver"
"go.dagger.io/dagger/state"
"go.mozilla.org/sops/v3"
"go.mozilla.org/sops/v3/decrypt"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var computeCmd = &cobra.Command{
Use: "compute CONFIG",
Short: "Compute a configuration (DEPRECATED)",
Args: cobra.ExactArgs(1),
Hidden: true,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
doneCh := common.TrackCommand(ctx, cmd)
st := &state.State{
Context: plancontext.New(),
Name: "FIXME",
Platform: platforms.Format(specs.Platform{OS: "linux", Architecture: "amd64"}),
Path: args[0],
Plan: state.Plan{
Module: args[0],
},
}
for _, input := range viper.GetStringSlice("input-string") {
parts := strings.SplitN(input, "=", 2)
if len(parts) != 2 {
lg.Fatal().Msgf("failed to parse input: input-string")
}
k, v := parts[0], parts[1]
err := st.SetInput(k, state.TextInput(v))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
for _, input := range viper.GetStringSlice("input-dir") {
parts := strings.SplitN(input, "=", 2)
if len(parts) != 2 {
lg.Fatal().Msgf("failed to parse input: input-dir")
}
k, v := parts[0], parts[1]
err := st.SetInput(k, state.DirInput(v, []string{}, []string{}))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
for _, input := range viper.GetStringSlice("input-git") {
parts := strings.SplitN(input, "=", 2)
if len(parts) != 2 {
lg.Fatal().Msgf("failed to parse input: input-git")
}
k, v := parts[0], parts[1]
err := st.SetInput(k, state.GitInput(v, "", ""))
if err != nil {
lg.
Fatal().
Err(err).
Str("input", k).
Msg("failed to add input")
}
}
if f := viper.GetString("input-json"); f != "" {
lg := lg.With().Str("path", f).Logger()
content, err := os.ReadFile(f)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
plaintext, err := decrypt.Data(content, "json")
if err != nil && !errors.Is(err, sops.MetadataNotFound) {
lg.Fatal().Err(err).Msg("unable to decrypt")
}
if len(plaintext) > 0 {
content = plaintext
}
if !json.Valid(content) {
lg.Fatal().Msg("invalid json")
}
err = st.SetInput("", state.JSONInput(string(content)))
if err != nil {
lg.Fatal().Err(err).Msg("failed to add input")
}
}
if f := viper.GetString("input-yaml"); f != "" {
lg := lg.With().Str("path", f).Logger()
content, err := os.ReadFile(f)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
plaintext, err := decrypt.Data(content, "yaml")
if err != nil && !errors.Is(err, sops.MetadataNotFound) {
lg.Fatal().Err(err).Msg("unable to decrypt")
}
if len(plaintext) > 0 {
content = plaintext
}
err = st.SetInput("", state.YAMLInput(string(content)))
if err != nil {
lg.Fatal().Err(err).Msg("failed to add input")
}
}
if f := viper.GetString("input-file"); f != "" {
lg := lg.With().Str("path", f).Logger()
parts := strings.SplitN(f, "=", 2)
k, v := parts[0], parts[1]
content, err := os.ReadFile(v)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read file")
}
if len(content) > 0 {
err = st.SetInput(k, state.FileInput(v))
if err != nil {
lg.Fatal().Err(err).Msg("failed to set input string")
}
}
}
cl := common.NewClient(ctx)
v := compiler.NewValue()
plan, err := st.CompilePlan(ctx)
if err != nil {
lg.Fatal().Err(err).Msg("failed to compile plan")
}
if err := v.FillPath(cue.MakePath(), plan); err != nil {
lg.Fatal().Err(err).Msg("failed to compile plan")
}
inputs, err := st.CompileInputs()
if err != nil {
lg.Fatal().Err(err).Msg("failed to compile inputs")
}
if err := v.FillPath(cue.MakePath(), inputs); err != nil {
lg.Fatal().Err(err).Msg("failed to compile inputs")
}
env, err := environment.New(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to create environment")
}
err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
// check that all inputs are set
checkInputs(ctx, env)
if err := env.Up(ctx, s); err != nil {
return err
}
if err := v.FillPath(cue.MakePath(), env.Computed()); err != nil {
return err
}
fmt.Println(v.JSON())
return nil
})
<-doneCh
if err != nil {
lg.Fatal().Err(err).Msg("failed to up environment")
}
},
}
func init() {
computeCmd.Flags().StringSlice("input-string", []string{}, "TARGET=STRING")
computeCmd.Flags().StringSlice("input-dir", []string{}, "TARGET=PATH")
computeCmd.Flags().String("input-file", "", "TARGET=PATH")
computeCmd.Flags().StringSlice("input-git", []string{}, "TARGET=REMOTE#REF")
computeCmd.Flags().String("input-json", "", "JSON")
computeCmd.Flags().String("input-yaml", "", "YAML")
if err := viper.BindPFlags(computeCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -20,7 +20,6 @@ import (
"go.dagger.io/dagger/cmd/dagger/cmd/common" "go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger" "go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/compiler" "go.dagger.io/dagger/compiler"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/pkg" "go.dagger.io/dagger/pkg"
"golang.org/x/term" "golang.org/x/term"
) )
@ -55,22 +54,22 @@ type Package struct {
func Parse(ctx context.Context, packageName string, val *compiler.Value) *Package {
lg := log.Ctx(ctx)
// parseValues := func(field string, values []*compiler.Value) []Value {
// val := []Value{}
// for _, i := range values {
// v := Value{}
// v.Name = strings.TrimPrefix(
// i.Path().String(),
// field+".",
// )
// v.Type = common.FormatValue(i)
// v.Description = common.ValueDocOneLine(i)
// val = append(val, v)
// }
// return val
// }
fields, err := val.Fields(cue.Definitions(true))
if err != nil {
@ -104,14 +103,6 @@ func Parse(ctx context.Context, packageName string, val *compiler.Value) *Packag
field.Name = name
field.Description = common.ValueDocOneLine(v)
// Inputs
inp := environment.ScanInputs(ctx, v)
field.Inputs = parseValues(field.Name, inp)
// Outputs
out := environment.ScanOutputs(ctx, v)
field.Outputs = parseValues(field.Name, out)
pkg.Fields = append(pkg.Fields, field)
}
@ -333,47 +324,48 @@ func walkStdlib(ctx context.Context, output, format string) {
lg.Info().Str("output", output).Msg("generating stdlib") lg.Info().Str("output", output).Msg("generating stdlib")
packages := map[string]*Package{} packages := map[string]*Package{}
err := fs.WalkDir(pkg.FS, pkg.AlphaModule, func(p string, d fs.DirEntry, err error) error { // TODO: Does this need to be re-worked for Europa?
if err != nil { // err := fs.WalkDir(pkg.FS, pkg.AlphaModule, func(p string, d fs.DirEntry, err error) error {
return err // if err != nil {
} // return err
// }
// Ignore useless embedded files // // Ignore useless embedded files
if p == "." || d.Name() == pkg.AlphaModule || !d.IsDir() || d.Name() == "cue.mod" || // if p == "." || d.Name() == pkg.AlphaModule || !d.IsDir() || d.Name() == "cue.mod" ||
strings.Contains(p, "cue.mod") || strings.Contains(p, "tests") { // strings.Contains(p, "cue.mod") || strings.Contains(p, "tests") {
return nil // return nil
} // }
p = strings.TrimPrefix(p, pkg.AlphaModule+"/") // p = strings.TrimPrefix(p, pkg.AlphaModule+"/")
// Ignore tests directories // // Ignore tests directories
if d.Name() == "tests" { // if d.Name() == "tests" {
return nil // return nil
} // }
pkgName := fmt.Sprintf("%s/%s", pkg.AlphaModule, p) // pkgName := fmt.Sprintf("%s/%s", pkg.AlphaModule, p)
lg.Info().Str("package", pkgName).Str("format", format).Msg("generating doc") // lg.Info().Str("package", pkgName).Str("format", format).Msg("generating doc")
val, err := loadCode(pkgName) // val, err := loadCode(pkgName)
if err != nil { // if err != nil {
if strings.Contains(err.Error(), "no CUE files") { // if strings.Contains(err.Error(), "no CUE files") {
lg.Warn().Str("package", p).Err(err).Msg("ignoring") // lg.Warn().Str("package", p).Err(err).Msg("ignoring")
return nil // return nil
} // }
if strings.Contains(err.Error(), "cannot find package") { // if strings.Contains(err.Error(), "cannot find package") {
lg.Warn().Str("package", p).Err(err).Msg("ignoring") // lg.Warn().Str("package", p).Err(err).Msg("ignoring")
return nil // return nil
} // }
return err // return err
} // }
pkg := Parse(ctx, pkgName, val) // pkg := Parse(ctx, pkgName, val)
packages[p] = pkg // packages[p] = pkg
return nil // return nil
}) // })
if err != nil { // if err != nil {
lg.Fatal().Err(err).Msg("cannot generate stdlib doc") // lg.Fatal().Err(err).Msg("cannot generate stdlib doc")
} // }
hasSubPackages := func(name string) bool { hasSubPackages := func(name string) bool {
for p := range packages { for p := range packages {
@ -401,7 +393,9 @@ func walkStdlib(ctx context.Context, output, format string) {
lg.Fatal().Err(err).Msg("cannot generate stdlib doc index") lg.Fatal().Err(err).Msg("cannot generate stdlib doc index")
} }
defer index.Close() defer index.Close()
fmt.Fprintf(index, "# Index\n\n") // FIXME: I removed a \n character, so that markdownlint doesn't complain
// about an extra newline at the end of the file.
fmt.Fprintf(index, "# Index\n")
indexKeys := []string{} indexKeys := []string{}
for p, pkg := range packages { for p, pkg := range packages {
@ -424,6 +418,11 @@ func walkStdlib(ctx context.Context, output, format string) {
// Generate index from sorted list of packages
sort.Strings(indexKeys)
// Add a extra blank line if we have at least one package
// TODO: this is a hack, fixes issue with markdownlint, if we haven't generated any docs.
if len(indexKeys) > 0 {
fmt.Fprintf(index, "\n")
}
for _, p := range indexKeys {
description := mdEscape(packages[p].Description)
fmt.Fprintf(index, "- [%s](./%s) - %s\n", p, getFileName(p), description)

View File

@ -1,32 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var downCmd = &cobra.Command{
Use: "down",
Short: "Take an environment offline (WARNING: may destroy infrastructure)",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
panic("not implemented")
},
// Remove hidden flag once command has been implemented
Hidden: true,
}
func init() {
downCmd.Flags().Bool("no-cache", false, "Disable all run cache")
if err := viper.BindPFlags(downCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,146 +0,0 @@
package cmd
import (
"context"
"fmt"
"os"
"os/exec"
"strings"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/solver"
"go.dagger.io/dagger/state"
"github.com/google/shlex"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"gopkg.in/yaml.v3"
)
var editCmd = &cobra.Command{
Use: "edit",
Short: "Interactively edit an environment",
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
data, err := yaml.Marshal(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to marshal state")
}
f, err := os.CreateTemp("", fmt.Sprintf("%s-*.yaml", st.Name))
if err != nil {
lg.Fatal().Err(err).Msg("failed to create temporary file")
}
tmpPath := f.Name()
defer os.Remove(tmpPath)
if _, err := f.Write(data); err != nil {
lg.Fatal().Err(err).Msg("unable to write file")
}
f.Close()
if err := runEditor(ctx, tmpPath); err != nil {
lg.Fatal().Err(err).Msg("failed to start editor")
}
data, err = os.ReadFile(tmpPath)
if err != nil {
lg.Fatal().Err(err).Msg("failed to read temporary file")
}
var newState state.State
if err := yaml.Unmarshal(data, &newState); err != nil {
lg.Fatal().Err(err).Msg("failed to decode file")
}
st.Name = newState.Name
st.Platform = newState.Platform
st.Plan = newState.Plan
st.Inputs = newState.Inputs
env, err := environment.New(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to create environment")
}
cl := common.NewClient(ctx)
err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
// check for cue errors by scanning all the inputs
_, err := env.ScanInputs(ctx, true)
if err != nil {
return err
}
return nil
})
<-doneCh
if err != nil {
lg.Fatal().Err(err).Str("environment", st.Name).Msg("invalid input")
}
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Msg("failed to save state")
}
},
}
func runEditor(ctx context.Context, path string) error {
editor := os.Getenv("EDITOR")
var cmd *exec.Cmd
if editor == "" {
editor, err := lookupAnyEditor("vim", "nano", "vi")
if err != nil {
return err
}
cmd = exec.CommandContext(ctx, editor, path)
} else {
parts, err := shlex.Split(editor)
if err != nil {
return fmt.Errorf("invalid $EDITOR: %s", editor)
}
parts = append(parts, path)
cmd = exec.CommandContext(ctx, parts[0], parts[1:]...) // #nosec
}
cmd.Env = os.Environ()
cmd.Stdin = os.Stdin
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
return cmd.Run()
}
func lookupAnyEditor(editorNames ...string) (editorPath string, err error) {
for _, editorName := range editorNames {
editorPath, err = exec.LookPath(editorName)
if err == nil {
return editorPath, nil
}
}
return "", fmt.Errorf("no editor available: dagger attempts to use the editor defined in the EDITOR environment variable, and if that's not set defaults to any of %s, but none of them could be found", strings.Join(editorNames, ", "))
}
func init() {
if err := viper.BindPFlags(editCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,33 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var historyCmd = &cobra.Command{
Use: "history",
Short: "List past changes to an environment",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
// lg := logger.New()
// ctx := lg.WithContext(cmd.Context())
panic("not implemented")
},
// Remove hidden flag once command has been implemented
Hidden: true,
}
func init() {
if err := viper.BindPFlags(historyCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -5,9 +5,8 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/spf13/viper" "github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger" "go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state" "go.dagger.io/dagger/pkg"
) )
var initCmd = &cobra.Command{ var initCmd = &cobra.Command{
@ -37,12 +36,13 @@ var initCmd = &cobra.Command{
dir = cwd
}
-project, err := state.Init(ctx, dir)
+err := pkg.CueModInit(ctx, dir)
if err != nil {
lg.Fatal().Err(err).Msg("failed to initialize project")
}
-<-common.TrackProjectCommand(ctx, cmd, project, nil)
+// TODO: Add telemtry for init
+// <-common.TrackProjectCommand(ctx, cmd, project, nil)
},
}

View File

@ -1,38 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var boolCmd = &cobra.Command{
Use: "bool <TARGET> <true|false>",
Short: "Add a boolean input",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.BoolInput(readInput(ctx, args[1])),
)
},
}
func init() {
if err := viper.BindPFlags(boolCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,69 +0,0 @@
package input
import (
"os"
"path/filepath"
"strings"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var dirCmd = &cobra.Command{
Use: "dir TARGET PATH",
Short: "Add a local directory as input artifact",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
p, err := filepath.Abs(args[1])
if err != nil {
lg.Fatal().Err(err).Str("path", args[1]).Msg("unable to resolve path")
}
// Check that directory exists
if _, err := os.Stat(p); os.IsNotExist(err) {
lg.Fatal().Err(err).Str("path", args[1]).Msg("dir doesn't exist")
}
project := common.CurrentProject(ctx)
if !strings.HasPrefix(p, project.Path) {
lg.Fatal().Err(err).Str("path", args[1]).Msg("dir is outside the project")
}
p, err = filepath.Rel(project.Path, p)
if err != nil {
lg.Fatal().Err(err).Str("path", args[1]).Msg("unable to resolve path")
}
if !strings.HasPrefix(p, ".") {
p = "./" + p
}
updateEnvironmentInput(ctx, cmd, args[0],
state.DirInput(
p,
viper.GetStringSlice("include"),
viper.GetStringSlice("exclude"),
),
)
},
}
func init() {
dirCmd.Flags().StringSlice("include", []string{}, "Include pattern")
dirCmd.Flags().StringSlice("exclude", []string{}, "Exclude pattern")
if err := viper.BindPFlags(dirCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,43 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var gitCmd = &cobra.Command{
Use: "git TARGET REMOTE [REF] [SUBDIR]",
Short: "Add a git repository as input artifact",
Args: cobra.RangeArgs(2, 4),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
ref := "HEAD"
if len(args) > 2 {
ref = args[2]
}
subDir := ""
if len(args) > 3 {
subDir = args[3]
}
updateEnvironmentInput(ctx, cmd, args[0], state.GitInput(args[1], ref, subDir))
},
}
func init() {
if err := viper.BindPFlags(gitCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,40 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var jsonCmd = &cobra.Command{
Use: "json <TARGET> [-f] <VALUE|PATH>",
Short: "Add a JSON input",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.JSONInput(readInput(ctx, args[1])),
)
},
}
func init() {
jsonCmd.Flags().BoolP("file", "f", false, "Read value from file")
if err := viper.BindPFlags(jsonCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,131 +0,0 @@
package input
import (
"context"
"fmt"
"os"
"text/tabwriter"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/compiler"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/plancontext"
"go.dagger.io/dagger/solver"
"go.dagger.io/dagger/state"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var listCmd = &cobra.Command{
Use: "list [TARGET] [flags]",
Short: "List the inputs of an environment",
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
env, err := environment.New(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to create environment")
}
cl := common.NewClient(ctx)
err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
inputs, err := env.ScanInputs(ctx, false)
if err != nil {
return err
}
w := tabwriter.NewWriter(os.Stdout, 0, 4, 2, ' ', 0)
fmt.Fprintln(w, "Input\tValue\tSet by user\tDescription")
for _, inp := range inputs {
isConcrete := (inp.IsConcreteR() == nil)
_, hasDefault := inp.Default()
switch {
case plancontext.IsSecretValue(inp):
if _, err := env.Context().Secrets.FromValue(inp); err != nil {
isConcrete = false
}
case plancontext.IsFSValue(inp):
if _, err := env.Context().FS.FromValue(inp); err != nil {
isConcrete = false
}
case plancontext.IsServiceValue(inp):
if _, err := env.Context().Services.FromValue(inp); err != nil {
isConcrete = false
}
}
if !viper.GetBool("all") {
// skip input that is not overridable
if !hasDefault && isConcrete {
continue
}
}
if !viper.GetBool("show-optional") && !viper.GetBool("all") {
// skip input if there is already a default value
if hasDefault {
continue
}
}
fmt.Fprintf(w, "%s\t%s\t%t\t%s\n",
inp.Path(),
common.FormatValue(inp),
isUserSet(st, inp),
common.ValueDocOneLine(inp),
)
}
w.Flush()
return nil
})
<-doneCh
if err != nil {
lg.Fatal().Err(err).Msg("failed to query environment")
}
},
}
func isUserSet(env *state.State, val *compiler.Value) bool {
for key := range env.Inputs {
if val.Path().String() == key {
return true
}
}
return false
}
func init() {
listCmd.Flags().BoolP("all", "a", false, "List all inputs (include non-overridable)")
listCmd.Flags().Bool("show-optional", false, "List optional inputs (those with default values)")
if err := viper.BindPFlags(listCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,112 +0,0 @@
package input
import (
"context"
"io"
"os"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/solver"
"go.dagger.io/dagger/state"
"go.dagger.io/dagger/telemetry"
)
// Cmd exposes the top-level command
var Cmd = &cobra.Command{
Use: "input",
Short: "Manage an environment's inputs",
}
func init() {
Cmd.AddCommand(
dirCmd,
gitCmd,
secretCmd,
textCmd,
jsonCmd,
yamlCmd,
listCmd,
boolCmd,
socketCmd,
unsetCmd,
)
}
func updateEnvironmentInput(ctx context.Context, cmd *cobra.Command, target string, input state.Input) {
lg := *log.Ctx(ctx)
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackProjectCommand(ctx, cmd, project, st, &telemetry.Property{
Name: "input_target",
Value: target,
})
st.SetInput(target, input)
env, err := environment.New(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to create environment")
}
cl := common.NewClient(ctx)
err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
// the inputs are set, check for cue errors by scanning all the inputs
_, err := env.ScanInputs(ctx, true)
if err != nil {
return err
}
return nil
})
<-doneCh
if err != nil {
lg.Fatal().Err(err).Msg("invalid input")
}
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Msg("cannot update environment")
}
}
func readInput(ctx context.Context, source string) string {
lg := log.Ctx(ctx)
if !viper.GetBool("file") {
return source
}
if source == "-" {
// stdin source
data, err := io.ReadAll(os.Stdin)
if err != nil {
lg.
Fatal().
Err(err).
Msg("failed to read input from stdin")
}
return string(data)
}
// file source
data, err := os.ReadFile(source)
if err != nil {
lg.
Fatal().
Err(err).
Str("path", source).
Msg("failed to read input from file")
}
return string(data)
}

View File

@ -1,59 +0,0 @@
package input
import (
"fmt"
"os"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
"golang.org/x/term"
)
var secretCmd = &cobra.Command{
Use: "secret <TARGET> [-f] [<VALUE|PATH>]",
Short: "Add an encrypted input secret",
Args: cobra.RangeArgs(1, 2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
var secret string
if len(args) == 1 {
// No value specified: prompt terminal
fmt.Print("Secret: ")
data, err := term.ReadPassword(int(os.Stdin.Fd()))
if err != nil {
lg.Fatal().Err(err).Msg("unable to read secret from terminal")
}
fmt.Println("")
secret = string(data)
} else {
// value specified: read it
secret = readInput(ctx, args[1])
}
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.SecretInput(secret),
)
},
}
func init() {
secretCmd.Flags().BoolP("file", "f", false, "Read value from file")
if err := viper.BindPFlags(secretCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,68 +0,0 @@
package input
import (
"context"
"os"
"runtime"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var socketCmd = &cobra.Command{
Use: "socket <TARGET> <UNIX path>",
Short: "Add a socket input",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.SocketInput(detectStreamType(ctx, args[1])),
)
},
}
func detectStreamType(ctx context.Context, path string) (string, string) {
lg := log.Ctx(ctx)
if runtime.GOOS == "windows" {
// support the unix format for convenience
if path == "/var/run/docker.sock" || path == "\\var\\run\\docker.sock" {
path = "\\\\.\\pipe\\docker_engine"
lg.Info().Str("path", path).Msg("Windows detected, override unix socket path")
}
return path, "npipe"
}
st, err := os.Stat(path)
if err != nil {
lg.Fatal().Err(err).Str("path", path).Msg("invalid unix socket")
}
if st.Mode()&os.ModeSocket == 0 {
lg.Fatal().Str("path", path).Msg("not a unix socket")
}
return path, "unix"
}
func init() {
if err := viper.BindPFlags(boolCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,40 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var textCmd = &cobra.Command{
Use: "text <TARGET> [-f] <VALUE|PATH>",
Short: "Add a text input",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.TextInput(readInput(ctx, args[1])),
)
},
}
func init() {
textCmd.Flags().BoolP("file", "f", false, "Read value from file")
if err := viper.BindPFlags(textCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,34 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
)
var unsetCmd = &cobra.Command{
Use: "unset [TARGET]",
Short: "Remove input of an environment",
Args: cobra.ExactArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
st.RemoveInputs(args[0])
if err := project.Save(ctx, st); err != nil {
lg.Fatal().Err(err).Str("environment", st.Name).Msg("cannot update environment")
}
lg.Info().Str("environment", st.Name).Msg("updated environment")
},
}

View File

@ -1,40 +0,0 @@
package input
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var yamlCmd = &cobra.Command{
Use: "yaml <TARGET> [-f] <VALUE|PATH>",
Short: "Add a YAML input",
Args: cobra.ExactArgs(2),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
updateEnvironmentInput(
ctx,
cmd,
args[0],
state.YAMLInput(readInput(ctx, args[1])),
)
},
}
func init() {
yamlCmd.Flags().BoolP("file", "f", false, "Read value from file")
if err := viper.BindPFlags(yamlCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,71 +0,0 @@
package cmd
import (
"fmt"
"os"
"path"
"strings"
"text/tabwriter"
"github.com/mitchellh/go-homedir"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
)
var listCmd = &cobra.Command{
Use: "list",
Short: "List available environments",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
doneCh := common.TrackProjectCommand(ctx, cmd, project, nil)
environments, err := project.List(ctx)
if err != nil {
lg.
Fatal().
Err(err).
Msg("cannot list environments")
}
w := tabwriter.NewWriter(os.Stdout, 0, 0, 1, ' ', tabwriter.TabIndent)
defer w.Flush()
for _, e := range environments {
line := fmt.Sprintf("%s\t%s\t", e.Name, formatPath(e.Path))
fmt.Fprintln(w, line)
}
<-doneCh
},
}
func formatPath(p string) string {
dir, err := homedir.Dir()
if err != nil {
// Ignore error
return p
}
if strings.HasPrefix(p, dir) {
return path.Join("~", p[len(dir):])
}
return p
}
func init() {
if err := viper.BindPFlags(listCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,33 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var loginCmd = &cobra.Command{
Use: "login",
Short: "Login to Dagger Cloud",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
// lg := logger.New()
// ctx := lg.WithContext(cmd.Context())
panic("not implemented")
},
// Remove hidden flag once command has been implemented
Hidden: true,
}
func init() {
if err := viper.BindPFlags(loginCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,33 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var logoutCmd = &cobra.Command{
Use: "logout",
Short: "Logout from Dagger Cloud",
Args: cobra.NoArgs,
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
// lg := logger.New()
// ctx := lg.WithContext(cmd.Context())
panic("not implemented")
},
// Remove hidden flag once command has been implemented
Hidden: true,
}
func init() {
if err := viper.BindPFlags(logoutCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -6,7 +6,6 @@ import (
"go.dagger.io/dagger/cmd/dagger/logger" "go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/mod" "go.dagger.io/dagger/mod"
"go.dagger.io/dagger/pkg" "go.dagger.io/dagger/pkg"
"go.dagger.io/dagger/state"
) )
var getCmd = &cobra.Command{ var getCmd = &cobra.Command{
@ -28,10 +27,10 @@ var getCmd = &cobra.Command{
var err error
cueModPath := pkg.GetCueModParent()
-// err = pkg.CueModInit(ctx, cueModPath)
-_, err = state.Init(ctx, cueModPath)
-if err != nil && err != state.ErrAlreadyInit {
+err = pkg.CueModInit(ctx, cueModPath)
+if err != nil {
lg.Fatal().Err(err).Msg("failed to initialize cue.mod")
+panic(err)
}
var update = viper.GetBool("update")

View File

@ -1,53 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/state"
)
var newCmd = &cobra.Command{
Use: "new <NAME>",
Short: "Create a new empty environment",
Args: cobra.ExactArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
if viper.GetString("environment") != "" {
lg.
Fatal().
Msg("cannot use option -e,--environment for this command")
}
name := args[0]
st, err := project.Create(ctx, name, state.Plan{
Package: viper.GetString("package"),
}, viper.GetString("platform"))
if err != nil {
lg.Fatal().Err(err).Msg("failed to create environment")
}
<-common.TrackProjectCommand(ctx, cmd, project, st)
},
}
func init() {
newCmd.Flags().StringP("package", "p", "", "references the name of the Cue package within the module to use as a plan. Default: defer to cue loader")
newCmd.Flags().String("platform", "", "platform of the running pipeline. Default: host platform")
if err := viper.BindPFlags(newCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,33 +0,0 @@
package output
import (
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var dirCmd = &cobra.Command{
Use: "dir PATH",
Short: "Add a local directory as output artifact",
Args: cobra.ExactArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
// lg := logger.New()
// ctx := lg.WithContext(cmd.Context())
panic("not implemented")
},
// Remove hidden flag once command has been implemented
Hidden: true,
}
func init() {
if err := viper.BindPFlags(dirCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -1,94 +0,0 @@
package output
import (
"context"
"fmt"
"os"
"text/tabwriter"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/environment"
"go.dagger.io/dagger/solver"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var listCmd = &cobra.Command{
Use: "list [TARGET] [flags]",
Short: "List the outputs of an environment",
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
st := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", st.Name).
Logger()
doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
env, err := environment.New(st)
if err != nil {
lg.Fatal().Err(err).Msg("unable to create environment")
}
cl := common.NewClient(ctx)
err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
return ListOutputs(ctx, env, true)
})
<-doneCh
if err != nil {
lg.Fatal().Err(err).Msg("failed to scan outputs")
}
},
}
func ListOutputs(ctx context.Context, env *environment.Environment, isTTY bool) error {
lg := log.Ctx(ctx).With().
Str("environment", env.Name()).
Logger()
outputs, err := env.ScanOutputs(ctx)
if err != nil {
lg.Error().Err(err).Msg("failed to scan outputs")
return err
}
if !isTTY {
for _, out := range outputs {
lg.Info().Str("name", out.Path().String()).
Str("value", fmt.Sprintf("%v", out.Cue())).
Msg("output")
}
return nil
}
w := tabwriter.NewWriter(os.Stdout, 0, 4, 2, ' ', 0)
fmt.Fprintln(w, "Output\tValue\tDescription")
for _, out := range outputs {
fmt.Fprintf(w, "%s\t%s\t%s\n",
out.Path(),
common.FormatValue(out),
common.ValueDocOneLine(out),
)
}
w.Flush()
return nil
}

View File

@ -1,14 +0,0 @@
package output
import "github.com/spf13/cobra"
// Cmd exposes the top-level command
var Cmd = &cobra.Command{
Use: "output",
Short: "Manage an environment's outputs",
}
func init() {
// Cmd.AddCommand(dirCmd)
Cmd.AddCommand(listCmd)
}

View File

@ -1,147 +0,0 @@
package cmd
import (
"fmt"
"cuelang.org/go/cue"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
"go.dagger.io/dagger/cmd/dagger/logger"
"go.dagger.io/dagger/compiler"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
var queryCmd = &cobra.Command{
Use: "query [TARGET] [flags]",
Short: "Query the contents of an environment",
Args: cobra.MaximumNArgs(1),
PreRun: func(cmd *cobra.Command, args []string) {
// Fix Viper bug for duplicate flags:
// https://github.com/spf13/viper/issues/233
if err := viper.BindPFlags(cmd.Flags()); err != nil {
panic(err)
}
},
Run: func(cmd *cobra.Command, args []string) {
lg := logger.New()
ctx := lg.WithContext(cmd.Context())
project := common.CurrentProject(ctx)
state := common.CurrentEnvironmentState(ctx, project)
lg = lg.With().
Str("environment", state.Name).
Logger()
cueOpts := parseQueryFlags()
cuePath := cue.MakePath()
if len(args) > 0 {
cuePath = cue.ParsePath(args[0])
}
doneCh := common.TrackProjectCommand(ctx, cmd, project, state)
cueVal := compiler.NewValue()
if !viper.GetBool("no-plan") {
plan, err := state.CompilePlan(ctx)
if err != nil {
lg.Fatal().Err(err).Msg("failed to compile plan")
}
if err := cueVal.FillPath(cue.MakePath(), plan); err != nil {
lg.Fatal().Err(err).Msg("failed to compile plan")
}
}
if !viper.GetBool("no-input") {
inputs, err := state.CompileInputs()
if err != nil {
lg.Fatal().Err(err).Msg("failed to compile inputs")
}
if err := cueVal.FillPath(cue.MakePath(), inputs); err != nil {
lg.Fatal().Err(err).Msg("failed to compile inputs")
}
}
if !viper.GetBool("no-computed") && state.Computed != "" {
computed, err := compiler.DecodeJSON("", []byte(state.Computed))
if err != nil {
lg.Fatal().Err(err).Msg("failed to decode json")
}
if err := cueVal.FillPath(cue.MakePath(), computed); err != nil {
lg.Fatal().Err(err).Msg("failed to merge plan with computed")
}
}
<-doneCh
cueVal = cueVal.LookupPath(cuePath)
if viper.GetBool("concrete") {
if err := cueVal.IsConcreteR(); err != nil {
lg.Fatal().Err(compiler.Err(err)).Msg("not concrete")
}
}
format := viper.GetString("format")
switch format {
case "cue":
out, err := cueVal.Source(cueOpts...)
if err != nil {
lg.Fatal().Err(err).Msg("failed to lookup source")
}
fmt.Println(string(out))
case "json":
fmt.Println(cueVal.JSON().PrettyString())
case "yaml":
lg.Fatal().Msg("yaml format not yet implemented")
case "text":
out, err := cueVal.String()
if err != nil {
lg.Fatal().Err(err).Msg("value can't be formatted as text")
}
fmt.Println(out)
default:
lg.Fatal().Msgf("unsupported format: %q", format)
}
},
}
func parseQueryFlags() []cue.Option {
opts := []cue.Option{
cue.Definitions(true),
}
if viper.GetBool("concrete") {
opts = append(opts, cue.Concrete(true))
}
if viper.GetBool("show-optional") {
opts = append(opts, cue.Optional(true))
}
if viper.GetBool("show-attributes") {
opts = append(opts, cue.Attributes(true))
}
return opts
}
func init() {
queryCmd.Flags().BoolP("concrete", "c", false, "Require the evaluation to be concrete")
queryCmd.Flags().BoolP("show-optional", "O", false, "Display optional fields (cue format only)")
queryCmd.Flags().BoolP("show-attributes", "A", false, "Display field attributes (cue format only)")
// FIXME: implement the flags below
// queryCmd.Flags().String("revision", "latest", "Query a specific version of the environment")
queryCmd.Flags().StringP("format", "f", "json", "Output format (json|yaml|cue|text|env)")
queryCmd.Flags().BoolP("no-plan", "P", false, "Exclude plan from query")
queryCmd.Flags().BoolP("no-input", "I", false, "Exclude inputs from query")
queryCmd.Flags().BoolP("no-computed", "C", false, "Exclude computed values from query")
if err := viper.BindPFlags(queryCmd.Flags()); err != nil {
panic(err)
}
}

View File

@ -7,11 +7,8 @@ import (
"github.com/moby/buildkit/util/appcontext"
"github.com/spf13/cobra"
"github.com/spf13/viper"
- "go.dagger.io/dagger/cmd/dagger/cmd/input"
"go.dagger.io/dagger/cmd/dagger/cmd/mod"
- "go.dagger.io/dagger/cmd/dagger/cmd/output"
"go.dagger.io/dagger/cmd/dagger/logger"
- "go.dagger.io/dagger/keychain"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
@ -33,21 +30,10 @@ func init() {
rootCmd.PersistentFlags().StringArray("cache-from", []string{},
"External cache sources (eg. user/app:cache, type=local,src=path/to/dir)")
- rootCmd.PersistentFlags().StringP("environment", "e", "", "Select an environment")
rootCmd.PersistentFlags().String("project", "", "Specify a project directory (defaults to current)")
- rootCmd.PersistentFlags().Bool("europa", false, "Enable experiemental Europa UX")
rootCmd.PersistentPreRun = func(cmd *cobra.Command, _ []string) {
- lg := logger.New()
- ctx := lg.WithContext(cmd.Context())
go checkVersion()
- err := keychain.EnsureDefaultKey(ctx)
- if err != nil {
- lg.Fatal().Err(err).Msg("failed to generate default key")
- }
}
rootCmd.PersistentPostRun = func(*cobra.Command, []string) {
warnVersion()
@ -55,18 +41,7 @@ func init() {
rootCmd.AddCommand(
initCmd,
- newCmd,
- computeCmd,
- listCmd,
- queryCmd,
upCmd,
- downCmd,
- editCmd,
- historyCmd,
- loginCmd,
- logoutCmd,
- input.Cmd,
- output.Cmd,
versionCmd,
docCmd,
mod.Cmd,

View File

@ -2,20 +2,13 @@ package cmd
import (
"context"
- "errors"
"fmt"
"os"
- "cuelang.org/go/cue"
"go.dagger.io/dagger/client"
"go.dagger.io/dagger/cmd/dagger/cmd/common"
- "go.dagger.io/dagger/cmd/dagger/cmd/output"
"go.dagger.io/dagger/cmd/dagger/logger"
- "go.dagger.io/dagger/compiler"
- "go.dagger.io/dagger/environment"
- "go.dagger.io/dagger/mod"
"go.dagger.io/dagger/plan"
- "go.dagger.io/dagger/plancontext"
"go.dagger.io/dagger/solver"
"golang.org/x/term"
@ -56,88 +49,31 @@ var upCmd = &cobra.Command{
ctx := lg.WithContext(cmd.Context())
cl := common.NewClient(ctx)
- if viper.GetBool("europa") {
- err = europaUp(ctx, cl, args...)
+ err = europaUp(ctx, cl, args...)
// TODO: rework telemetry
// <-doneCh
- if err != nil {
- lg.Fatal().Err(err).Msg("failed to up environment")
- }
- return
- }
- project := common.CurrentProject(ctx)
- st := common.CurrentEnvironmentState(ctx, project)
- lg = lg.With().
- Str("environment", st.Name).
- Logger()
- universeUpdateCh := make(chan bool)
- go func() {
- universeUpdateCh <- checkUniverseVersion(ctx, project.Path)
- }()
- doneCh := common.TrackProjectCommand(ctx, cmd, project, st)
- env, err := environment.New(st)
- if err != nil {
- lg.Fatal().Err(err).Msg("unable to create environment")
- }
- err = cl.Do(ctx, env.Context(), func(ctx context.Context, s solver.Solver) error {
- // check that all inputs are set
- if err := checkInputs(ctx, env); err != nil {
- return err
- }
- if err := env.Up(ctx, s); err != nil {
- return err
- }
- st.Computed = env.Computed().JSON().PrettyString()
- if err := project.Save(ctx, st); err != nil {
- return err
- }
- // FIXME: `ListOutput` is printing to Stdout directly which messes
- // up the TTY logger.
- if tty != nil {
- tty.Stop()
- }
- return output.ListOutputs(ctx, env, term.IsTerminal(int(os.Stdout.Fd())))
- })
- <-doneCh
if err != nil {
lg.Fatal().Err(err).Msg("failed to up environment")
}
- // Warn universe version if out of date
- if update := <-universeUpdateCh; update {
- fmt.Println("A new version of universe is available, please run 'dagger mod get alpha.dagger.io'")
- }
},
}
- func checkUniverseVersion(ctx context.Context, projectPath string) bool {
- lg := log.Ctx(ctx)
- isLatest, err := mod.IsUniverseLatest(ctx, projectPath)
- if err != nil {
- lg.Debug().Err(err).Msg("failed to check universe version")
- return false
- }
- if !isLatest {
- return true
- }
- lg.Debug().Msg("universe is up to date")
- return false
- }
+ // func checkUniverseVersion(ctx context.Context, projectPath string) bool {
+ // lg := log.Ctx(ctx)
+ // isLatest, err := mod.IsUniverseLatest(ctx, projectPath)
+ // if err != nil {
+ // lg.Debug().Err(err).Msg("failed to check universe version")
+ // return false
+ // }
+ // if !isLatest {
+ // return true
+ // }
+ // lg.Debug().Msg("universe is up to date")
+ // return false
+ // }
func europaUp(ctx context.Context, cl *client.Client, args ...string) error {
lg := log.Ctx(ctx)
@ -174,57 +110,8 @@ func europaUp(ctx context.Context, cl *client.Client, args ...string) error {
})
}
- func checkInputs(ctx context.Context, env *environment.Environment) error {
- lg := log.Ctx(ctx)
- warnOnly := viper.GetBool("force")
- notConcreteInputs := []*compiler.Value{}
- inputs, err := env.ScanInputs(ctx, true)
- if err != nil {
- lg.Error().Err(err).Msg("failed to scan inputs")
- return err
- }
- for _, i := range inputs {
- isConcrete := (i.IsConcreteR(cue.Optional(true)) == nil)
- switch {
- case plancontext.IsSecretValue(i):
- if _, err := env.Context().Secrets.FromValue(i); err != nil {
- isConcrete = false
- }
- case plancontext.IsFSValue(i):
- if _, err := env.Context().FS.FromValue(i); err != nil {
- isConcrete = false
- }
- case plancontext.IsServiceValue(i):
- if _, err := env.Context().Services.FromValue(i); err != nil {
- isConcrete = false
- }
- }
- if !isConcrete {
- notConcreteInputs = append(notConcreteInputs, i)
- }
- }
- for _, i := range notConcreteInputs {
- if warnOnly {
- lg.Warn().Str("input", i.Path().String()).Msg("required input is missing")
- } else {
- lg.Error().Str("input", i.Path().String()).Msg("required input is missing")
- }
- }
- if !warnOnly && len(notConcreteInputs) > 0 {
- return errors.New("some required inputs are not set, please re-run with `--force` if you think it's a mistake")
- }
- return nil
- }
func init() {
upCmd.Flags().BoolP("force", "f", false, "Force up, disable inputs check")
- upCmd.Flags().String("output", "", "Write computed output. Prints on stdout if set to-")
upCmd.Flags().StringArrayP("with", "w", []string{}, "")
upCmd.Flags().StringP("target", "t", "", "Run a single target of the DAG (for debugging only)")
upCmd.Flags().Bool("no-vendor", false, "Force up, disable inputs check")

View File

@ -14,14 +14,14 @@ import (
"github.com/containerd/console"
"github.com/morikuni/aec"
"github.com/tonistiigi/vt100"
- "go.dagger.io/dagger/environment"
+ "go.dagger.io/dagger/plan/task"
)
type Event map[string]interface{}
type Group struct {
Name string
- State environment.State
+ State task.State
Events []Event
Started *time.Time
Completed *time.Time
@ -43,7 +43,7 @@ func (l *Logs) Add(event Event) error {
l.l.Lock()
defer l.l.Unlock()
- task, ok := event["task"].(string)
+ taskPath, ok := event["task"].(string)
if !ok {
l.Messages = append(l.Messages, Message{
Event: event,
@ -52,12 +52,8 @@ func (l *Logs) Add(event Event) error {
return nil
}
- // Hide `#up.*` from log group names
- // FIXME: remove in Europa
- groupKey := strings.Split(task, ".#up")[0]
// Hide hidden fields (e.g. `._*`) from log group names
- groupKey = strings.Split(groupKey, "._")[0]
+ groupKey := strings.Split(taskPath, "._")[0]
group := l.groups[groupKey]
@ -82,8 +78,8 @@ func (l *Logs) Add(event Event) error {
// For each task in a group, the status will transition from computing to complete, then back to computing and so on.
// The transition is fast enough not to cause a problem.
if st, ok := event["state"].(string); ok {
- group.State = environment.State(st)
- if group.State == environment.StateComputing {
+ group.State = task.State(st)
+ if group.State == task.StateComputing {
group.Completed = nil
} else {
now := time.Now()
@ -228,7 +224,7 @@ func (c *TTYOutput) linesPerGroup(width, height int) int {
runningGroups := 0
for _, message := range c.logs.Messages {
- if group := message.Group; group != nil && group.State == environment.StateComputing {
+ if group := message.Group; group != nil && group.State == task.StateComputing {
runningGroups++
}
}
@ -268,13 +264,13 @@ func (c *TTYOutput) printGroup(group *Group, width, maxLines int) int {
prefix := ""
switch group.State {
- case environment.StateComputing:
+ case task.StateComputing:
prefix = "[+]"
- case environment.StateCanceled:
+ case task.StateCanceled:
prefix = "[✗]"
- case environment.StateFailed:
+ case task.StateFailed:
prefix = "[✗]"
- case environment.StateCompleted:
+ case task.StateCompleted:
prefix = "[✔]"
}
@ -298,13 +294,13 @@ func (c *TTYOutput) printGroup(group *Group, width, maxLines int) int {
// color
switch group.State {
- case environment.StateComputing:
+ case task.StateComputing:
out = aec.Apply(out, aec.LightBlueF)
- case environment.StateCanceled:
+ case task.StateCanceled:
out = aec.Apply(out, aec.LightYellowF)
- case environment.StateFailed:
+ case task.StateFailed:
out = aec.Apply(out, aec.LightRedF)
- case environment.StateCompleted:
+ case task.StateCompleted:
out = aec.Apply(out, aec.LightGreenF)
}
@ -314,19 +310,19 @@ func (c *TTYOutput) printGroup(group *Group, width, maxLines int) int {
printEvents := []Event{}
switch group.State {
- case environment.StateComputing:
+ case task.StateComputing:
printEvents = group.Events
// for computing tasks, show only last N
if len(printEvents) > maxLines {
printEvents = printEvents[len(printEvents)-maxLines:]
}
- case environment.StateCanceled:
+ case task.StateCanceled:
// for completed tasks, don't show any logs
printEvents = []Event{}
- case environment.StateFailed:
+ case task.StateFailed:
// for failed, show all logs
printEvents = group.Events
- case environment.StateCompleted:
+ case task.StateCompleted:
// for completed tasks, don't show any logs
printEvents = []Event{}
}

View File

@ -1,5 +1,4 @@
# generated by dagger
dagger.lock
- alpha.dagger.io
dagger.io
universe.dagger.io

View File

@ -1,2 +1 @@
# dagger universe
- alpha.dagger.io

View File

@ -9,73 +9,76 @@ setup() {
# Test 1003-get-started
@test "doc-1003-get-started" {
- setup_example_sandbox
+ skip "TODO: Implement for Europa"
+ # setup_example_sandbox
- # Follow tutorial
+ # # Follow tutorial
- mkdir -p "$DAGGER_SANDBOX"/plans/local
+ # mkdir -p "$DAGGER_SANDBOX"/plans/local
- cp "$DAGGER_PROJECT"/getting-started/plans/todoapp.cue "$DAGGER_SANDBOX"/plans/todoapp.cue
+ # cp "$DAGGER_PROJECT"/getting-started/plans/todoapp.cue "$DAGGER_SANDBOX"/plans/todoapp.cue
- cp "$DAGGER_PROJECT"/getting-started/plans/local/local.cue "$DAGGER_SANDBOX"/plans/local/local.cue
+ # cp "$DAGGER_PROJECT"/getting-started/plans/local/local.cue "$DAGGER_SANDBOX"/plans/local/local.cue
- dagger --project "$DAGGER_SANDBOX" new 'local' -p "$DAGGER_SANDBOX"/plans/local
+ # dagger --project "$DAGGER_SANDBOX" new 'local' -p "$DAGGER_SANDBOX"/plans/local
- dagger --project "$DAGGER_SANDBOX" -e 'local' input socket dockerSocket /var/run/docker.sock
+ # dagger --project "$DAGGER_SANDBOX" -e 'local' input socket dockerSocket /var/run/docker.sock
- dagger --project "$DAGGER_SANDBOX" -e 'local' input dir app.source "$DAGGER_SANDBOX"
+ # dagger --project "$DAGGER_SANDBOX" -e 'local' input dir app.source "$DAGGER_SANDBOX"
- run dagger --project "$DAGGER_SANDBOX" -e 'local' up
+ # run dagger --project "$DAGGER_SANDBOX" -e 'local' up
- assert_success
+ # assert_success
- docker rm -f todoapp
+ # docker rm -f todoapp
- docker rm -f registry-local
+ # docker rm -f registry-local
}
@test "doc-1004-first-env" {
- setup_example_sandbox
+ skip "TODO: Implement for Europa"
+ # setup_example_sandbox
- # Follow tutorial
+ # # Follow tutorial
- mkdir -p "$DAGGER_SANDBOX"/multibucket
+ # mkdir -p "$DAGGER_SANDBOX"/multibucket
- cp "$DAGGER_PROJECT"/multibucket/source.cue "$DAGGER_SANDBOX"/multibucket
+ # cp "$DAGGER_PROJECT"/multibucket/source.cue "$DAGGER_SANDBOX"/multibucket
- cp "$DAGGER_PROJECT"/multibucket/yarn.cue "$DAGGER_SANDBOX"/multibucket
+ # cp "$DAGGER_PROJECT"/multibucket/yarn.cue "$DAGGER_SANDBOX"/multibucket
- cp "$DAGGER_PROJECT"/multibucket/netlify.cue "$DAGGER_SANDBOX"/multibucket
+ # cp "$DAGGER_PROJECT"/multibucket/netlify.cue "$DAGGER_SANDBOX"/multibucket
- dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/netlify
+ # dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/netlify
- dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/js/yarn
+ # dagger --project "$DAGGER_SANDBOX" doc alpha.dagger.io/js/yarn
- # Initialize new env
+ # # Initialize new env
- dagger --project "$DAGGER_SANDBOX" new 'multibucket' -p "$DAGGER_SANDBOX"/multibucket
+ # dagger --project "$DAGGER_SANDBOX" new 'multibucket' -p "$DAGGER_SANDBOX"/multibucket
- # Copy corresponding env
+ # # Copy corresponding env
- cp -r "$DAGGER_PROJECT"/.dagger/env/multibucket "$DAGGER_SANDBOX"/.dagger/env/
+ # cp -r "$DAGGER_PROJECT"/.dagger/env/multibucket "$DAGGER_SANDBOX"/.dagger/env/
- # Add missing src input
+ # # Add missing src input
- dagger --project "$DAGGER_SANDBOX" -e multibucket input dir src "$DAGGER_SANDBOX"
+ # dagger --project "$DAGGER_SANDBOX" -e multibucket input dir src "$DAGGER_SANDBOX"
- # Run test
+ # # Run test
- dagger --project "$DAGGER_SANDBOX" -e multibucket up
+ # dagger --project "$DAGGER_SANDBOX" -e multibucket up
- url=$(dagger --project "$DAGGER_SANDBOX" -e multibucket query -f text site.netlify.deployUrl)
+ # url=$(dagger --project "$DAGGER_SANDBOX" -e multibucket query -f text site.netlify.deployUrl)
- # Check output
+ # # Check output
- run curl "$url"
+ # run curl "$url"
- assert_output --partial "./static/css/main.9149988f.chunk.css"
+ # assert_output --partial "./static/css/main.9149988f.chunk.css"
}
@test "doc-1006-google-cloud-run" {
- setup_example_sandbox
+ skip "TODO: Implement for Europa"
+ # setup_example_sandbox
- # Follow tutorial
+ # # Follow tutorial
- mkdir -p "$DAGGER_SANDBOX"/gcpcloudrun
+ # mkdir -p "$DAGGER_SANDBOX"/gcpcloudrun
- cp "$DAGGER_PROJECT"/gcpcloudrun/source.cue "$DAGGER_SANDBOX"/gcpcloudrun
+ # cp "$DAGGER_PROJECT"/gcpcloudrun/source.cue "$DAGGER_SANDBOX"/gcpcloudrun
- # Initialize new env
+ # # Initialize new env
- dagger --project "$DAGGER_SANDBOX" new 'gcpcloudrun' -p "$DAGGER_SANDBOX"/gcpcloudrun
+ # dagger --project "$DAGGER_SANDBOX" new 'gcpcloudrun' -p "$DAGGER_SANDBOX"/gcpcloudrun
- # Copy corresponding env
+ # # Copy corresponding env
- cp -r "$DAGGER_PROJECT"/.dagger/env/gcpcloudrun "$DAGGER_SANDBOX"/.dagger/env/
+ # cp -r "$DAGGER_PROJECT"/.dagger/env/gcpcloudrun "$DAGGER_SANDBOX"/.dagger/env/
- # Add missing src input
+ # # Add missing src input
- dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun input dir src "$DAGGER_SANDBOX"
+ # dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun input dir src "$DAGGER_SANDBOX"
- # Run test
+ # # Run test
- run dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun up
+ # run dagger --project "$DAGGER_SANDBOX" -e gcpcloudrun up
- assert_success
+ # assert_success
}
@test "doc-1007-kube-kind" {

View File

@ -1,42 +1 @@
# Index
- [alpine](./alpine.md) - Base package for Alpine Linux
- [argocd](./argocd.md) - ArgoCD client operations
- [aws](./aws/README.md) - AWS base package
- [aws/cloudformation](./aws/cloudformation.md) - AWS CloudFormation
- [aws/ecr](./aws/ecr.md) - Amazon Elastic Container Registry (ECR)
- [aws/ecs](./aws/ecs.md) - AWS Elastic Container Service (ECS)
- [aws/eks](./aws/eks.md) - AWS Elastic Kubernetes Service (EKS)
- [aws/elb](./aws/elb.md) - AWS Elastic Load Balancer (ELBv2)
- [aws/rds](./aws/rds.md) - AWS Relational Database Service (RDS)
- [aws/s3](./aws/s3.md) - AWS Simple Storage Service
- [azure](./azure/README.md) - Azure base package
- [azure/resourcegroup](./azure/resourcegroup.md) - -
- [azure/staticwebapp](./azure/staticwebapp.md) - -
- [azure/storage](./azure/storage.md) - -
- [bats](./bats.md) - -
- [dagger](./dagger/README.md) - Dagger core types
- [dagger/op](./dagger/op.md) - op: low-level operations for Dagger processing pipelines
- [docker](./docker/README.md) - Docker container operations
- [docker/compose](./docker/compose.md) - Docker-compose operations
- [gcp](./gcp/README.md) - Google Cloud Platform
- [gcp/cloudrun](./gcp/cloudrun.md) - -
- [gcp/gcr](./gcp/gcr.md) - Google Container Registry
- [gcp/gcs](./gcp/gcs.md) - Google Cloud Storage
- [gcp/gke](./gcp/gke.md) - Google Kubernetes Engine
- [gcp/secretmanager](./gcp/secretmanager.md) - Google Cloud Secret Manager
- [git](./git.md) - Git operations
- [go](./go.md) - Go build operations
- [graphql](./graphql.md) - -
- [http](./http.md) - -
- [io](./io.md) - IO operations
- [java/maven](./java/maven.md) - Maven is a build automation tool for Java
- [js/yarn](./js/yarn.md) - Yarn is a package manager for Javascript applications
- [kubernetes](./kubernetes/README.md) - Kubernetes client operations
- [kubernetes/helm](./kubernetes/helm.md) - Helm package manager
- [kubernetes/kustomize](./kubernetes/kustomize.md) - Kustomize config management
- [netlify](./netlify.md) - Netlify client operations
- [os](./os.md) - OS operations
- [random](./random.md) - Random generation utilities
- [terraform](./terraform.md) - Terraform operations
- [trivy](./trivy.md) - -

View File

@ -1,23 +0,0 @@
---
sidebar_label: alpine
---
# alpha.dagger.io/alpine
Base package for Alpine Linux
```cue
import "alpha.dagger.io/alpine"
```
## alpine.#Image
Base image for Alpine Linux
### alpine.#Image Inputs
_No input._
### alpine.#Image Outputs
_No output._
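As a usage sketch (not part of the original generated reference), plans typically pinned extra packages through the `package` map, which the table above does not list because it is an open struct; treat the field and values below as illustrative.
```cue
package main

import "alpha.dagger.io/alpine"

// Alpine base image with a couple of extra packages.
// The `package` map is assumed from earlier alpha.dagger.io examples.
base: alpine.#Image & {
	package: bash: true
	package: curl: true
}
```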

View File

@ -1,137 +0,0 @@
---
sidebar_label: argocd
---
# alpha.dagger.io/argocd
ArgoCD client operations
```cue
import "alpha.dagger.io/argocd"
```
## argocd.#App
Create an ArgoCD application
### argocd.#App Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*name* | `string` |App name |
|*repo* | `string` |Repository url (git or helm) |
|*path* | `string` |Folder to deploy |
|*server* | `*"https://kubernetes.default.svc" \| string` |Destination server |
|*image.config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*image.config.server* | `string` |ArgoCD server |
|*image.config.project* | `*"default" \| string` |ArgoCD project |
|*image.config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*namespace* | `*"default" \| string` |Destination namespace |
|*env.APP_NAME* | `string` |- |
|*env.APP_REPO* | `string` |- |
|*env.APP_PATH* | `string` |- |
|*env.APP_SERVER* | `*"https://kubernetes.default.svc" \| string` |- |
|*env.APP_NAMESPACE* | `*"default" \| string` |- |
### argocd.#App Outputs
_No output._
## argocd.#CLI
Re-usable CLI component
### argocd.#CLI Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
### argocd.#CLI Outputs
_No output._
## argocd.#Config
ArgoCD configuration
### argocd.#Config Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*server* | `string` |ArgoCD server |
|*project* | `*"default" \| string` |ArgoCD project |
|*token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
### argocd.#Config Outputs
_No output._
## argocd.#Status
Get application's status
### argocd.#Status Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*name* | `string` |ArgoCD application |
### argocd.#Status Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*outputs.health* | `string` |Application health |
|*outputs.sync* | `string` |Application sync state |
|*outputs.namespace* | `string` |Namespace |
|*outputs.server* | `string` |Server |
|*outputs.urls* | `string` |Comma separated list of application URLs |
|*outputs.state* | `string` |Last operation state message |
## argocd.#Sync
Sync an application to its target state
### argocd.#Sync Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*application* | `string` |ArgoCD application |
|*wait* | `*false \| bool` |Wait the application to sync correctly |
|*ctr.image.config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*ctr.image.config.server* | `string` |ArgoCD server |
|*ctr.image.config.project* | `*"default" \| string` |ArgoCD project |
|*ctr.image.config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*ctr.env.APPLICATION* | `string` |- |
|*status.config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*status.config.server* | `string` |ArgoCD server |
|*status.config.project* | `*"default" \| string` |ArgoCD project |
|*status.config.token* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |ArgoCD authentication token |
|*status.name* | `string` |ArgoCD application |
### argocd.#Sync Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*status.outputs.health* | `string` |Application health |
|*status.outputs.sync* | `string` |Application sync state |
|*status.outputs.namespace* | `string` |Namespace |
|*status.outputs.server* | `string` |Server |
|*status.outputs.urls* | `string` |Comma separated list of application URLs |
|*status.outputs.state* | `string` |Last operation state message |
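For orientation, a minimal plan wiring these definitions together might have looked like the sketch below. The server address, repository URL and application name are placeholders, and the authentication token is assumed to be supplied as an external secret input.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/argocd"
)

// ArgoCD API access; the token comes from outside the plan
config: argocd.#Config & {
	server: "argocd.example.com"
	token:  dagger.#Secret @dagger(input)
}

// Register a sample application...
app: argocd.#App & {
	"config": config
	name:     "sample-app"
	repo:     "https://github.com/example/sample-app.git"
	path:     "k8s/"
}

// ...and sync it to its target state
sync: argocd.#Sync & {
	"config":    config
	application: app.name
}
```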

View File

@ -1,45 +0,0 @@
---
sidebar_label: argocd
---
# alpha.dagger.io/argocd
ArgoCD client operations
```cue
import "alpha.dagger.io/argocd"
```
## argocd.#CLI
Re-usable CLI component
### argocd.#CLI Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `dagger.#Secret` |ArgoCD authentication token |
### argocd.#CLI Outputs
_No output._
## argocd.#Config
ArgoCD configuration
### argocd.#Config Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*server* | `string` |ArgoCD server |
|*project* | `*"default" \| string` |ArgoCD project |
|*token* | `dagger.#Secret` |ArgoCD authentication token |
### argocd.#Config Outputs
_No output._

View File

@ -1,72 +0,0 @@
---
sidebar_label: app
---
# alpha.dagger.io/argocd/app
ArgoCD applications
```cue
import "alpha.dagger.io/argocd/app"
```
## app.#Application
Get an application
### app.#Application Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `dagger.#Secret` |ArgoCD authentication token |
|*name* | `string` |ArgoCD application |
### app.#Application Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*outputs.health* | `string` |Application health |
|*outputs.sync* | `string` |Application sync state |
|*outputs.namespace* | `string` |Namespace |
|*outputs.server* | `string` |Server |
|*outputs.urls* | `string` |Comma separated list of application URLs |
|*outputs.state* | `string` |Last operation state message |
## app.#Synchronization
Sync an application to its target state
### app.#Synchronization Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `dagger.#Secret` |ArgoCD authentication token |
|*application* | `string` |ArgoCD application |
### app.#Synchronization Outputs
_No output._
## app.#SynchronizedApplication
Wait for an application to reach a synced and healthy state
### app.#SynchronizedApplication Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.version* | `*"v2.0.5" \| string` |ArgoCD CLI binary version |
|*config.server* | `string` |ArgoCD server |
|*config.project* | `*"default" \| string` |ArgoCD project |
|*config.token* | `dagger.#Secret` |ArgoCD authentication token |
|*application* | `string` |ArgoCD application |
### app.#SynchronizedApplication Outputs
_No output._

View File

@ -1,80 +0,0 @@
---
sidebar_label: aws
---
# alpha.dagger.io/aws
AWS base package
```cue
import "alpha.dagger.io/aws"
```
## aws.#CLI
### aws.#CLI Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*version* | `*"1.19" \| string` |- |
### aws.#CLI Outputs
_No output._
## aws.#Config
AWS Config shared by all AWS packages
### aws.#Config Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*region* | `string` |AWS region |
|*accessKey* | `dagger.#Secret` |AWS access key |
|*secretKey* | `dagger.#Secret` |AWS secret key |
|*localMode* | `*false \| bool` |AWS localstack mode |
### aws.#Config Outputs
_No output._
## aws.#V1
Configuration specific to CLI v1
### aws.#V1 Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*version* | `*"1.19" \| string` |- |
### aws.#V1 Outputs
_No output._
## aws.#V2
Configuration specific to CLI v2
### aws.#V2 Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*version* | `*"2.1.29" \| string` |- |
### aws.#V2 Outputs
_No output._
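A minimal sketch of how this shared configuration was declared in a plan; the region value is illustrative and both keys are external secret inputs.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/aws"
)

// Shared AWS configuration, reused by the aws/* packages
awsConfig: aws.#Config & {
	region:    "us-east-1"
	accessKey: dagger.#Secret @dagger(input)
	secretKey: dagger.#Secret @dagger(input)
}
```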

View File

@ -1,34 +0,0 @@
---
sidebar_label: cloudformation
---
# alpha.dagger.io/aws/cloudformation
AWS CloudFormation
```cue
import "alpha.dagger.io/aws/cloudformation"
```
## cloudformation.#Stack
AWS CloudFormation Stack
### cloudformation.#Stack Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*source* | `string` |Source is the Cloudformation template (JSON/YAML string) |
|*stackName* | `string` |Stackname is the cloudformation stack |
|*parameters* | `struct` |Stack parameters |
|*onFailure* | `*"DO_NOTHING" \| "ROLLBACK" \| "DELETE"` |Behavior when failure to create/update the Stack |
|*timeout* | `*10 \| \>=0 & int` |Maximum waiting time until stack creation/update (in minutes) |
|*neverUpdate* | `*false \| true` |Never update the stack if already exists |
### cloudformation.#Stack Outputs
_No output._
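A deployment sketch built from the inputs above; the stack name and inline template are placeholders, and a real plan would normally load the template from a file or another package.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/aws"
	"alpha.dagger.io/aws/cloudformation"
)

awsConfig: aws.#Config & {
	region:    "us-east-1"
	accessKey: dagger.#Secret @dagger(input)
	secretKey: dagger.#Secret @dagger(input)
}

// Create or update a sample stack from an inline JSON template
stack: cloudformation.#Stack & {
	config:    awsConfig
	stackName: "sample-stack"
	onFailure: "ROLLBACK"
	source: """
		{"Resources": {"SampleBucket": {"Type": "AWS::S3::Bucket"}}}
		"""
}
```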

View File

@ -1,36 +0,0 @@
---
sidebar_label: ecr
---
# alpha.dagger.io/aws/ecr
Amazon Elastic Container Registry (ECR)
```cue
import "alpha.dagger.io/aws/ecr"
```
## ecr.#Credentials
Convert ECR credentials to Docker Login format
### ecr.#Credentials Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*ctr.image.config.region* | `string` |AWS region |
|*ctr.image.config.accessKey* | `dagger.#Secret` |AWS access key |
|*ctr.image.config.secretKey* | `dagger.#Secret` |AWS secret key |
|*ctr.image.config.localMode* | `*false \| bool` |AWS localstack mode |
|*ctr.image.version* | `*"1.19" \| string` |- |
### ecr.#Credentials Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*username* | `"AWS"` |ECR registry |
|*secret* | `string` |ECR registry secret |

View File

@ -1,11 +0,0 @@
---
sidebar_label: ecs
---
# alpha.dagger.io/aws/ecs
AWS Elastic Container Service (ECS)
```cue
import "alpha.dagger.io/aws/ecs"
```

View File

@ -1,32 +0,0 @@
---
sidebar_label: eks
---
# alpha.dagger.io/aws/eks
AWS Elastic Kubernetes Service (EKS)
```cue
import "alpha.dagger.io/aws/eks"
```
## eks.#KubeConfig
KubeConfig config outputs a valid kube-auth-config for kubectl client
### eks.#KubeConfig Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*clusterName* | `string` |EKS cluster name |
|*version* | `*"v1.19.9" \| string` |Kubectl version |
### eks.#KubeConfig Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*kubeconfig* | `string` |kubeconfig is the generated kube configuration file |

View File

@ -1,31 +0,0 @@
---
sidebar_label: elb
---
# alpha.dagger.io/aws/elb
AWS Elastic Load Balancer (ELBv2)
```cue
import "alpha.dagger.io/aws/elb"
```
## elb.#RandomRulePriority
Returns an unused rule priority (randomized in available range)
### elb.#RandomRulePriority Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*listenerArn* | `string` |ListenerArn |
### elb.#RandomRulePriority Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*priority* | `string` |exported priority |

View File

@ -1,81 +0,0 @@
---
sidebar_label: rds
---
# alpha.dagger.io/aws/rds
AWS Relational Database Service (RDS)
```cue
import "alpha.dagger.io/aws/rds"
```
## rds.#Database
Creates a new Database on an existing RDS Instance
### rds.#Database Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*name* | `string` |DB name |
|*dbArn* | `string` |ARN of the database instance |
|*secretArn* | `string` |ARN of the database secret (for connecting via rds api) |
|*dbType* | `string` |Database type MySQL or PostgreSQL (Aurora Serverless only) |
### rds.#Database Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*out* | `string` |Name of the DB created |
## rds.#Instance
Fetches information on an existing RDS Instance
### rds.#Instance Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*dbArn* | `string` |ARN of the database instance |
### rds.#Instance Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*hostname* | `_\|_` |DB hostname |
|*port* | `_\|_` |DB port |
|*info* | `_\|_` |- |
## rds.#User
Creates a new user credentials on an existing RDS Instance
### rds.#User Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*username* | `string` |Username |
|*password* | `string` |Password |
|*dbArn* | `string` |ARN of the database instance |
|*secretArn* | `string` |ARN of the database secret (for connecting via rds api) |
|*grantDatabase* | `*"" \| string` |Name of the database to grants access to |
|*dbType* | `string` |Database type MySQL or PostgreSQL (Aurora Serverless only) |
### rds.#User Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*out* | `string` |Outputted username |

View File

@ -1,35 +0,0 @@
---
sidebar_label: s3
---
# alpha.dagger.io/aws/s3
AWS Simple Storage Service
```cue
import "alpha.dagger.io/aws/s3"
```
## s3.#Object
S3 Bucket object(s) sync
### s3.#Object Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `string` |AWS region |
|*config.accessKey* | `dagger.#Secret` |AWS access key |
|*config.secretKey* | `dagger.#Secret` |AWS secret key |
|*config.localMode* | `*false \| bool` |AWS localstack mode |
|*source* | `dagger.#Artifact` |Source Artifact to upload to S3 |
|*target* | `string` |Target S3 URL (eg. s3://\<bucket-name\>/\<path\>/\<sub-path\>) |
|*delete* | `*false \| true` |Delete files that already exist on remote destination |
|*contentType* | `*"" \| string` |Object content type |
|*always* | `*true \| false` |Always write the object to S3 |
### s3.#Object Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*url* | `string` |URL of the uploaded S3 object |
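For orientation, a sketch syncing an input artifact to a bucket; the bucket name and paths are placeholders.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/aws"
	"alpha.dagger.io/aws/s3"
)

awsConfig: aws.#Config & {
	region:    "us-east-1"
	accessKey: dagger.#Secret @dagger(input)
	secretKey: dagger.#Secret @dagger(input)
}

// Directory to upload, provided as an input artifact
site: dagger.#Artifact @dagger(input)

// Sync the artifact to S3; `upload.url` exposes the resulting object URL
upload: s3.#Object & {
	config:      awsConfig
	source:      site
	target:      "s3://sample-bucket/site/"
	contentType: "text/html"
}
```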

View File

@ -1,50 +0,0 @@
---
sidebar_label: azure
---
# alpha.dagger.io/azure
Azure base package
```cue
import "alpha.dagger.io/azure"
```
## azure.#CLI
Azure Cli to be used by all Azure packages
### azure.#CLI Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*image.from* | `"mcr.microsoft.com/azure-cli:2.27.1@sha256:1e117183100c9fce099ebdc189d73e506e7b02d2b73d767d3fc07caee72f9fb1"` |Remote ref (example: "index.docker.io/alpine:latest") |
|*secret."/run/secrets/appId"* | `dagger.#Secret` |- |
|*secret."/run/secrets/password"* | `dagger.#Secret` |- |
|*secret."/run/secrets/tenantId"* | `dagger.#Secret` |- |
|*secret."/run/secrets/subscriptionId"* | `dagger.#Secret` |- |
### azure.#CLI Outputs
_No output._
## azure.#Config
Azure Config shared by all Azure packages
### azure.#Config Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*tenantId* | `dagger.#Secret` |AZURE tenant id |
|*subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*password* | `dagger.#Secret` |AZURE password for the service principal used |
### azure.#Config Outputs
_No output._
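A sketch of the shared configuration; all four service-principal values are supplied as external secret inputs.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/azure"
)

// Service-principal credentials, reused by the azure/* packages
azureConfig: azure.#Config & {
	tenantId:       dagger.#Secret @dagger(input)
	subscriptionId: dagger.#Secret @dagger(input)
	appId:          dagger.#Secret @dagger(input)
	password:       dagger.#Secret @dagger(input)
}
```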

View File

@ -1,41 +0,0 @@
---
sidebar_label: resourcegroup
---
# alpha.dagger.io/azure/resourcegroup
```cue
import "alpha.dagger.io/azure/resourcegroup"
```
## resourcegroup.#ResourceGroup
Create a resource group
### resourcegroup.#ResourceGroup Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*rgName* | `string` |ResourceGroup name |
|*rgLocation* | `string` |ResourceGroup location |
|*ctr.image.config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*ctr.image.config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*ctr.image.config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*ctr.image.config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*ctr.image.image.from* | `"mcr.microsoft.com/azure-cli:2.27.1@sha256:1e117183100c9fce099ebdc189d73e506e7b02d2b73d767d3fc07caee72f9fb1"` |Remote ref (example: "index.docker.io/alpine:latest") |
|*ctr.image.secret."/run/secrets/appId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/password"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/tenantId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/subscriptionId"* | `dagger.#Secret` |- |
|*ctr.env.AZURE_DEFAULTS_GROUP* | `string` |- |
|*ctr.env.AZURE_DEFAULTS_LOCATION* | `string` |- |
### resourcegroup.#ResourceGroup Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*id* | `string` |ResourceGroup Id Resource Id |

View File

@ -1,53 +0,0 @@
---
sidebar_label: staticwebapp
---
# alpha.dagger.io/azure/staticwebapp
```cue
import "alpha.dagger.io/azure/staticwebapp"
```
## staticwebapp.#StaticWebApp
Create a static web app
### staticwebapp.#StaticWebApp Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*rgName* | `string` |ResourceGroup name in which to create static webapp |
|*stappLocation* | `string` |StaticWebApp location |
|*stappName* | `string` |StaticWebApp name |
|*remote* | `string` |GitHubRepository URL |
|*ref* | `*"main" \| string` |GitHub Branch |
|*appLocation* | `*"/" \| string` |Location of your application code |
|*buildLocation* | `*"build" \| string` |Location of your build artifacts |
|*authToken* | `dagger.#Secret` |GitHub Personal Access Token |
|*ctr.image.config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*ctr.image.config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*ctr.image.config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*ctr.image.config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*ctr.image.image.from* | `"mcr.microsoft.com/azure-cli:2.27.1@sha256:1e117183100c9fce099ebdc189d73e506e7b02d2b73d767d3fc07caee72f9fb1"` |Remote ref (example: "index.docker.io/alpine:latest") |
|*ctr.image.secret."/run/secrets/appId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/password"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/tenantId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/subscriptionId"* | `dagger.#Secret` |- |
|*ctr.env.AZURE_DEFAULTS_GROUP* | `string` |- |
|*ctr.env.AZURE_DEFAULTS_LOCATION* | `string` |- |
|*ctr.env.AZURE_STATICWEBAPP_NAME* | `string` |- |
|*ctr.env.GIT_URL* | `string` |- |
|*ctr.env.GIT_BRANCH* | `*"main" \| string` |- |
|*ctr.env.APP_LOCATION* | `*"/" \| string` |- |
|*ctr.env.BUILD_LOCATION* | `*"build" \| string` |- |
|*ctr.secret."/run/secrets/git_pat"* | `dagger.#Secret` |- |
### staticwebapp.#StaticWebApp Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*defaultHostName* | `string` |DefaultHostName generated by Azure |

View File

@ -1,43 +0,0 @@
---
sidebar_label: storage
---
# alpha.dagger.io/azure/storage
```cue
import "alpha.dagger.io/azure/storage"
```
## storage.#StorageAccount
Create a storage account
### storage.#StorageAccount Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*rgName* | `string` |ResourceGroup name |
|*stLocation* | `string` |StorageAccount location |
|*stName* | `string` |StorageAccount name |
|*ctr.image.config.tenantId* | `dagger.#Secret` |AZURE tenant id |
|*ctr.image.config.subscriptionId* | `dagger.#Secret` |AZURE subscription id |
|*ctr.image.config.appId* | `dagger.#Secret` |AZURE app id for the service principal used |
|*ctr.image.config.password* | `dagger.#Secret` |AZURE password for the service principal used |
|*ctr.image.image.from* | `"mcr.microsoft.com/azure-cli:2.27.1@sha256:1e117183100c9fce099ebdc189d73e506e7b02d2b73d767d3fc07caee72f9fb1"` |Remote ref (example: "index.docker.io/alpine:latest") |
|*ctr.image.secret."/run/secrets/appId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/password"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/tenantId"* | `dagger.#Secret` |- |
|*ctr.image.secret."/run/secrets/subscriptionId"* | `dagger.#Secret` |- |
|*ctr.env.AZURE_DEFAULTS_GROUP* | `string` |- |
|*ctr.env.AZURE_DEFAULTS_LOCATION* | `string` |- |
|*ctr.env.AZURE_STORAGE_ACCOUNT* | `string` |- |
### storage.#StorageAccount Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*id* | `string` |StorageAccount Id |

View File

@ -1,22 +0,0 @@
---
sidebar_label: bats
---
# alpha.dagger.io/bats
```cue
import "alpha.dagger.io/bats"
```
## bats.#Bats
### bats.#Bats Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Source containing bats files |
|*ctr.mount."/src".from* | `dagger.#Artifact` |- |
### bats.#Bats Outputs
_No output._

View File

@ -1,35 +0,0 @@
---
sidebar_label: dagger
---
# alpha.dagger.io/dagger
Dagger core types
```cue
import "alpha.dagger.io/dagger"
```
## dagger.#Secret
A reference to an external secret, for example: - A password - A SSH private key - An API token Secrets are never merged in the Cue tree. They can only be used by a special filesystem mount designed to minimize leak risk.
### dagger.#Secret Inputs
_No input._
### dagger.#Secret Outputs
_No output._
## dagger.#Stream
Dagger stream. Can be mounted as a UNIX socket.
### dagger.#Stream Inputs
_No input._
### dagger.#Stream Outputs
_No output._
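For orientation, these core types were normally declared as external inputs of a plan, roughly as in this sketch; the input names are illustrative.
```cue
package main

import "alpha.dagger.io/dagger"

// An external secret, e.g. set with `dagger input secret token ...`
token: dagger.#Secret @dagger(input)

// An external stream, typically a unix socket such as the Docker socket
dockerSocket: dagger.#Stream @dagger(input)
```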

View File

@ -1,91 +0,0 @@
---
sidebar_label: engine
---
# alpha.dagger.io/dagger/engine
```cue
import "alpha.dagger.io/dagger/engine"
```
## engine.#Context
### engine.#Context Inputs
_No input._
### engine.#Context Outputs
_No output._
## engine.#FS
A reference to a filesystem tree. For example: - The root filesystem of a container - A source code repository - A directory containing binary artifacts Rule of thumb: if it fits in a tar archive, it fits in a #FS.
### engine.#FS Inputs
_No input._
### engine.#FS Outputs
_No output._
## engine.#ImageConfig
Container image config. See [OCI](https://opencontainers.org/). Spec left open on purpose to account for additional fields. [Image Spec](https://github.com/opencontainers/image-spec/blob/main/specs-go/v1/config.go) [Docker Superset](https://github.com/moby/buildkit/blob/master/frontend/dockerfile/dockerfile2llb/image.go)
### engine.#ImageConfig Inputs
_No input._
### engine.#ImageConfig Outputs
_No output._
## engine.#Plan
A deployment plan executed by `dagger up`
### engine.#Plan Inputs
_No input._
### engine.#Plan Outputs
_No output._
## engine.#Pull
Download a container image from a remote repository
### engine.#Pull Inputs
_No input._
### engine.#Pull Outputs
_No output._
## engine.#Secret
A reference to an external secret, for example: - A password - A SSH private key - An API token Secrets are never merged in the Cue tree. They can only be used by a special filesystem mount designed to minimize leak risk.
### engine.#Secret Inputs
_No input._
### engine.#Secret Outputs
_No output._
## engine.#Service
A reference to a network service endpoint, for example: - A TCP or UDP port - A unix or npipe socket - An HTTPS endpoint
### engine.#Service Inputs
_No input._
### engine.#Service Outputs
_No output._

View File

@ -1,185 +0,0 @@
---
sidebar_label: op
---
# alpha.dagger.io/dagger/op
op: low-level operations for Dagger processing pipelines
```cue
import "alpha.dagger.io/dagger/op"
```
## op.#Copy
### op.#Copy Inputs
_No input._
### op.#Copy Outputs
_No output._
## op.#DockerBuild
### op.#DockerBuild Inputs
_No input._
### op.#DockerBuild Outputs
_No output._
## op.#DockerLogin
### op.#DockerLogin Inputs
_No input._
### op.#DockerLogin Outputs
_No output._
## op.#Exec
### op.#Exec Inputs
_No input._
### op.#Exec Outputs
_No output._
## op.#Export
Export a value from fs state to cue
### op.#Export Inputs
_No input._
### op.#Export Outputs
_No output._
## op.#FetchContainer
### op.#FetchContainer Inputs
_No input._
### op.#FetchContainer Outputs
_No output._
## op.#FetchGit
### op.#FetchGit Inputs
_No input._
### op.#FetchGit Outputs
_No output._
## op.#FetchHTTP
### op.#FetchHTTP Inputs
_No input._
### op.#FetchHTTP Outputs
_No output._
## op.#Load
### op.#Load Inputs
_No input._
### op.#Load Outputs
_No output._
## op.#Local
### op.#Local Inputs
_No input._
### op.#Local Outputs
_No output._
## op.#Mkdir
### op.#Mkdir Inputs
_No input._
### op.#Mkdir Outputs
_No output._
## op.#Op
One operation in a pipeline
### op.#Op Inputs
_No input._
### op.#Op Outputs
_No output._
## op.#PushContainer
### op.#PushContainer Inputs
_No input._
### op.#PushContainer Outputs
_No output._
## op.#SaveImage
### op.#SaveImage Inputs
_No input._
### op.#SaveImage Outputs
_No output._
## op.#Subdir
### op.#Subdir Inputs
_No input._
### op.#Subdir Outputs
_No output._
## op.#Workdir
### op.#Workdir Inputs
_No input._
### op.#Workdir Outputs
_No output._
## op.#WriteFile
### op.#WriteFile Inputs
_No input._
### op.#WriteFile Outputs
_No output._
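As a sketch of how these low-level operations composed (not part of the original reference), a component was defined by an `#up` pipeline; the image tag and command below are illustrative.
```cue
package main

import "alpha.dagger.io/dagger/op"

// A minimal #up pipeline: fetch a base image, then run a command in it
hello: #up: [
	op.#FetchContainer & {ref: "alpine:3.15"},
	op.#Exec & {
		args: ["sh", "-c", "echo hello world"]
	},
]
```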

View File

@ -1,101 +0,0 @@
---
sidebar_label: docker
---
# alpha.dagger.io/docker
Docker container operations
```cue
import "alpha.dagger.io/docker"
```
## docker.#Build
Build a Docker image from source
### docker.#Build Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Build context |
|*dockerfile* | `*null \| string` |Dockerfile passed as a string |
### docker.#Build Outputs
_No output._
## docker.#Command
A container image that can run any docker command
### docker.#Command Inputs
_No input._
### docker.#Command Outputs
_No output._
## docker.#Load
Load a docker image into a docker engine
### docker.#Load Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*tag* | `string` |Name and optionally a tag in the 'name:tag' format |
|*source* | `dagger.#Artifact` |Image source |
### docker.#Load Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*id* | `string` |Image ID |
## docker.#Pull
Pull a docker container
### docker.#Pull Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*from* | `string` |Remote ref (example: "index.docker.io/alpine:latest") |
### docker.#Pull Outputs
_No output._
## docker.#Push
Push a docker image to a remote registry
### docker.#Push Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*target* | `string` |Remote target (example: "index.docker.io/alpine:latest") |
|*source* | `dagger.#Artifact` |Image source |
### docker.#Push Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*ref* | `string` |Image ref |
|*digest* | `string` |Image digest |
## docker.#Run
### docker.#Run Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*ref* | `string` |Image reference (e.g: nginx:alpine) |
|*recreate* | `*true \| bool` |Recreate container? |
|*run.env.IMAGE_REF* | `string` |- |
### docker.#Run Outputs
_No output._
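A sketch tying these definitions together: build an image from an input context, then push it. The registry target is a placeholder, and feeding the build result directly into the push is assumed from earlier alpha.dagger.io examples rather than stated in the tables above.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/docker"
)

// Build context provided as an input artifact (e.g. a repository checkout)
source: dagger.#Artifact @dagger(input)

// Build an image from the context's Dockerfile
image: docker.#Build & {
	"source": source
}

// Push the built image; `push.ref` and `push.digest` are outputs
push: docker.#Push & {
	target: "registry.example.com/sample/app:latest"
	source: image
}
```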

View File

@ -1,36 +0,0 @@
---
sidebar_label: compose
---
# alpha.dagger.io/docker/compose
Docker-compose operations
```cue
import "alpha.dagger.io/docker/compose"
```
## compose.#App
### compose.#App Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*name* | `*"source" \| string` |App name (use as COMPOSE_PROJECT_NAME) |
|*registries* | `[...{\n username: string\n secret: {\n $dagger: {\n secret: {}\n }\n }\n}]` |Image registries |
### compose.#App Outputs
_No output._
## compose.#Client
A container image to run the docker-compose client
### compose.#Client Inputs
_No input._
### compose.#Client Outputs
_No output._

View File

@ -1,45 +0,0 @@
---
sidebar_label: gcp
---
# alpha.dagger.io/gcp
Google Cloud Platform
```cue
import "alpha.dagger.io/gcp"
```
## gcp.#Config
Base Google Cloud Config
### gcp.#Config Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*region* | `*null \| string` |GCP region |
|*zone* | `*null \| string` |GCP zone |
|*project* | `string` |GCP project |
|*serviceKey* | `dagger.#Secret` |GCP service key |
### gcp.#Config Outputs
_No output._
## gcp.#GCloud
Re-usable gcloud component
### gcp.#GCloud Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
### gcp.#GCloud Outputs
_No output._
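A sketch of the shared configuration; the project and region values are illustrative and the service key is an external secret input.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/gcp"
)

// Shared GCP configuration, reused by the gcp/* packages
gcpConfig: gcp.#Config & {
	project:    "sample-project"
	region:     "us-central1"
	serviceKey: dagger.#Secret @dagger(input)
}
```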

View File

@ -1,30 +0,0 @@
---
sidebar_label: cloudrun
---
# alpha.dagger.io/gcp/cloudrun
```cue
import "alpha.dagger.io/gcp/cloudrun"
```
## cloudrun.#Service
Service deploys a Cloud Run service based on provided GCR image
### cloudrun.#Service Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
|*name* | `string` |Cloud Run service name |
|*image* | `string` |GCR image ref |
|*platform* | `*"managed" \| string` |Cloud Run platform |
|*port* | `*"80" \| string` |Cloud Run service exposed port |
### cloudrun.#Service Outputs
_No output._
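A deployment sketch using the inputs above; the service name and image reference are placeholders for an image already pushed to GCR.
```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/gcp"
	"alpha.dagger.io/gcp/cloudrun"
)

gcpConfig: gcp.#Config & {
	project:    "sample-project"
	region:     "us-central1"
	serviceKey: dagger.#Secret @dagger(input)
}

// Deploy an existing GCR image as a Cloud Run service
deploy: cloudrun.#Service & {
	config: gcpConfig
	name:   "sample-service"
	image:  "gcr.io/sample-project/sample-app:latest"
	port:   "8080"
}
```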

View File

@ -1,31 +0,0 @@
---
sidebar_label: gcr
---
# alpha.dagger.io/gcp/gcr
Google Container Registry
```cue
import "alpha.dagger.io/gcp/gcr"
```
## gcr.#Credentials
Credentials retriever for GCR
### gcr.#Credentials Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
### gcr.#Credentials Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*username* | `"oauth2accesstoken"` |GCR registry username |
|*secret* | `string` |GCR registry secret |

View File

@ -1,35 +0,0 @@
---
sidebar_label: gcs
---
# alpha.dagger.io/gcp/gcs
Google Cloud Storage
```cue
import "alpha.dagger.io/gcp/gcs"
```
## gcs.#Object
GCS Bucket object(s) sync
### gcs.#Object Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
|*source* | `dagger.#Artifact` |Source Artifact to upload to GCS |
|*target* | `string` |Target GCS URL (eg. gs://\<bucket-name\>/\<path\>/\<sub-path\>) |
|*delete* | `*false \| true` |Delete files that already exist on remote destination |
|*contentType* | `*"" \| string` |Object content type |
|*always* | `*true \| false` |Always write the object to GCS |
### gcs.#Object Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*url* | `string` |URL of the uploaded GCS object |
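A minimal usage sketch; the bucket path, project ID and input names are placeholders:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/gcp"
	"alpha.dagger.io/gcp/gcs"
)

// GCP service account key and directory to upload (hypothetical inputs)
gcpKey: dagger.#Secret   @dagger(input)
site:   dagger.#Artifact @dagger(input)

// Sync the directory to a bucket (bucket name and path are placeholders)
upload: gcs.#Object & {
	config: gcp.#Config & {
		project:    "my-project"
		serviceKey: gcpKey
	}
	source: site
	target: "gs://my-bucket/site"
}

// upload.url exposes the URL of the uploaded object(s)
```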

View File

@ -1,32 +0,0 @@
---
sidebar_label: gke
---
# alpha.dagger.io/gcp/gke
Google Kubernetes Engine
```cue
import "alpha.dagger.io/gcp/gke"
```
## gke.#KubeConfig
KubeConfig outputs a valid kube-auth-config for the kubectl client
### gke.#KubeConfig Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
|*clusterName* | `string` |GKE cluster name |
|*version* | `*"v1.19.9" \| string` |Kubectl version |
### gke.#KubeConfig Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*kubeconfig* | `string` |kubeconfig is the generated kube configuration file |
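A usage sketch under assumed values; the project, zone and cluster name are placeholders:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/gcp"
	"alpha.dagger.io/gcp/gke"
)

// GCP service account key (hypothetical input)
gcpKey: dagger.#Secret @dagger(input)

// Fetch a kubeconfig for an existing GKE cluster (names are placeholders)
cluster: gke.#KubeConfig & {
	config: gcp.#Config & {
		project:    "my-project"
		zone:       "us-west1-a"
		serviceKey: gcpKey
	}
	clusterName: "my-cluster"
}

// cluster.kubeconfig can then be fed to kubernetes.#Resources or helm.#Chart
```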

View File

@ -1,30 +0,0 @@
---
sidebar_label: secretmanager
---
# alpha.dagger.io/gcp/secretmanager
Google Cloud Secret Manager
```cue
import "alpha.dagger.io/gcp/secretmanager"
```
## secretmanager.#Secrets
### secretmanager.#Secrets Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*config.region* | `*null \| string` |GCP region |
|*config.zone* | `*null \| string` |GCP zone |
|*config.project* | `string` |GCP project |
|*config.serviceKey* | `dagger.#Secret` |GCP service key |
|*deployment.image.config.region* | `*null \| string` |GCP region |
|*deployment.image.config.zone* | `*null \| string` |GCP zone |
|*deployment.image.config.project* | `string` |GCP project |
|*deployment.image.config.serviceKey* | `dagger.#Secret` |GCP service key |
### secretmanager.#Secrets Outputs
_No output._

View File

@ -1,93 +0,0 @@
---
sidebar_label: git
---
# alpha.dagger.io/git
Git operations
```cue
import "alpha.dagger.io/git"
```
## git.#Commit
Commit & push to git repository
### git.#Commit Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*repository.remote* | `string` |Repository remote URL |
|*repository.authToken* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |Authentication token (PAT or password) |
|*repository.branch* | `string` |Git branch |
|*name* | `string` |Username |
|*email* | `string` |Email |
|*message* | `string` |Commit message |
|*force* | `*false \| bool` |Force push options |
### git.#Commit Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*hash* | `string` |Commit hash |
## git.#CurrentBranch
Get the name of the current checked out branch or tag
### git.#CurrentBranch Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*repository* | `dagger.#Artifact` |Git repository |
### git.#CurrentBranch Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*name* | `string` |Git branch name |
## git.#Image
### git.#Image Inputs
_No input._
### git.#Image Outputs
_No output._
## git.#Repository
A git repository
### git.#Repository Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*remote* | `string` |Git remote link |
|*ref* | `string` |Git ref: can be a commit, tag or branch. Example: "main" |
|*subdir* | `*null \| string` |(optional) Subdirectory |
|*authToken* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |(optional) Add Personal Access Token |
|*authHeader* | `*null \| {\n $dagger: {\n secret: {}\n }\n}` |(optional) Add OAuth Token |
### git.#Repository Outputs
_No output._
## git.#Tags
List tags of a repository
### git.#Tags Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*repository* | `dagger.#Artifact` |Git repository |
### git.#Tags Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*tags* | `[...string]` |Repository tags |
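A sketch of one possible wiring, fetching a repository and listing its tags; the remote URL and ref are placeholders, and passing a `git.#Repository` directly as the `repository` artifact is an assumption of this example:

```cue
package main

import (
	"alpha.dagger.io/git"
)

// Fetch a repository at a given ref (remote and ref are placeholders)
repo: git.#Repository & {
	remote: "https://github.com/org/project"
	ref:    "main"
}

// List the tags of the fetched repository
tags: git.#Tags & {
	repository: repo
}

// tags.tags holds the list of repository tags
```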

View File

@ -1,80 +0,0 @@
---
sidebar_label: go
---
# alpha.dagger.io/go
Go build operations
```cue
import "alpha.dagger.io/go"
```
## go.#Build
Go application builder
### go.#Build Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"1.16" \| string` |Go version to use |
|*source* | `dagger.#Artifact` |Source Directory to build |
|*packages* | `*"." \| string` |Packages to build |
|*arch* | `*"amd64" \| string` |Target architecture |
|*os* | `*"linux" \| string` |Target OS |
|*tags* | `*"" \| string` |Build tags to use for building |
|*ldflags* | `*"" \| string` |LDFLAGS to use for linking |
### go.#Build Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*output* | `string` |Specify the targeted binary name |
## go.#Container
A standalone go environment
### go.#Container Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"1.16" \| string` |Go version to use |
|*source* | `dagger.#Artifact` |Source code |
|*image.from* | `"docker.io/golang:1.16-alpine"` |Remote ref (example: "index.docker.io/alpine:latest") |
### go.#Container Outputs
_No output._
## go.#Go
Re-usable component for the Go compiler
### go.#Go Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"1.16" \| string` |Go version to use |
|*args* | `[...string]` |Arguments to the Go binary |
|*source* | `dagger.#Artifact` |Source Directory to build |
### go.#Go Outputs
_No output._
## go.#Test
### go.#Test Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"1.16" \| string` |Go version to use |
|*source* | `dagger.#Artifact` |Source Directory to build |
|*packages* | `*"." \| string` |Packages to test |
|*args* | `["test", "-v", *"." \| string]` |Arguments to the Go binary |
### go.#Test Outputs
_No output._
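A minimal `go.#Build` sketch; the source input name and package path are placeholders:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/go"
)

// Go source tree (hypothetical input)
src: dagger.#Artifact @dagger(input)

// Cross-compile a package, overriding the documented defaults where needed
build: go.#Build & {
	source:   src
	packages: "./cmd/hello" // placeholder package path
	os:       "linux"
	arch:     "amd64"
}
```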

View File

@ -1,23 +0,0 @@
---
sidebar_label: graphql
---
# alpha.dagger.io/graphql
```cue
import "alpha.dagger.io/graphql"
```
## graphql.#Query
### graphql.#Query Inputs
_No input._
### graphql.#Query Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*post.statusCode* | `string` |- |
|*post.body* | `string` |- |
|*data* | `_\|_` |- |

View File

@ -1,86 +0,0 @@
---
sidebar_label: http
---
# alpha.dagger.io/http
```cue
import "alpha.dagger.io/http"
```
## http.#Delete
### http.#Delete Inputs
_No input._
### http.#Delete Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*statusCode* | `string` |- |
|*body* | `string` |- |
## http.#Do
### http.#Do Inputs
_No input._
### http.#Do Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*statusCode* | `string` |- |
|*body* | `string` |- |
## http.#Get
### http.#Get Inputs
_No input._
### http.#Get Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*statusCode* | `string` |- |
|*body* | `string` |- |
## http.#Post
### http.#Post Inputs
_No input._
### http.#Post Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*statusCode* | `string` |- |
|*body* | `string` |- |
## http.#Put
### http.#Put Inputs
_No input._
### http.#Put Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*statusCode* | `string` |- |
|*body* | `string` |- |
## http.#Wait
URL listener. Creates a dependency on a URL.
### http.#Wait Inputs
_No input._
### http.#Wait Outputs
_No output._

View File

@ -1,71 +0,0 @@
---
sidebar_label: io
---
# alpha.dagger.io/io
IO operations
```cue
import "alpha.dagger.io/io"
```
## io.#Dir
Standard interface for directory operations in cue
### io.#Dir Inputs
_No input._
### io.#Dir Outputs
_No output._
## io.#File
Standard interface for file operations in cue
### io.#File Inputs
_No input._
### io.#File Outputs
_No output._
## io.#ReadWriter
Standard ReadWriter interface
### io.#ReadWriter Inputs
_No input._
### io.#ReadWriter Outputs
_No output._
## io.#Reader
Standard Reader interface
### io.#Reader Inputs
_No input._
### io.#Reader Outputs
_No output._
## io.#Writer
Standard Writer interface
### io.#Writer Inputs
_No input._
### io.#Writer Outputs
_No output._

View File

@ -1,32 +0,0 @@
---
sidebar_label: maven
---
# alpha.dagger.io/java/maven
Maven is a build automation tool for Java
```cue
import "alpha.dagger.io/java/maven"
```
## maven.#Project
A Maven project
### maven.#Project Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Application source code |
|*package* | `struct` |Extra alpine packages to install |
|*env* | `struct` |Environment variables |
|*phases* | `*["package"] \| [...string]` |- |
|*goals* | `*[] \| [...string]` |- |
|*args* | `*[] \| [...string]` |Optional arguments for the script |
### maven.#Project Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*build* | `struct` |Build output directory |
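A minimal usage sketch, with a placeholder source input and the documented defaults left in place:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/java/maven"
)

// Maven project sources (hypothetical input)
src: dagger.#Artifact @dagger(input)

app: maven.#Project & {
	source: src
	// phases defaults to ["package"]
}

// app.build holds the build output directory
```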

View File

@ -1,34 +0,0 @@
---
sidebar_label: yarn
---
# alpha.dagger.io/js/yarn
Yarn is a package manager for Javascript applications
```cue
import "alpha.dagger.io/js/yarn"
```
## yarn.#Package
A Yarn package
### yarn.#Package Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Application source code |
|*package* | `struct` |Extra alpine packages to install |
|*cwd* | `*"." \| string` |working directory to use |
|*env* | `struct` |Environment variables |
|*writeEnvFile* | `*"" \| string` |Write the contents of `environment` to this file, in the "envfile" format |
|*buildDir* | `*"build" \| string` |Read build output from this directory (path must be relative to working directory) |
|*script* | `*"build" \| string` |Run this yarn script |
|*args* | `*[] \| [...string]` |Optional arguments for the script |
### yarn.#Package Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*build* | `struct` |Build output directory |
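A minimal usage sketch, relying on the documented defaults; the source input name is a placeholder:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/js/yarn"
)

// Application sources (hypothetical input)
src: dagger.#Artifact @dagger(input)

app: yarn.#Package & {
	source: src
	// cwd defaults to ".", script to "build", buildDir to "build"
}

// app.build holds the build output directory
```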

View File

@ -1,44 +0,0 @@
---
sidebar_label: kubernetes
---
# alpha.dagger.io/kubernetes
Kubernetes client operations
```cue
import "alpha.dagger.io/kubernetes"
```
## kubernetes.#Kubectl
Kubectl client
### kubernetes.#Kubectl Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"v1.19.9" \| string` |Kubectl version |
### kubernetes.#Kubectl Outputs
_No output._
## kubernetes.#Resources
Apply Kubernetes resources
### kubernetes.#Resources Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Kubernetes config to deploy |
|*manifest* | `*null \| string` |Kubernetes manifest to deploy inlined in a string |
|*url* | `*null \| string` |Kubernetes manifest url to deploy remote configuration |
|*namespace* | `*"default" \| string` |Kubernetes Namespace to deploy to |
|*version* | `*"v1.19.9" \| string` |Version of kubectl client |
|*kubeconfig* | `(string\|struct)` |Kube config file |
### kubernetes.#Resources Outputs
_No output._
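A sketch of applying a directory of manifests; the namespace and input names are placeholders, and the kubeconfig is assumed to be provided as a string input:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/kubernetes"
)

// Manifests to apply and kubeconfig contents (hypothetical inputs)
manifests:  dagger.#Artifact @dagger(input)
kubeConfig: string           @dagger(input)

deploy: kubernetes.#Resources & {
	source:     manifests
	namespace:  "staging" // placeholder namespace
	kubeconfig: kubeConfig
}
```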

View File

@ -1,36 +0,0 @@
---
sidebar_label: helm
---
# alpha.dagger.io/kubernetes/helm
Helm package manager
```cue
import "alpha.dagger.io/kubernetes/helm"
```
## helm.#Chart
Install a Helm chart
### helm.#Chart Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*name* | `string` |Helm deployment name |
|*chart* | `*null \| string` |Helm chart to install from repository |
|*repository* | `*null \| string` |Helm chart repository |
|*values* | `*null \| string` |Helm values (either a YAML string or a Cue structure) |
|*namespace* | `string` |Kubernetes Namespace to deploy to |
|*action* | `*"installOrUpgrade" \| "install" \| "upgrade"` |Helm action to apply |
|*timeout* | `*"5m" \| string` |time to wait for any individual Kubernetes operation (like Jobs for hooks) |
|*wait* | `*true \| bool` |if set, will wait until all Pods, PVCs, Services, and minimum number of Pods of a Deployment, StatefulSet, or ReplicaSet are in a ready state before marking the release as successful. It will wait for as long as timeout |
|*atomic* | `*true \| bool` |if set, the installation process purges the chart on failure. The wait option is set automatically when atomic is used |
|*kubeconfig* | `(string\|struct)` |Kube config file |
|*version* | `*"3.5.2" \| string` |Helm version |
|*kubectlVersion* | `*"v1.19.9" \| string` |Kubectl version |
### helm.#Chart Outputs
_No output._
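A usage sketch installing a chart from a repository; the release name, chart, repository URL and kubeconfig input are placeholders:

```cue
package main

import (
	"alpha.dagger.io/kubernetes/helm"
)

// kubeconfig contents (hypothetical input)
kubeConfig: string @dagger(input)

// Install a chart from a repository (release, chart and repo are placeholders)
redis: helm.#Chart & {
	name:       "my-redis"
	chart:      "redis"
	repository: "https://charts.bitnami.com/bitnami"
	namespace:  "default"
	kubeconfig: kubeConfig
}
```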

View File

@ -1,39 +0,0 @@
---
sidebar_label: kustomize
---
# alpha.dagger.io/kubernetes/kustomize
Kustomize config management
```cue
import "alpha.dagger.io/kubernetes/kustomize"
```
## kustomize.#Kustomization
### kustomize.#Kustomization Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"v3.8.7" \| string` |Kustomize binary version |
### kustomize.#Kustomization Outputs
_No output._
## kustomize.#Kustomize
Apply a Kubernetes Kustomize folder
### kustomize.#Kustomize Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*source* | `dagger.#Artifact` |Kubernetes source |
|*kustomization* | `string` |Optional Kustomization file |
|*version* | `*"v3.8.7" \| string` |Kustomize binary version |
### kustomize.#Kustomize Outputs
_No output._
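A sketch under the assumption that `kustomization` takes the kustomization contents as a string; the source input and file list are placeholders:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/kubernetes/kustomize"
)

// Kubernetes sources (hypothetical input)
src: dagger.#Artifact @dagger(input)

// Render manifests with a kustomization passed as a string (placeholder contents)
manifests: kustomize.#Kustomize & {
	source: src
	kustomization: """
		resources:
		- deployment.yaml
		- service.yaml
		"""
}
```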

View File

@ -1,48 +0,0 @@
---
sidebar_label: netlify
---
# alpha.dagger.io/netlify
Netlify client operations
```cue
import "alpha.dagger.io/netlify"
```
## netlify.#Account
Netlify account credentials
### netlify.#Account Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*name* | `*"" \| string` |Use this Netlify account name (also referred to as "team" in the Netlify docs) |
|*token* | `dagger.#Secret` |Netlify authentication token |
### netlify.#Account Outputs
_No output._
## netlify.#Site
Netlify site
### netlify.#Site Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*account.name* | `*"" \| string` |Use this Netlify account name (also referred to as "team" in the Netlify docs) |
|*account.token* | `dagger.#Secret` |Netlify authentication token |
|*contents* | `dagger.#Artifact` |Contents of the application to deploy |
|*name* | `string` |Deploy to this Netlify site |
|*create* | `*true \| bool` |Create the Netlify site if it doesn't exist? |
### netlify.#Site Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*url* | `string` |Website url |
|*deployUrl* | `string` |Unique Deploy URL |
|*logsUrl* | `string` |Logs URL for this deployment |
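A minimal deployment sketch; the token and contents inputs and the site name are placeholders:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/netlify"
)

// Netlify token and site contents (hypothetical inputs)
netlifyToken: dagger.#Secret   @dagger(input)
app:          dagger.#Artifact @dagger(input)

site: netlify.#Site & {
	account: token: netlifyToken
	contents: app
	name:     "my-site" // placeholder site name
}

// site.url, site.deployUrl and site.logsUrl are available after deployment
```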

View File

@ -1,55 +0,0 @@
---
sidebar_label: os
---
# alpha.dagger.io/os
OS operations
```cue
import "alpha.dagger.io/os"
```
## os.#Container
Built-in container implementation, using buildkit
### os.#Container Inputs
_No input._
### os.#Container Outputs
_No output._
## os.#Dir
### os.#Dir Inputs
_No input._
### os.#Dir Outputs
_No output._
## os.#File
Built-in file implementation, using buildkit. A single file.
### os.#File Inputs
_No input._
### os.#File Outputs
_No output._
## os.#ReadDir
### os.#ReadDir Inputs
_No input._
### os.#ReadDir Outputs
_No output._

View File

@ -1,28 +0,0 @@
---
sidebar_label: random
---
# alpha.dagger.io/random
Random generation utilities
```cue
import "alpha.dagger.io/random"
```
## random.#String
Generate a random string
### random.#String Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*seed* | `string` |Seed of the random string to generate. When using the same `seed`, the same random string will be generated because of caching. |
|*length* | `*12 \| number` |length of the string |
### random.#String Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*out* | `string` |generated random string |
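A minimal usage sketch; the seed value and length are placeholders:

```cue
package main

import (
	"alpha.dagger.io/random"
)

// Generate an 8-character random suffix (seed value is a placeholder)
suffix: random.#String & {
	seed:   "my-bucket"
	length: 8
}

// suffix.out holds the generated string; the same seed returns the same
// cached value on subsequent runs
```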

View File

@ -1,28 +0,0 @@
---
sidebar_label: terraform
---
# alpha.dagger.io/terraform
Terraform operations
```cue
import "alpha.dagger.io/terraform"
```
## terraform.#Configuration
Terraform configuration
### terraform.#Configuration Inputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*version* | `*"latest" \| string` |Terraform version |
|*source* | `dagger.#Artifact` |Source configuration |
### terraform.#Configuration Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*output* | `struct` |- |
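A minimal usage sketch, with a placeholder source input:

```cue
package main

import (
	"alpha.dagger.io/dagger"
	"alpha.dagger.io/terraform"
)

// Directory containing the *.tf files (hypothetical input)
src: dagger.#Artifact @dagger(input)

infra: terraform.#Configuration & {
	source: src
	// version defaults to "latest"
}

// infra.output exposes the Terraform outputs as a struct
```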

View File

@ -1,47 +0,0 @@
---
sidebar_label: trivy
---
# alpha.dagger.io/trivy
```cue
import "alpha.dagger.io/trivy"
```
## trivy.#CLI
Re-usable CLI component
### trivy.#CLI Inputs
_No input._
### trivy.#CLI Outputs
_No output._
## trivy.#Config
Trivy Configuration
### trivy.#Config Inputs
_No input._
### trivy.#Config Outputs
_No output._
## trivy.#Image
Scan an Image
### trivy.#Image Inputs
_No input._
### trivy.#Image Outputs
| Name | Type | Description |
| ------------- |:-------------: |:-------------: |
|*ref* | `string` |Reference analyzed |

View File

@ -1,212 +0,0 @@
package environment
import (
"context"
"fmt"
"cuelang.org/go/cue"
cueflow "cuelang.org/go/tools/flow"
"go.dagger.io/dagger/compiler"
"go.dagger.io/dagger/plancontext"
"go.dagger.io/dagger/solver"
"go.dagger.io/dagger/state"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
"github.com/rs/zerolog/log"
)
type Environment struct {
state *state.State
// Layer 1: plan configuration
plan *compiler.Value
// Layer 2: user inputs
input *compiler.Value
// plan + inputs
src *compiler.Value
// Layer 3: computed values
computed *compiler.Value
}
func New(st *state.State) (*Environment, error) {
var err error
e := &Environment{
state: st,
}
e.plan, err = st.CompilePlan(context.TODO())
if err != nil {
return nil, err
}
e.input, err = st.CompileInputs()
if err != nil {
return nil, err
}
e.computed = compiler.NewValue()
e.src = compiler.NewValue()
if err := e.src.FillPath(cue.MakePath(), e.plan); err != nil {
return nil, err
}
if err := e.src.FillPath(cue.MakePath(), e.input); err != nil {
return nil, err
}
return e, nil
}
func (e *Environment) Name() string {
return e.state.Name
}
func (e *Environment) Computed() *compiler.Value {
return e.computed
}
func (e *Environment) Context() *plancontext.Context {
return e.state.Context
}
// Up missing values in environment configuration, and write them to state.
func (e *Environment) Up(ctx context.Context, s solver.Solver) error {
ctx, span := otel.Tracer("dagger").Start(ctx, "environment.Up")
defer span.End()
// Orchestrate execution with cueflow
flow := cueflow.New(
&cueflow.Config{},
e.src.Cue(),
newTaskFunc(newPipelineRunner(e.computed, s, e.state.Context)),
)
if err := flow.Run(ctx); err != nil {
return err
}
// FIXME: canceling the context makes flow return `nil`
// Check explicitly if the context is canceled.
select {
case <-ctx.Done():
return ctx.Err()
default:
return nil
}
}
type DownOpts struct{}
func (e *Environment) Down(ctx context.Context, _ *DownOpts) error {
panic("NOT IMPLEMENTED")
}
type QueryOpts struct{}
func newTaskFunc(runner cueflow.RunnerFunc) cueflow.TaskFunc {
return func(flowVal cue.Value) (cueflow.Runner, error) {
v := compiler.Wrap(flowVal)
if !IsComponent(v) {
// No compute script
return nil, nil
}
return runner, nil
}
}
func newPipelineRunner(computed *compiler.Value, s solver.Solver, pctx *plancontext.Context) cueflow.RunnerFunc {
return cueflow.RunnerFunc(func(t *cueflow.Task) error {
ctx := t.Context()
lg := log.
Ctx(ctx).
With().
Str("task", t.Path().String()).
Logger()
ctx = lg.WithContext(ctx)
ctx, span := otel.Tracer("dagger").Start(ctx, fmt.Sprintf("compute: %s", t.Path().String()))
defer span.End()
for _, dep := range t.Dependencies() {
lg.
Debug().
Str("dependency", dep.Path().String()).
Msg("dependency detected")
}
v := compiler.Wrap(t.Value())
p := NewPipeline(v, s, pctx)
err := p.Run(ctx)
if err != nil {
// Record the error
span.AddEvent("command", trace.WithAttributes(
attribute.String("error", err.Error()),
))
return err
}
// Mirror the computed values in both `Task` and `Result`
if !p.Computed().IsConcrete() {
return nil
}
if err := t.Fill(p.Computed().Cue()); err != nil {
lg.
Error().
Err(err).
Msg("failed to fill task")
return err
}
// Merge task value into output
if err := computed.FillPath(t.Path(), p.Computed()); err != nil {
lg.
Error().
Err(err).
Msg("failed to fill task result")
return err
}
return nil
})
}
func (e *Environment) ScanInputs(ctx context.Context, mergeUserInputs bool) ([]*compiler.Value, error) {
src := e.plan
if mergeUserInputs {
src = e.src
}
return ScanInputs(ctx, src), nil
}
func (e *Environment) ScanOutputs(ctx context.Context) ([]*compiler.Value, error) {
src := compiler.NewValue()
if err := src.FillPath(cue.MakePath(), e.plan); err != nil {
return nil, err
}
if err := src.FillPath(cue.MakePath(), e.input); err != nil {
return nil, err
}
if e.state.Computed != "" {
computed, err := compiler.DecodeJSON("", []byte(e.state.Computed))
if err != nil {
return nil, err
}
if err := src.FillPath(cue.MakePath(), computed); err != nil {
return nil, err
}
}
return ScanOutputs(ctx, src), nil
}

View File

@ -1,82 +0,0 @@
package environment
import (
"context"
"cuelang.org/go/cue"
"go.dagger.io/dagger/compiler"
)
func isReference(val cue.Value) bool {
isRef := func(v cue.Value) bool {
_, ref := v.ReferencePath()
if ref.String() == "" || v.Path().String() == ref.String() {
// not a reference
return false
}
for _, s := range ref.Selectors() {
if s.IsDefinition() {
// if we reference to a definition, we skip the check
return false
}
}
return true
}
op, vals := val.Expr()
if op == cue.NoOp {
return isRef(val)
}
for _, v := range vals {
// if the expr has an op (& or |, etc...), check the expr values, recursively
if isReference(v) {
return true
}
}
return isRef(val)
}
func ScanInputs(ctx context.Context, value *compiler.Value) []*compiler.Value {
inputs := []*compiler.Value{}
value.Walk(
func(val *compiler.Value) bool {
if isReference(val.Cue()) {
return false
}
if !val.HasAttr("input") {
return true
}
inputs = append(inputs, val)
return true
}, nil,
)
return inputs
}
func ScanOutputs(ctx context.Context, value *compiler.Value) []*compiler.Value {
inputs := []*compiler.Value{}
value.Walk(
func(val *compiler.Value) bool {
if !val.HasAttr("output") {
return true
}
inputs = append(inputs, val)
return true
}, nil,
)
return inputs
}

File diff suppressed because it is too large

View File

@ -1,162 +0,0 @@
package keychain
import (
"context"
"fmt"
"os"
"time"
"go.mozilla.org/sops/v3"
sopsaes "go.mozilla.org/sops/v3/aes"
sopsage "go.mozilla.org/sops/v3/age"
"go.mozilla.org/sops/v3/cmd/sops/common"
sopskeys "go.mozilla.org/sops/v3/keys"
sopsyaml "go.mozilla.org/sops/v3/stores/yaml"
"go.mozilla.org/sops/v3/version"
)
var (
cipher = sopsaes.NewCipher()
)
// setupEnv: hack to inject a SOPS env var for age
func setupEnv() error {
p, err := Path()
if err != nil {
return err
}
return os.Setenv("SOPS_AGE_KEY_FILE", p)
}
// Encrypt data using SOPS with the AGE backend, using the provided public key
func Encrypt(ctx context.Context, path string, plaintext []byte, key string) ([]byte, error) {
if err := setupEnv(); err != nil {
return nil, err
}
store := &sopsyaml.Store{}
branches, err := store.LoadPlainFile(plaintext)
if err != nil {
return nil, err
}
ageKeys, err := sopsage.MasterKeysFromRecipients(key)
if err != nil {
return nil, err
}
ageMasterKeys := make([]sopskeys.MasterKey, 0, len(ageKeys))
for _, k := range ageKeys {
ageMasterKeys = append(ageMasterKeys, k)
}
var group sops.KeyGroup
group = append(group, ageMasterKeys...)
tree := sops.Tree{
Branches: branches,
Metadata: sops.Metadata{
KeyGroups: []sops.KeyGroup{group},
EncryptedSuffix: "secret",
Version: version.Version,
},
FilePath: path,
}
// Generate a data key
dataKey, errs := tree.GenerateDataKey()
if len(errs) > 0 {
return nil, fmt.Errorf("error encrypting the data key with one or more master keys: %v", errs)
}
err = common.EncryptTree(common.EncryptTreeOpts{
DataKey: dataKey, Tree: &tree, Cipher: cipher,
})
if err != nil {
return nil, err
}
return store.EmitEncryptedFile(tree)
}
// Reencrypt a file with new content using the same keys
func Reencrypt(_ context.Context, path string, plaintext []byte) ([]byte, error) {
if err := setupEnv(); err != nil {
return nil, err
}
current, err := os.ReadFile(path)
if err != nil {
return nil, err
}
// Load the encrypted file
store := &sopsyaml.Store{}
tree, err := store.LoadEncryptedFile(current)
if err != nil {
return nil, err
}
// Update the file with the new data
newBranches, err := store.LoadPlainFile(plaintext)
if err != nil {
return nil, err
}
tree.Branches = newBranches
// Re-encrypt the file
key, err := tree.Metadata.GetDataKey()
if err != nil {
return nil, err
}
err = common.EncryptTree(common.EncryptTreeOpts{
DataKey: key, Tree: &tree, Cipher: cipher,
})
if err != nil {
return nil, err
}
return store.EmitEncryptedFile(tree)
}
// Decrypt data using sops
func Decrypt(_ context.Context, encrypted []byte) ([]byte, error) {
if err := setupEnv(); err != nil {
return nil, err
}
store := &sopsyaml.Store{}
// Load SOPS file and access the data key
tree, err := store.LoadEncryptedFile(encrypted)
if err != nil {
return nil, err
}
key, err := tree.Metadata.GetDataKey()
if err != nil {
if userErr, ok := err.(sops.UserError); ok {
err = fmt.Errorf(userErr.UserError())
}
return nil, err
}
// Decrypt the tree
mac, err := tree.Decrypt(key, cipher)
if err != nil {
return nil, err
}
// Compute the hash of the cleartext tree and compare it with
// the one that was stored in the document. If they match,
// integrity was preserved
originalMac, err := cipher.Decrypt(
tree.Metadata.MessageAuthenticationCode,
key,
tree.Metadata.LastModified.Format(time.RFC3339),
)
if err != nil {
return nil, err
}
if originalMac != mac {
return nil, fmt.Errorf("failed to verify data integrity. expected mac %q, got %q", originalMac, mac)
}
return store.EmitPlainFile(tree.Branches)
}

View File

@ -1,140 +0,0 @@
package keychain
import (
"context"
"errors"
"fmt"
"os"
"path/filepath"
"time"
"filippo.io/age"
"github.com/mitchellh/go-homedir"
"github.com/rs/zerolog/log"
)
func Path() (string, error) {
return homedir.Expand("~/.config/dagger/keys.txt")
}
func EnsureDefaultKey(ctx context.Context) error {
keysFile, err := Path()
if err != nil {
return err
}
// If the keys file already exists, there's nothing to do.
_, err = os.Stat(keysFile)
if err == nil {
return nil
}
// If we got a different error than not existent, abort
if !errors.Is(err, os.ErrNotExist) {
return err
}
// Attempt a migration from the old keys file
migrated, err := migrateKeys(ctx, keysFile)
if err != nil {
return err
}
// If we migrated a previous identity, stop here.
if migrated {
return nil
}
// Otherwise, generate a new key
log.Ctx(ctx).Debug().Msg("generating default key pair")
_, err = Generate(ctx)
return err
}
// migrateKeys attempts a migration from `~/.dagger/keys.txt` to `~/.config/dagger/keys.txt`
func migrateKeys(ctx context.Context, keysFile string) (bool, error) {
oldKeysFile, err := homedir.Expand("~/.dagger/keys.txt")
if err != nil {
return false, err
}
if _, err := os.Stat(oldKeysFile); err != nil {
return false, nil
}
if err := os.MkdirAll(filepath.Dir(keysFile), 0700); err != nil {
return false, err
}
log.Ctx(ctx).Info().Msg("migrating keychain")
return true, os.Rename(oldKeysFile, keysFile)
}
func Default(ctx context.Context) (string, error) {
keys, err := List(ctx)
if err != nil {
return "", err
}
if len(keys) == 0 {
return "", errors.New("no identities found in the keys file")
}
return keys[0].Recipient().String(), nil
}
func Generate(ctx context.Context) (string, error) {
keysFile, err := Path()
if err != nil {
return "", err
}
k, err := age.GenerateX25519Identity()
if err != nil {
return "", fmt.Errorf("internal error: %v", err)
}
if err := os.MkdirAll(filepath.Dir(keysFile), 0700); err != nil {
return "", err
}
f, err := os.OpenFile(keysFile, os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err != nil {
return "", fmt.Errorf("failed to open keys file %q: %v", keysFile, err)
}
defer f.Close()
fmt.Fprintf(f, "# created: %s\n", time.Now().Format(time.RFC3339))
fmt.Fprintf(f, "# public key: %s\n", k.Recipient())
fmt.Fprintf(f, "%s\n", k)
pubkey := k.Recipient().String()
log.Ctx(ctx).Debug().Str("publicKey", pubkey).Msg("keypair generated")
return pubkey, nil
}
func List(ctx context.Context) ([]*age.X25519Identity, error) {
keysFile, err := Path()
if err != nil {
return nil, err
}
f, err := os.Open(keysFile)
if err != nil {
return nil, fmt.Errorf("failed to open keys file %q: %w", keysFile, err)
}
ids, err := age.ParseIdentities(f)
if err != nil {
return nil, fmt.Errorf("failed to parse input: %w", err)
}
keys := make([]*age.X25519Identity, 0, len(ids))
for _, id := range ids {
key, ok := id.(*age.X25519Identity)
if !ok {
return nil, fmt.Errorf("internal error: unexpected identity type: %T", id)
}
keys = append(keys, key)
}
return keys, nil
}

View File

@ -17,7 +17,7 @@ const (
)
func isUniverse(repoName string) bool {
-return strings.HasPrefix(strings.ToLower(repoName), pkg.AlphaModule)
+return strings.HasPrefix(strings.ToLower(repoName), pkg.UniverseModule)
}
// IsUniverseLatest check that current universe is up-to-date or no
@ -32,7 +32,7 @@ func IsUniverseLatest(ctx context.Context, workspace string) (bool, error) {
return false, err
}
-req, err := newRequire(pkg.AlphaModule, UniverseVersionConstraint)
+req, err := newRequire(pkg.UniverseModule, UniverseVersionConstraint)
if err != nil {
return false, err
}

View File

@ -25,7 +25,7 @@ type Require struct {
func newRequire(repoName, versionConstraint string) (*Require, error) {
switch {
-case strings.HasPrefix(repoName, pkg.AlphaModule):
+case strings.HasPrefix(repoName, pkg.UniverseModule):
return parseDaggerRepoName(repoName, versionConstraint)
default:
return parseGitRepoName(repoName, versionConstraint)
@ -52,7 +52,7 @@ func parseGitRepoName(repoName, versionConstraint string) (*Require, error) {
}, nil
}
-var daggerRepoNameRegex = regexp.MustCompile(pkg.AlphaModule + `([a-zA-Z0-9/_.-]*)@?([0-9a-zA-Z.-]*)`)
+var daggerRepoNameRegex = regexp.MustCompile(pkg.UniverseModule + `([a-zA-Z0-9/_.-]*)@?([0-9a-zA-Z.-]*)`)
func parseDaggerRepoName(repoName, versionConstraint string) (*Require, error) {
repoMatches := daggerRepoNameRegex.FindStringSubmatch(repoName)
@ -62,7 +62,7 @@ func parseDaggerRepoName(repoName, versionConstraint string) (*Require, error) {
}
return &Require{
-repo: pkg.AlphaModule,
+repo: pkg.UniverseModule,
path: repoMatches[1],
version: repoMatches[2],
versionConstraint: versionConstraint,

View File

@ -63,9 +63,9 @@ func TestParseArgument(t *testing.T) {
},
{
name: "Alpha Dagger repo with path",
-in: "alpha.dagger.io/gcp/gke@v0.1.0-alpha.20",
+in: "universe.dagger.io/gcp/gke@v0.1.0-alpha.20",
want: &Require{
-repo: "alpha.dagger.io",
+repo: "universe.dagger.io",
path: "/gcp/gke",
version: "v0.1.0-alpha.20",
@ -75,9 +75,9 @@ func TestParseArgument(t *testing.T) {
},
{
name: "Alpha Dagger repo",
-in: "alpha.dagger.io@v0.1.0-alpha.23",
+in: "universe.dagger.io@v0.1.0-alpha.23",
want: &Require{
-repo: "alpha.dagger.io",
+repo: "universe.dagger.io",
path: "",
version: "v0.1.0-alpha.23",

View File

@ -1,2 +0,0 @@
# dagger state
state/**

View File

@ -1,23 +0,0 @@
plan:
package: ./alpine/tests
name: alpine
sops:
kms: []
gcp_kms: []
azure_kv: []
hc_vault: []
age:
- recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
enc: |
-----BEGIN AGE ENCRYPTED FILE-----
YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSB0Ky9vQU16Vms0OUpOaVBk
K3J5R1RqREwvTEdYenVJaEl0VDB2aXJ0QTBrCnM2QnhUa0hXS0tGUjRvMnlveUZT
UitUU2lwUHBGUXVmbms1R2srdW1iSGMKLS0tIFExMzh6WlowVUNzRDR2Rkx0MXh0
N0JOK1FwdzkrcGR5V0xhUDdNOFNvYk0KetOvulxA0Hilyhv+eWBqYO3GXNvm38Y1
9Pa7HYazNyi0qMcZpecWlp4QsOoL876dj1rE62cYHT2hkt2J2ijAUw==
-----END AGE ENCRYPTED FILE-----
lastmodified: "2021-07-08T09:51:43Z"
mac: ENC[AES256_GCM,data:5ApIUpcKaDi3UXL9jyeDBSJJw//BZE2eFZxsuCHVRNyR48FBGj7Wn0Ned2shZ6wNsKcZt5TpxdVILc8SEWwJ9NXeAFuTUZqCZypmDBxGdpo1M5eW0xRAEXQ6UpkG2+2uM0hHCEh8rECcvj0zGHMKgrbXOZt+iyeDuMBMME/V58w=,iv:qWCxgIuVD1RMDrWYgzUSK/KNxS85cJONsV0CzsHf/UU=,tag:NEJM7oZmHXn3ujS2zoN1EA==,type:str]
pgp: []
encrypted_suffix: secret
version: 3.7.1

View File

@ -1,2 +0,0 @@
# dagger state
state/**

Some files were not shown because too many files have changed in this diff