From 5fb7ded2d50bb3f6a91c22a9a46b0218360efa72 Mon Sep 17 00:00:00 2001
From: Tom Chauveau
Date: Thu, 27 May 2021 18:22:41 +0200
Subject: [PATCH] update stdlib tests that use secrets

Signed-off-by: Tom Chauveau
---
 stdlib/aws/aws.cue                            |  16 +-
 stdlib/aws/ecr/ecr.cue                        |   6 +-
 stdlib/aws/eks/eks.cue                        |  19 +-
 stdlib/aws/elb/elb.cue                        |  99 +++---
 stdlib/aws/rds/rds.cue                        | 286 ++++++++++--------
 stdlib/aws/s3/s3.cue                          |  34 +--
 stdlib/netlify/netlify.cue                    |   2 +-
 tests/stdlib.bats                             |   8 +-
 .../aws/eks/.dagger/env/default/.gitignore    |   2 +
 .../aws/eks/.dagger/env/default/plan/eks.cue  |  53 ++++
 .../aws/eks/.dagger/env/default/values.yaml   |  26 ++
 .../aws/s3/.dagger/env/default/plan/s3.cue    |   6 +-
 .../s3/.dagger/env/default/plan/verify.cue    |   8 +-
 tests/stdlib/docker/push-pull/push-pull.cue   |   2 +-
 .../stdlib/netlify/.dagger/env/net/.gitignore |   2 +
 .../netlify/.dagger/env/net/plan/netlify.cue  |  46 +++
 .../netlify/.dagger/env/net/plan/random.cue   |  20 ++
 .../netlify/.dagger/env/net/values.yaml       |  26 ++
 18 files changed, 442 insertions(+), 219 deletions(-)
 create mode 100644 tests/stdlib/aws/eks/.dagger/env/default/.gitignore
 create mode 100644 tests/stdlib/aws/eks/.dagger/env/default/plan/eks.cue
 create mode 100644 tests/stdlib/aws/eks/.dagger/env/default/values.yaml
 create mode 100644 tests/stdlib/netlify/.dagger/env/net/.gitignore
 create mode 100644 tests/stdlib/netlify/.dagger/env/net/plan/netlify.cue
 create mode 100644 tests/stdlib/netlify/.dagger/env/net/plan/random.cue
 create mode 100644 tests/stdlib/netlify/.dagger/env/net/values.yaml

diff --git a/stdlib/aws/aws.cue b/stdlib/aws/aws.cue
index 23ef9040..d48e1d7d 100644
--- a/stdlib/aws/aws.cue
+++ b/stdlib/aws/aws.cue
@@ -40,17 +40,17 @@ import (
                 "pipefail",
                 "-c",
                 #"""
-                    aws configure set aws_access_key_id "$(cat /run/secrets/access_key)"
-                    aws configure set aws_secret_access_key "$(cat /run/secrets/secret_key)"
+                        aws configure set aws_access_key_id "$(cat /run/secrets/access_key)"
+                        aws configure set aws_secret_access_key "$(cat /run/secrets/secret_key)"
 
-                    aws configure set default.region "$AWS_DEFAULT_REGION"
-                    aws configure set default.cli_pager ""
-                    aws configure set default.output "json"
-                    """#
+                        aws configure set default.region "$AWS_DEFAULT_REGION"
+                        aws configure set default.cli_pager ""
+                        aws configure set default.output "json"
+                        """#,
             ]
             mount: "/run/secrets/access_key": secret: config.accessKey
             mount: "/run/secrets/secret_key": secret: config.secretKey
-            env: AWS_DEFAULT_REGION: config.region
+            env: AWS_DEFAULT_REGION:          config.region
         },
     ]
-}
\ No newline at end of file
+}
diff --git a/stdlib/aws/ecr/ecr.cue b/stdlib/aws/ecr/ecr.cue
index 6a7018d8..7a240018 100644
--- a/stdlib/aws/ecr/ecr.cue
+++ b/stdlib/aws/ecr/ecr.cue
@@ -38,15 +38,15 @@ import (
                     "pipefail",
                     "-c",
                     #"""
-                        aws ecr get-login-password > /out
-                        """#
+                            aws ecr get-login-password > /out
+                            """#,
                 ]
             },
 
             op.#Export & {
                 source: "/out"
                 format: "string"
-            }
+            },
         ]
     }
 }
diff --git a/stdlib/aws/eks/eks.cue b/stdlib/aws/eks/eks.cue
index 64dfa53a..150fe5f8 100644
--- a/stdlib/aws/eks/eks.cue
+++ b/stdlib/aws/eks/eks.cue
@@ -18,12 +18,14 @@ import (
 
     // kubeconfig is the generated kube configuration file
     kubeconfig: {
-        // FIXME There is a problem with dagger.#Secret type
+        @dagger(output)
         string
 
         #up: [
             op.#Load & {
-                from: aws.#CLI
+                from: aws.#CLI & {
+                    "config": config
+                }
             },
 
             op.#WriteFile & {
@@ -42,15 +44,8 @@ import (
                     "/entrypoint.sh",
                 ]
                 env: {
-                    AWS_CONFIG_FILE:       "/cache/aws/config"
-                    AWS_ACCESS_KEY_ID:     config.accessKey
-                    AWS_SECRET_ACCESS_KEY: config.secretKey
-                    AWS_DEFAULT_REGION:    config.region
-                    AWS_REGION:            config.region
-                    AWS_DEFAULT_OUTPUT:    "json"
-                    AWS_PAGER:             ""
-                    EKS_CLUSTER:           clusterName
-                    KUBECTL_VERSION:       version
+                    EKS_CLUSTER:     clusterName
+                    KUBECTL_VERSION: version
                 }
                 mount: {
                     "/cache/aws": "cache"
@@ -62,5 +57,5 @@ import (
                 format: "string"
             },
         ]
-    } @dagger(output)
+    }
 }
diff --git a/stdlib/aws/elb/elb.cue b/stdlib/aws/elb/elb.cue
index 5a3757c7..b1bda016 100644
--- a/stdlib/aws/elb/elb.cue
+++ b/stdlib/aws/elb/elb.cue
@@ -18,54 +18,65 @@ import (
 
     // exported priority
     priority: out @dagger(output)
 
-    out: string
+    out: {
+        string
 
-    aws.#Script & {
-        always: true
+        #up: [
+            op.#Load & {
+                from: aws.#CLI & {
+                    "config": config
+                }
+            },
 
-        files: {
-            "/inputs/listenerArn": listenerArn
-            if vhost != _|_ {
-                "/inputs/vhost": vhost
-            }
-        }
+            op.#Exec & {
+                args: [
+                    "/bin/bash",
+                    "--noprofile",
+                    "--norc",
+                    "-eo",
+                    "pipefail",
+                    "-c",
+                    #"""
+                        if [ -n "$VHOST" ]; then
+                            # We passed a vhost as input, try to recycle priority from previously allocated vhost
+                            priority=$(aws elbv2 describe-rules \
+                                --listener-arn "$LISTENER_ARN" | \
+                                jq -r --arg vhost "$VHOST" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $VHOST) | .Priority')
 
-        export: "/priority"
+                            if [ -n "${priority}" ]; then
+                                echo -n "${priority}" > /priority
+                                exit 0
+                            fi
+                        fi
 
-        //FIXME: The code below can end up not finding an available prio
-        // Better to exclude the existing allocated priorities from the random sequence
-        code: #"""
-            if [ -s /inputs/vhost ]; then
-                # We passed a vhost as input, try to recycle priority from previously allocated vhost
-                vhost="$(cat /inputs/vhost)"
+                        # Grab a priority random from 1-50k and check if available, retry 10 times if none available
+                        priority=0
+                        for i in {1..10}
+                        do
+                            p=$(shuf -i 1-50000 -n 1)
+                            # Find the next priority available that we can allocate
+                            aws elbv2 describe-rules \
+                                --listener-arn "$LISTENER_ARN" \
+                                | jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
+                            priority="${p}"
+                            break
+                        done
+                        if [ "${priority}" -lt 1 ]; then
+                            echo "Error: cannot determine a Rule priority"
+                            exit 1
+                        fi
+                        echo -n "${priority}" > /priority
+                        """#,
+                ]
+                env: {
+                    LISTENER_ARN: listenerArn
+                    if vhost != _|_ {
+                        VHOST: vhost
+                    }
+                }
+            },
 
-            priority=$(aws elbv2 describe-rules \
-                --listener-arn "$(cat /inputs/listenerArn)" | \
-                jq -r --arg vhost "$vhost" '.Rules[] | select(.Conditions[].HostHeaderConfig.Values[] == $vhost) | .Priority')
-
-            if [ -n "${priority}" ]; then
-                echo -n "${priority}" > /priority
-                exit 0
-            fi
-            fi
-
-            # Grab a priority random from 1-50k and check if available, retry 10 times if none available
-            priority=0
-            for i in {1..10}
-            do
-                p=$(shuf -i 1-50000 -n 1)
-                # Find the next priority available that we can allocate
-                aws elbv2 describe-rules \
-                    --listener-arn "$(cat /inputs/listenerArn)" \
-                    | jq -e "select(.Rules[].Priority == \"${p}\") | true" && continue
-                priority="${p}"
-                break
-            done
-            if [ "${priority}" -lt 1 ]; then
-                echo "Error: cannot determine a Rule priority"
-                exit 1
-            fi
-            echo -n "${priority}" > /priority
-            """#
+            op.#Export & {
+                source: "/priority"
+                format: "string"
+            },
+        ]
     }
 }
diff --git a/stdlib/aws/rds/rds.cue b/stdlib/aws/rds/rds.cue
index 920f08e8..089d214e 100644
--- a/stdlib/aws/rds/rds.cue
+++ b/stdlib/aws/rds/rds.cue
@@ -22,45 +22,60 @@ import (
     dbType: "mysql" | "postgres" @dagger(input)
 
     // Name of the DB created
-    out: string @dagger(output)
+    out: {
+        @dagger(output)
+        string
 
-    aws.#Script & {
-        "config": config
+        #up: [
+            op.#Load & {
+                from: aws.#CLI & {
+                    "config": config
+                }
+            },
 
-        files: {
-            "/inputs/name": name
"/inputs/db_arn": dbArn - "/inputs/secret_arn": secretArn - "/inputs/db_type": dbType - } + op.#Exec & { + args: [ + "/bin/bash", + "--noprofile", + "--norc", + "-eo", + "pipefail", + #""" + echo "dbType: $DB_TYPE" - export: "/db_created" + sql="CREATE DATABASE \`"$NAME" \`" + if [ "$DB_TYPE" = postgres ]; then + sql="CREATE DATABASE \""$NAME"\"" + fi - code: #""" - set +o pipefail + echo "$NAME" >> /db_created - dbType="$(cat /inputs/db_type)" - echo "dbType: $dbType" + aws rds-data execute-statement \ + --resource-arn "$DB_ARN" \ + --secret-arn "$SECRET_ARN" \ + --sql "$sql" \ + --database "$DB_TYPE" \ + --no-include-result-metadata \ + |& tee /tmp/out + exit_code=${PIPESTATUS[0]} + if [ $exit_code -ne 0 ]; then + grep -q "database exists\|already exists" /tmp/out || exit $exit_code + fi + """#, + ] + env: { + NAME: name + DB_ARN: dbArn + SECRET_ARN: secretArn + DB_TYPE: dbType + } + }, - sql="CREATE DATABASE \`$(cat /inputs/name)\`" - if [ "$dbType" = postgres ]; then - sql="CREATE DATABASE \"$(cat /inputs/name)\"" - fi - - cp /inputs/name /db_created - - aws rds-data execute-statement \ - --resource-arn "$(cat /inputs/db_arn)" \ - --secret-arn "$(cat /inputs/secret_arn)" \ - --sql "$sql" \ - --database "$dbType" \ - --no-include-result-metadata \ - |& tee /tmp/out - exit_code=${PIPESTATUS[0]} - if [ $exit_code -ne 0 ]; then - grep -q "database exists\|already exists" /tmp/out || exit $exit_code - fi - """# + op.#Export & { + source: "/db_created" + format: "string" + }, + ] } } @@ -69,89 +84,104 @@ import ( config: aws.#Config // Username - username: dagger.#Secret + username: dagger.#Secret @dagger(input) // Password - password: dagger.#Secret + password: dagger.#Secret @dagger(input) // ARN of the database instance - dbArn: string + dbArn: string @dagger(input) // ARN of the database secret (for connecting via rds api) - secretArn: string + secretArn: string @dagger(input) - grantDatabase: string | *"" + grantDatabase: string | *"" @dagger(input) - dbType: "mysql" | "postgres" + dbType: "mysql" | "postgres" @dagger(input) // Outputed username - out: string + out: { + @dagger(output) + string - aws.#Script & { - "config": config + #up: [ + op.#Load & { + from: aws.#CLI & { + "config": config + } + }, - files: { - "/inputs/username": username - "/inputs/password": password - "/inputs/db_arn": dbArn - "/inputs/secret_arn": secretArn - "/inputs/grant_database": grantDatabase - "/inputs/db_type": dbType - } + op.#Exec & { + args: [ + "/bin/bash", + "--noprofile", + "--norc", + "-eo", + "pipefail", + #""" + echo "dbType: $DB_TYPE" + + sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'" + if [ "$DB_TYPE" = postgres ]; then + sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'" + fi + + echo "$USERNAME" >> /username + + aws rds-data execute-statement \ + --resource-arn "$DB_ARN" \ + --secret-arn "$SECRET_ARN" \ + --sql "$sql" \ + --database "$DB_TYPE" \ + --no-include-result-metadata \ + |& tee tmp/out + exit_code=${PIPESTATUS[0]} + if [ $exit_code -ne 0 ]; then + grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code + fi + + sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')" + if [ "$DB_TYPE" = postgres ]; then + sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'" + fi + + aws rds-data execute-statement \ + --resource-arn "$DB_ARN" \ + --secret-arn "$SECRET_ARN" \ + --sql "$sql" \ + --database "$DB_TYPE" \ + --no-include-result-metadata + + sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'" + if [ "$DB_TYPE" = 
+                        if [ "$DB_TYPE" = postgres ]; then
+                            sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRANT_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
+                        fi
+
+                        if [ -n "$GRANT_DATABASE" ]; then
+                            aws rds-data execute-statement \
+                                --resource-arn "$DB_ARN" \
+                                --secret-arn "$SECRET_ARN" \
+                                --sql "$sql" \
+                                --database "$DB_TYPE" \
+                                --no-include-result-metadata
+                        fi
+                        """#,
+                ]
+                env: {
+                    USERNAME:       username
+                    PASSWORD:       password
+                    DB_ARN:         dbArn
+                    SECRET_ARN:     secretArn
+                    GRANT_DATABASE: grantDatabase
+                    DB_TYPE:        dbType
+                }
+            },
 
-        export: "/username"
-
-        code: #"""
-            set +o pipefail
-
-            dbType="$(cat /inputs/db_type)"
-            echo "dbType: $dbType"
-
-            sql="CREATE USER '$(cat /inputs/username)'@'%' IDENTIFIED BY '$(cat /inputs/password)'"
-            if [ "$dbType" = postgres ]; then
-                sql="CREATE USER \"$(cat /inputs/username)\" WITH PASSWORD '$(cat /inputs/password)'"
-            fi
-
-            cp /inputs/username /username
-
-            aws rds-data execute-statement \
-                --resource-arn "$(cat /inputs/db_arn)" \
-                --secret-arn "$(cat /inputs/secret_arn)" \
-                --sql "$sql" \
-                --database "$dbType" \
-                --no-include-result-metadata \
-                |& tee tmp/out
-            exit_code=${PIPESTATUS[0]}
-            if [ $exit_code -ne 0 ]; then
-                grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
-            fi
-
-            sql="SET PASSWORD FOR '$(cat /inputs/username)'@'%' = PASSWORD('$(cat /inputs/password)')"
-            if [ "$dbType" = postgres ]; then
-                sql="ALTER ROLE \"$(cat /inputs/username)\" WITH PASSWORD '$(cat /inputs/password)'"
-            fi
-
-            aws rds-data execute-statement \
-                --resource-arn "$(cat /inputs/db_arn)" \
-                --secret-arn "$(cat /inputs/secret_arn)" \
-                --sql "$sql" \
-                --database "$dbType" \
-                --no-include-result-metadata
-
-            sql="GRANT ALL ON \`$(cat /inputs/grant_database)\`.* to '$(cat /inputs/username)'@'%'"
-            if [ "$dbType" = postgres ]; then
-                sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \"$(cat /inputs/username)\"; GRANT ALL PRIVILEGES ON DATABASE \"$(cat /inputs/grant_database)\" to \"$(cat /inputs/username)\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \"$(cat /inputs/username)\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \"$(cat /inputs/username)\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \"$(cat /inputs/username)\"; GRANT USAGE ON SCHEMA public TO \"$(cat /inputs/username)\";"
-            fi
-
-            if [ -s /inputs/grant_database ]; then
-                aws rds-data execute-statement \
-                    --resource-arn "$(cat /inputs/db_arn)" \
-                    --secret-arn "$(cat /inputs/secret_arn)" \
-                    --sql "$sql" \
-                    --database "$dbType" \
-                    --no-include-result-metadata
-            fi
-            """#
+            op.#Export & {
+                source: "/username"
+                format: "string"
+            },
+        ]
     }
 }
 
@@ -160,35 +190,51 @@ import (
     config: aws.#Config
 
     // ARN of the database instance
-    dbArn: string
+    dbArn: string @dagger(input)
 
     // DB hostname
-    hostname: info.hostname
+    hostname: info.hostname @dagger(output)
 
     // DB port
-    port: info.port
+    port: info.port @dagger(output)
 
     info: {
         hostname: string
         port:     int
     }
 
-    info: json.Unmarshal(out)
-    out: string
+    info: json.Unmarshal(out) @dagger(output)
+    out: {
+        string
 
-    aws.#Script & {
-        "config": config
+        #up: [
+            op.#Load & {
+                from: aws.#CLI & {
+                    "config": config
+                }
+            },
files: "/inputs/db_arn": dbArn + op.#Exec & { + args: [ + "/bin/bash", + "--noprofile", + "--norc", + "-eo", + "pipefail", + #""" + data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$DB_URN" ) + echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out + echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out + cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out + """#, + ] + env: DB_ARN: dbArn + }, - export: "/out" - - code: #""" - db_arn="$(cat /inputs/db_arn)" - data=$(aws rds describe-db-clusters --filters "Name=db-cluster-id,Values=$db_arn" ) - echo "$data" | jq -r '.DBClusters[].Endpoint' > /tmp/out - echo "$data" | jq -r '.DBClusters[].Port' >> /tmp/out - cat /tmp/out | jq -sR 'split("\n") | {hostname: .[0], port: (.[1] | tonumber)}' > /out - """# + op.#Export & { + source: "/out" + format: "json" + }, + ] } } diff --git a/stdlib/aws/s3/s3.cue b/stdlib/aws/s3/s3.cue index 1d33c752..9555f419 100644 --- a/stdlib/aws/s3/s3.cue +++ b/stdlib/aws/s3/s3.cue @@ -41,17 +41,17 @@ import ( if sourceInline != _|_ { op.#WriteFile & { - dest: "/source" + dest: "/source" content: sourceInline } - } + }, op.#Exec & { if always != _|_ { "always": always } env: { - TARGET: target + TARGET: target CONTENT_TYPE: contentType } @@ -67,26 +67,26 @@ import ( "pipefail", "-c", #""" - opts="" - op=cp - if [ -d /source ]; then - op=sync - fi - if [ -n "$CONTENT_TYPE" ]; then - opts="--content-type $CONTENT_TYPE" - fi - aws s3 $op $opts /source "$TARGET" - echo "$TARGET" \ - | sed -E 's=^s3://([^/]*)/=https://\1.s3.amazonaws.com/=' \ - > /url - """# + opts="" + op=cp + if [ -d /source ]; then + op=sync + fi + if [ -n "$CONTENT_TYPE" ]; then + opts="--content-type $CONTENT_TYPE" + fi + aws s3 $op $opts /source "$TARGET" + echo "$TARGET" \ + | sed -E 's=^s3://([^/]*)/=https://\1.s3.amazonaws.com/=' \ + > /url + """#, ] }, op.#Export & { source: "/url" format: "string" - } + }, ] } } diff --git a/stdlib/netlify/netlify.cue b/stdlib/netlify/netlify.cue index 44b44778..b209b34e 100644 --- a/stdlib/netlify/netlify.cue +++ b/stdlib/netlify/netlify.cue @@ -83,7 +83,7 @@ import ( NETLIFY_ACCOUNT: account.name } dir: "/src" - mount: "/src": from: contents + mount: "/src": from: contents mount: "/run/secrets/token": secret: account.token } } diff --git a/tests/stdlib.bats b/tests/stdlib.bats index cf4325f7..1032aad0 100644 --- a/tests/stdlib.bats +++ b/tests/stdlib.bats @@ -21,9 +21,7 @@ setup() { } @test "stdlib: netlify" { - skip_unless_secrets_available "$TESTDIR"/stdlib/netlify/inputs.yaml - - "$DAGGER" compute "$TESTDIR"/stdlib/netlify --input-yaml "$TESTDIR"/stdlib/netlify/inputs.yaml + "$DAGGER" up -w "$TESTDIR"/stdlib/netlify/ } @test "stdlib: kubernetes" { @@ -47,9 +45,7 @@ setup() { } @test "stdlib: aws: eks" { - skip_unless_secrets_available "$TESTDIR"/stdlib/aws/inputs.yaml - - "$DAGGER" compute "$TESTDIR"/stdlib/aws/eks --input-yaml "$TESTDIR"/stdlib/aws/inputs.yaml + "$DAGGER" up -w "$TESTDIR"/stdlib/aws/eks } @test "stdlib: aws: ecr" { diff --git a/tests/stdlib/aws/eks/.dagger/env/default/.gitignore b/tests/stdlib/aws/eks/.dagger/env/default/.gitignore new file mode 100644 index 00000000..01ec19b0 --- /dev/null +++ b/tests/stdlib/aws/eks/.dagger/env/default/.gitignore @@ -0,0 +1,2 @@ +# dagger state +state/** diff --git a/tests/stdlib/aws/eks/.dagger/env/default/plan/eks.cue b/tests/stdlib/aws/eks/.dagger/env/default/plan/eks.cue new file mode 100644 index 00000000..d0a92e9d --- /dev/null +++ b/tests/stdlib/aws/eks/.dagger/env/default/plan/eks.cue @@ 
@@ -0,0 +1,53 @@
+package eks
+
+import (
+    "dagger.io/aws"
+    "dagger.io/aws/eks"
+    "dagger.io/kubernetes"
+    "dagger.io/dagger/op"
+)
+
+TestConfig: awsConfig: aws.#Config & {
+    region: "us-east-2"
+}
+
+TestCluster: eks.#KubeConfig & {
+    config:      TestConfig.awsConfig
+    clusterName: *"dagger-example-eks-cluster" | string
+}
+
+TestEks: {
+    #GetPods:
+        """
+        kubectl get pods -A
+        """
+
+    #up: [
+        op.#Load & {
+            from: kubernetes.#Kubectl
+        },
+
+        op.#WriteFile & {
+            dest:    "/kubeconfig"
+            content: TestCluster.kubeconfig
+        },
+
+        op.#WriteFile & {
+            dest:    "/getPods.sh"
+            content: #GetPods
+        },
+
+        op.#Exec & {
+            always: true
+            args: [
+                "/bin/bash",
+                "--noprofile",
+                "--norc",
+                "-eo",
+                "pipefail",
+                "/getPods.sh",
+            ]
+            env: KUBECONFIG: "/kubeconfig"
+        },
+    ]
+}
diff --git a/tests/stdlib/aws/eks/.dagger/env/default/values.yaml b/tests/stdlib/aws/eks/.dagger/env/default/values.yaml
new file mode 100644
index 00000000..0d793b24
--- /dev/null
+++ b/tests/stdlib/aws/eks/.dagger/env/default/values.yaml
@@ -0,0 +1,26 @@
+name: default
+inputs:
+    TestConfig.awsConfig.accessKey:
+        secret: ENC[AES256_GCM,data:dzhlip9kKU8mMEycFjq6MobD5BA=,iv:LKeYUbXpnWIZneGs7DCLVKxv1W2aa/3EVGO4jnDlOgc=,tag:+TcxQahxFTweyoPaROTJSQ==,type:str]
+    TestConfig.awsConfig.secretKey:
+        secret: ENC[AES256_GCM,data:bu3AI5jODWv4ePvRKw2l/1UOuH07Z0/oB2hiY4QqrhTcfjdSbr6kBg==,iv:BqddzzXqvAv0cAj2SVhoFx/kUOnRsoevqMRujCINVv0=,tag:u0KjVnbN8h54CLFARJmJ0g==,type:str]
+sops:
+    kms: []
+    gcp_kms: []
+    azure_kv: []
+    hc_vault: []
+    age:
+        - recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
+          enc: |
+            -----BEGIN AGE ENCRYPTED FILE-----
+            YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSAzV0ZXNW5qaGNJMjF5bnBO
+            d1Z1RXFhSnNRM1Vwa3lyWFJ6VVFDZTQ3cUhZClh0N1lxZ3dwSFhHTjRyS092OVVj
+            Tkw4ZlU4S3g0T1VGS1RYYnB1dGlzbVkKLS0tIEc4T1Z3SEU2NUNhd2FkSXlIUERM
+            UE5Cd2VwYkd1MHlTOXNJVEU3RVpqU2sK86kXU6ZaaVHTg9BuCEcOxnDrrW00+bwu
+            AHttbzqYVuC3YxXjOTzAZL8aYTStk14wGdI6TirZ9pX0fyaKAfzBUQ==
+            -----END AGE ENCRYPTED FILE-----
+    lastmodified: "2021-05-27T16:01:59Z"
+    mac: ENC[AES256_GCM,data:T+0rcT9Xi/kJ8+EzCd7ewenDmc1cH/t2MxCpf+QXkILUC/uE8OgROizDMAiUYI2HpeBfZrmUgLMVzlTZirIbC51eWLAf6itbSIGKkVuz0uSNwhRpKGAROg6U1h39Scg6RpAvpzSTZvYOx5SwP78Uc6NQdp5yTDEb+0e9Wqzu+jU=,iv:INAN+EPwBv5dWWHQnaMr4QOBQWx3WCcohORvIPrBZN8=,tag:N4vtDowFKTDSHmMob5HgCw==,type:str]
+    pgp: []
+    encrypted_suffix: secret
+    version: 3.7.1
diff --git a/tests/stdlib/aws/s3/.dagger/env/default/plan/s3.cue b/tests/stdlib/aws/s3/.dagger/env/default/plan/s3.cue
index fab14d50..e5a9f690 100644
--- a/tests/stdlib/aws/s3/.dagger/env/default/plan/s3.cue
+++ b/tests/stdlib/aws/s3/.dagger/env/default/plan/s3.cue
@@ -24,7 +24,7 @@ TestS3UploadFile: {
     verify: #VerifyS3 & {
         config: TestConfig.awsConfig
         target: deploy.target
-        file: "test.txt"
+        file:   "test.txt"
     }
 }
 
@@ -40,12 +40,12 @@ TestS3UploadDir: {
     verifyFile: #VerifyS3 & {
         config: TestConfig.awsConfig
         target: deploy.target
-        file: "dirFile.txt"
+        file:   "dirFile.txt"
     }
 
     verifyDir: #VerifyS3 & {
         config: TestConfig.awsConfig
         target: deploy.target
-        file: "foo.txt"
+        file:   "foo.txt"
     }
 }
diff --git a/tests/stdlib/aws/s3/.dagger/env/default/plan/verify.cue b/tests/stdlib/aws/s3/.dagger/env/default/plan/verify.cue
index 92724885..d1fb596b 100644
--- a/tests/stdlib/aws/s3/.dagger/env/default/plan/verify.cue
+++ b/tests/stdlib/aws/s3/.dagger/env/default/plan/verify.cue
@@ -33,20 +33,20 @@ import (
                 "-c",
                 #"""
                     aws s3 ls --recursive \#(target) > /contents
-                    """#
+                    """#,
             ]
         },
 
         op.#Export & {
             source: "/contents"
             format: "string"
-        }
+        },
     ]
 }
 }
 
 #VerifyS3: {
-    file: string
+    file:   string
     config: aws.#Config
     target: string
 
@@ -76,7 +76,7 @@
                 "-eo",
                 "pipefail",
                 "-c",
-                "grep -q \(file) /test"
+                "grep -q \(file) /test",
             ]
         },
     ]
diff --git a/tests/stdlib/docker/push-pull/push-pull.cue b/tests/stdlib/docker/push-pull/push-pull.cue
index 0487db70..40b72be3 100644
--- a/tests/stdlib/docker/push-pull/push-pull.cue
+++ b/tests/stdlib/docker/push-pull/push-pull.cue
@@ -11,7 +11,7 @@ source: dagger.#Artifact
 
 registry: {
     username: string
-    secret: dagger.#Secret
+    secret:   string
 }
 
 TestPushAndPull: {
diff --git a/tests/stdlib/netlify/.dagger/env/net/.gitignore b/tests/stdlib/netlify/.dagger/env/net/.gitignore
new file mode 100644
index 00000000..01ec19b0
--- /dev/null
+++ b/tests/stdlib/netlify/.dagger/env/net/.gitignore
@@ -0,0 +1,2 @@
+# dagger state
+state/**
diff --git a/tests/stdlib/netlify/.dagger/env/net/plan/netlify.cue b/tests/stdlib/netlify/.dagger/env/net/plan/netlify.cue
new file mode 100644
index 00000000..798a2f35
--- /dev/null
+++ b/tests/stdlib/netlify/.dagger/env/net/plan/netlify.cue
@@ -0,0 +1,46 @@
+package netlify
+
+import (
+    "dagger.io/dagger/op"
+    "dagger.io/alpine"
+    "dagger.io/netlify"
+)
+
+TestNetlify: {
+    // Generate a website containing the random number
+    html: #up: [
+        op.#WriteFile & {
+            content: random
+            dest:    "index.html"
+        },
+    ]
+
+    // Deploy to netlify
+    deploy: netlify.#Site & {
+        contents: html
+        name:     "dagger-test"
+    }
+
+    // Check if the deployed site has the random marker
+    check: #up: [
+        op.#Load & {
+            from: alpine.#Image & {
+                package: bash: "=~5.1"
+                package: curl: true
+            }
+        },
+        op.#Exec & {
+            args: [
+                "/bin/bash",
+                "--noprofile",
+                "--norc",
+                "-eo",
+                "pipefail",
+                "-c",
+                #"""
+                    test "$(curl \#(deploy.deployUrl))" = "\#(random)"
+                    """#,
+            ]
+        },
+    ]
+}
diff --git a/tests/stdlib/netlify/.dagger/env/net/plan/random.cue b/tests/stdlib/netlify/.dagger/env/net/plan/random.cue
new file mode 100644
index 00000000..a9c5c710
--- /dev/null
+++ b/tests/stdlib/netlify/.dagger/env/net/plan/random.cue
@@ -0,0 +1,20 @@
+package netlify
+
+import (
+    "dagger.io/alpine"
+    "dagger.io/dagger/op"
+)
+
+// Generate a random number
+random: {
+    string
+    #up: [
+        op.#Load & {from: alpine.#Image},
+        op.#Exec & {
+            args: ["sh", "-c", "cat /dev/urandom | tr -dc 'a-z' | fold -w 10 | head -n 1 | tr -d '\n' > /rand"]
+        },
+        op.#Export & {
+            source: "/rand"
+        },
+    ]
+}
diff --git a/tests/stdlib/netlify/.dagger/env/net/values.yaml b/tests/stdlib/netlify/.dagger/env/net/values.yaml
new file mode 100644
index 00000000..2045a92d
--- /dev/null
+++ b/tests/stdlib/netlify/.dagger/env/net/values.yaml
@@ -0,0 +1,26 @@
+name: net
+inputs:
+    TestNetlify.deploy.account.name:
+        text: blocklayer
+    TestNetlify.deploy.account.token:
+        secret: ENC[AES256_GCM,data:oWKi8eqTUEs+YClokLKeAKsEj3qae4yQTn/67u6ga4Ptcq4+MyYS/6wAUg==,iv:Xfw+L/4p7vO+jb/EVyYOvsIZ9KxZbi30ms2Ckg4E8cE=,tag:G4EBBer04D6FHFP9e+feTw==,type:str]
+sops:
+    kms: []
+    gcp_kms: []
+    azure_kv: []
+    hc_vault: []
+    age:
+        - recipient: age1gxwmtwahzwdmrskhf90ppwlnze30lgpm056kuesrxzeuyclrwvpsupwtpk
+          enc: |
+            -----BEGIN AGE ENCRYPTED FILE-----
+            YWdlLWVuY3J5cHRpb24ub3JnL3YxCi0+IFgyNTUxOSBwWHFLRUtscWVma0lQM3Qv
+            M2czUFJhSEpnczdJTWhadnJHOWowaXd1dWtJCmk3aU15NDJYcmtUOE5pZ0lIQzRu
+            dTYvRFdsM0ZoUjFWSG91UnZRVWdvZjgKLS0tIENhK2VWNHByY3hYNUVmWDRmOUFM
+            SEdUK2RsaUxuVWg2aXUwdVJ0eUtrWWMKWkQDBuL5e4QDx5Wy6+fHiD+J4fp7QdMm
+            lsqgmxRvJMWgEvm1U+hDAo/Pkn8PFUFJf0KxEvkdF4qGuguQePgzFQ==
+            -----END AGE ENCRYPTED FILE-----
+    lastmodified: "2021-05-27T14:36:27Z"
+    mac: ENC[AES256_GCM,data:S3l8tVat/Yp7fH5feeL4JxL+uQwZ0zwv8/LPsOoBebfDFWuE/j9sFZD304OT7XNCsfG8R/lqdpoxYmiyH6ToHeZyktXalpk0tAkwFXUV4VUZKyIn81UirbtWx4OT6fW7jusqqg2uX3nhvjGd+QerhEC4Qu4o8lQCKCMzLuQjmVw=,iv:4ucl0O+VgdK/SwtEad1jXIWJ4pQSxlWCCUzFbqNLDgg=,tag:xno2U/FIVW6KgSXW5RWDsw==,type:str]
+    pgp: []
+    encrypted_suffix: secret
+    version: 3.7.1