Merge pull request #643 from samalba/stdlib-docs

docs: Stdlib consistency and cleanup

commit 693a3470fb
@@ -4,7 +4,7 @@ sidebar_label: cloudformation
 # dagger.io/aws/cloudformation
 
-AWS Cloud Formation
+AWS CloudFormation
 
 ## #Stack
 
@@ -19,10 +19,13 @@ AWS CloudFormation Stack
 |*config.secretKey* | `dagger.#Secret` |AWS secret key |
 |*source* | `string` |Source is the Cloudformation template (JSON/YAML string) |
 |*stackName* | `string` |Stackname is the cloudformation stack |
+|*parameters* | `struct` |Stack parameters |
 |*onFailure* | `*"DO_NOTHING" \| "ROLLBACK" \| "DELETE"` |Behavior when failure to create/update the Stack |
 |*timeout* | `*10 \| \>=0 & int` |Maximum waiting time until stack creation/update (in minutes) |
-|*neverUpdate* | `*false \| bool` |Never update the stack if already exists |
+|*neverUpdate* | `*false \| true` |Never update the stack if already exists |
 
 ### #Stack Outputs
 
-_No output._
+| Name | Type | Description |
+| ------------- |:-------------: |:-------------: |
+|*outputs* | `struct` |- |
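To see how the updated #Stack surface fits together, here is a minimal plan sketch. The stack name, inline template, and parameter name are illustrative placeholders, and the import paths are assumed to match the package documented above; credentials are expected to be wired in as dagger inputs.

```cue
package main

import (
	"dagger.io/aws"
	"dagger.io/aws/cloudformation"
)

// accessKey/secretKey are dagger.#Secret inputs supplied at runtime.
awsConfig: aws.#Config & {
	region: "us-east-1"
}

stack: cloudformation.#Stack & {
	config:    awsConfig
	stackName: "sample-stack" // illustrative name
	// Inline CloudFormation template, passed as a JSON/YAML string.
	source: """
		{"Resources": {"SampleBucket": {"Type": "AWS::S3::Bucket"}}}
		"""
	// Newly documented input: free-form stack parameters.
	parameters: SampleParameter: "sample-value"
	onFailure: "ROLLBACK"
	timeout:   10
}

// Newly documented output: the CloudFormation stack outputs as a struct.
stackOutputs: stack.outputs
```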
@@ -8,7 +8,7 @@ Amazon Elastic Container Registry (ECR)
 ## #Credentials
 
-Convert AWS credentials to Docker Registry credentials for ECR
+Convert ECR credentials to Docker Login format
 
 ### #Credentials Inputs
 
@@ -23,4 +23,7 @@ Convert AWS credentials to Docker Registry credentials for ECR
 ### #Credentials Outputs
 
-_No output._
+| Name | Type | Description |
+| ------------- |:-------------: |:-------------: |
+|*username* | `"AWS"` |ECR registry |
+|*secret* | `string` |ECR registry secret |
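A short sketch of consuming the two newly documented outputs; the region is a placeholder and the import paths are assumed to match the ones used elsewhere in this diff.

```cue
package main

import (
	"dagger.io/aws"
	"dagger.io/aws/ecr"
)

awsConfig: aws.#Config & {
	region: "us-east-1"
}

// Resolve ECR credentials in Docker login format.
creds: ecr.#Credentials & {
	config: awsConfig
}

// The outputs documented above: a fixed username and a login token.
registryUsername: creds.username // always "AWS"
registrySecret:   creds.secret
```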
@@ -6,11 +6,11 @@ sidebar_label: rds
 AWS Relational Database Service (RDS)
 
-## #CreateDB
+## #Database
 
 Creates a new Database on an existing RDS Instance
 
-### #CreateDB Inputs
+### #Database Inputs
 
 | Name | Type | Description |
 | ------------- |:-------------: |:-------------: |
@@ -22,36 +22,12 @@ Creates a new Database on an existing RDS Instance
 |*secretArn* | `string` |ARN of the database secret (for connecting via rds api) |
 |*dbType* | `string` |Database type MySQL or PostgreSQL (Aurora Serverless only) |
 
-### #CreateDB Outputs
+### #Database Outputs
 
 | Name | Type | Description |
 | ------------- |:-------------: |:-------------: |
 |*out* | `string` |Name of the DB created |
 
-## #CreateUser
-
-Creates a new user credentials on an existing RDS Instance
-
-### #CreateUser Inputs
-
-| Name | Type | Description |
-| ------------- |:-------------: |:-------------: |
-|*config.region* | `string` |AWS region |
-|*config.accessKey* | `dagger.#Secret` |AWS access key |
-|*config.secretKey* | `dagger.#Secret` |AWS secret key |
-|*username* | `string` |Username |
-|*password* | `string` |Password |
-|*dbArn* | `string` |ARN of the database instance |
-|*secretArn* | `string` |ARN of the database secret (for connecting via rds api) |
-|*grantDatabase* | `*"" \| string` |Name of the database to grants access to |
-|*dbType* | `string` |Database type MySQL or PostgreSQL (Aurora Serverless only) |
-
-### #CreateUser Outputs
-
-| Name | Type | Description |
-| ------------- |:-------------: |:-------------: |
-|*out* | `string` |Outputed username |
-
 ## #Instance
 
 Fetches information on an existing RDS Instance
@@ -72,3 +48,27 @@ Fetches information on an existing RDS Instance
 |*hostname* | `_\|_` |DB hostname |
 |*port* | `_\|_` |DB port |
 |*info* | `_\|_` |- |
+
+## #User
+
+Creates a new user credentials on an existing RDS Instance
+
+### #User Inputs
+
+| Name | Type | Description |
+| ------------- |:-------------: |:-------------: |
+|*config.region* | `string` |AWS region |
+|*config.accessKey* | `dagger.#Secret` |AWS access key |
+|*config.secretKey* | `dagger.#Secret` |AWS secret key |
+|*username* | `string` |Username |
+|*password* | `string` |Password |
+|*dbArn* | `string` |ARN of the database instance |
+|*secretArn* | `string` |ARN of the database secret (for connecting via rds api) |
+|*grantDatabase* | `*"" \| string` |Name of the database to grants access to |
+|*dbType* | `string` |Database type MySQL or PostgreSQL (Aurora Serverless only) |
+
+### #User Outputs
+
+| Name | Type | Description |
+| ------------- |:-------------: |:-------------: |
+|*out* | `string` |Outputed username |
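A combined sketch of the renamed definitions, wiring a #User to the database created by #Database. The ARNs, names, and password below are placeholders; reusing the #Database output for grantDatabase mirrors the jamstack example further down in this diff.

```cue
package main

import (
	"dagger.io/aws"
	"dagger.io/aws/rds"
)

awsConfig: aws.#Config & {
	region: "us-east-1"
}

// Create a database on an existing RDS (Aurora Serverless) instance.
db: rds.#Database & {
	config:    awsConfig
	name:      "myapp"
	dbArn:     "arn:aws:rds:us-east-1:000000000000:cluster:placeholder"
	secretArn: "arn:aws:secretsmanager:us-east-1:000000000000:secret:placeholder"
	dbType:    "mysql"
}

// Create user credentials scoped to that database.
dbUser: rds.#User & {
	config:        awsConfig
	username:      "myapp"
	password:      "placeholder-password"
	dbArn:         db.dbArn
	secretArn:     db.secretArn
	grantDatabase: db.out // name of the database created above
	dbType:        "mysql"
}
```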
@@ -6,45 +6,24 @@ sidebar_label: s3
 AWS Simple Storage Service
 
-## #Put
+## #Object
 
-S3 Bucket upload (file or directory)
+S3 Bucket object(s) sync
 
-### #Put Inputs
+### #Object Inputs
 
 | Name | Type | Description |
 | ------------- |:-------------: |:-------------: |
 |*config.region* | `string` |AWS region |
 |*config.accessKey* | `dagger.#Secret` |AWS access key |
 |*config.secretKey* | `dagger.#Secret` |AWS secret key |
-|*target* | `string` |Target S3 URL (eg. s3://\<bucket-name\>/\<path\>/\<sub-path\>) |
-|*contentType* | `*"" \| string` |Object content type |
-|*always* | `*true \| bool` |Always write the object to S3 |
+|*source* | `dagger.#Artifact` |Source Artifact to upload to S3 |
+|*target* | `string` |Target S3 URL (eg. s3://\<bucket-name\>/\<path\>/\<sub-path\>) |
+|*delete* | `*false \| true` |Delete files that already exist on remote destination |
+|*contentType* | `*"" \| string` |Object content type |
+|*always* | `*true \| false` |Always write the object to S3 |
 
-### #Put Outputs
+### #Object Outputs
 
-| Name | Type | Description |
-| ------------- |:-------------: |:-------------: |
-|*url* | `string` |URL of the uploaded S3 object |
-
-## #Sync
-
-S3 Bucket sync
-
-### #Sync Inputs
-
-| Name | Type | Description |
-| ------------- |:-------------: |:-------------: |
-|*config.region* | `string` |AWS region |
-|*config.accessKey* | `dagger.#Secret` |AWS access key |
-|*config.secretKey* | `dagger.#Secret` |AWS secret key |
-|*source* | `dagger.#Artifact` |Source Artifact to upload to S3 |
-|*target* | `string` |Target S3 URL (eg. s3://\<bucket-name\>/\<path\>/\<sub-path\>) |
-|*delete* | `*false \| bool` |Files that exist in the destination but not in the source are deleted during sync. |
-|*contentType* | `*"" \| string` |Object content type |
-|*always* | `*true \| bool` |Always write the object to S3 |
-
-### #Sync Outputs
 
 | Name | Type | Description |
 | ------------- |:-------------: |:-------------: |
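A minimal sketch of the unified #Object definition that replaces #Put and #Sync; the bucket name and paths are placeholders.

```cue
package main

import (
	"dagger.io/dagger"
	"dagger.io/aws"
	"dagger.io/aws/s3"
)

awsConfig: aws.#Config & {
	region: "us-east-1"
}

// Directory to upload, supplied as a dagger input artifact.
site: dagger.#Artifact @dagger(input)

upload: s3.#Object & {
	config: awsConfig
	source: site
	target: "s3://placeholder-bucket/www/"
	// Remove remote files that no longer exist in the source.
	delete: true
	always: true
}

// URL of the uploaded object(s), as used by the deploy example later in this diff.
siteURL: upload.url @dagger(output)
```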
@@ -20,6 +20,7 @@ Credentials retriever for GCR
 
 ### #Credentials Outputs
 
 | Name | Type | Description |
 | ------------- |:-------------: |:-------------: |
-|*secret* | `string` |- |
+|*username* | `"oauth2accesstoken"` |GCR registry username |
+|*secret* | `string` |GCR registry secret |
@@ -4,6 +4,8 @@ sidebar_label: helm
 
 # dagger.io/kubernetes/helm
 
+Helm package manager
+
 ## #Chart
 
 Install a Helm chart
@@ -8,13 +8,14 @@ import (
 
 // Backend configuration
 backend: {
 
 	// Source code to build this container
 	source: git.#Repository | dagger.#Artifact @dagger(input)
 
 	// Container environment variables
 	environment: {
-		[string]: string @dagger(input)
-	}
+		[string]: string
+	} @dagger(input)
 
 	// Public hostname (need to match the master domain configures on the loadbalancer)
 	hostname: string @dagger(input)
@@ -39,16 +40,16 @@ backend: {
 		dockerfilePath: *"" | string @dagger(input)
 		// docker build args
 		dockerBuildArgs: {
-			[string]: string @dagger(input)
-		}
+			[string]: string
+		} @dagger(input)
 	}
 
 	// Init container runs only once when the main container starts
 	initContainer: {
 		command: [...string] @dagger(input)
 		environment: {
-			[string]: string @dagger(input)
-		}
+			[string]: string
+		} @dagger(input)
 	}
 }
 
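The attribute move above changes where the input boundary sits: instead of tagging the pattern constraint, the whole map is exposed as a single dagger input. A small self-contained illustration, with made-up field values:

```cue
package main

// The whole "environment" map is now a single dagger input.
environment: {
	[string]: string
} @dagger(input)

// Concrete values still unify with the map as usual.
environment: {
	DB_HOST: "db.internal"
	DB_NAME: "myapp"
}
```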
@@ -9,7 +9,7 @@ database: {
 	let slug = name
 	dbType: "mysql" | "postgresql" @dagger(input)
 
-	db: rds.#CreateDB & {
+	db: rds.#Database & {
 		config: infra.awsConfig
 		name: slug
 		dbArn: infra.rdsInstanceArn
@@ -17,7 +17,7 @@ database: {
 		secretArn: infra.rdsAdminSecretArn
 	}
 
-	user: rds.#CreateUser & {
+	user: rds.#User & {
 		config: infra.awsConfig
 		dbArn: infra.rdsInstanceArn
 		"dbType": dbType
@@ -20,7 +20,7 @@ source: dagger.#Artifact @dagger(input)
 // Deployed URL
 url: "\(deploy.url)index.html" @dagger(output)
 
-deploy: s3.#Put & {
+deploy: s3.#Object & {
 	always: true
 	config: awsConfig
 	"source": source
stdlib/.dagger/env/aws-s3/plan/s3.cue (vendored)

@@ -14,44 +14,11 @@ bucket: "dagger-ci"
 
 content: "A simple test sentence"
 
-TestS3UploadFile: {
-	deploy: s3.#Put & {
-		config: TestConfig.awsConfig
-		sourceInline: content
-		target: "s3://\(bucket)/test.txt"
-	}
-
-	verify: #VerifyS3 & {
-		config: TestConfig.awsConfig
-		target: deploy.target
-		file: "test.txt"
-	}
-}
-
 TestDirectory: dagger.#Artifact
 
-TestS3UploadDir: {
-	deploy: s3.#Put & {
-		config: TestConfig.awsConfig
-		source: TestDirectory
-		target: "s3://\(bucket)/"
-	}
-
-	verifyFile: #VerifyS3 & {
-		config: TestConfig.awsConfig
-		target: deploy.target
-		file: "dirFile.txt"
-	}
-
-	verifyDir: #VerifyS3 & {
-		config: TestConfig.awsConfig
-		target: deploy.target
-		file: "foo.txt"
-	}
-}
-
-TestS3Sync: {
-	deploy: s3.#Sync & {
+TestS3Object: {
+	deploy: s3.#Object & {
+		always: true
 		config: TestConfig.awsConfig
 		source: TestDirectory
 		target: "s3://\(bucket)/"
@@ -5,13 +5,17 @@ import (
 	"dagger.io/dagger/op"
 )
 
+// Default Alpine version
 let defaultVersion = "3.13.5@sha256:69e70a79f2d41ab5d637de98c1e0b055206ba40a8145e7bddb55ccc04e13cf8f"
 
 // Base image for Alpine Linux
 #Image: {
+	// List of packages to install
 	package: [string]: true | false | string
+	// Alpine version to install
 	version: string | *defaultVersion
 
+	// Use of os package not possible : alpine is a low level component
 	#up: [
 		op.#FetchContainer & {
 			ref: "index.docker.io/alpine:\(version)"
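The newly commented fields of alpine.#Image in use, as a small sketch; the package names are arbitrary examples, and omitting version falls back to the pinned defaultVersion shown above.

```cue
package main

import (
	"dagger.io/alpine"
)

// Base image with a couple of extra packages installed.
base: alpine.#Image & {
	package: {
		bash: true
		curl: true
	}
	// version: left unset, so the default pinned version is used.
}
```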
@@ -1,4 +1,4 @@
-// AWS Cloud Formation
+// AWS CloudFormation
 package cloudformation
 
 import (
@@ -23,7 +23,7 @@ import (
 	// Stack parameters
 	parameters: {
 		...
-	}
+	} @dagger(input)
 
 	// Behavior when failure to create/update the Stack
 	onFailure: *"DO_NOTHING" | "ROLLBACK" | "DELETE" @dagger(input)
@@ -32,7 +32,7 @@ import (
 	timeout: *10 | uint @dagger(input)
 
 	// Never update the stack if already exists
-	neverUpdate: *false | bool @dagger(input)
+	neverUpdate: *false | true @dagger(input)
 
 	#files: {
 		"/entrypoint.sh": #Code
@@ -48,8 +48,8 @@ import (
 	}
 
 	outputs: {
-		[string]: string @dagger(output)
-	}
+		[string]: string
+	} @dagger(output)
 
 	outputs: #up: [
 		op.#Load & {
@@ -6,13 +6,13 @@ import (
 	"dagger.io/os"
 )
 
-// Convert AWS credentials to Docker Registry credentials for ECR
+// Convert ECR credentials to Docker Login format
 #Credentials: {
 	// AWS Config
 	config: aws.#Config
 
-	// ECR credentials
-	username: "AWS"
+	// ECR registry
+	username: "AWS" @dagger(output)
 
 	ctr: os.#Container & {
 		image: aws.#CLI & {
@@ -22,10 +22,11 @@ import (
 		command: "aws ecr get-login-password > /out"
 	}
 
+	// ECR registry secret
 	secret: {
 		os.#File & {
 			from: ctr
			path: "/out"
 		}
-	}.read.data
+	}.read.data @dagger(output)
 }
@@ -5,8 +5,8 @@ import (
 	"dagger.io/aws"
 )
 
-// RunTask implements ecs run-task for running a single container on ECS
-#RunTask: {
+// Task implements ecs run-task for running a single container on ECS
+#Task: {
 
 	// AWS Config
 	config: aws.#Config
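Callers follow the rename directly; a minimal sketch, where the import path and region are assumptions and only the config field visible in this hunk is set:

```cue
package main

import (
	"dagger.io/aws"
	"dagger.io/aws/ecs"
)

awsConfig: aws.#Config & {
	region: "us-east-1"
}

// Previously ecs.#RunTask; now unified under the shorter #Task name.
task: ecs.#Task & {
	config: awsConfig
}
```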
@@ -19,7 +19,6 @@ import (
 
 	// kubeconfig is the generated kube configuration file
 	kubeconfig: {
-		@dagger(output)
 		string
 
 		#up: [
@@ -58,5 +57,5 @@ import (
 				format: "string"
 			},
 		]
-	}
+	} @dagger(output)
 }
@@ -8,6 +8,7 @@ import (
 
 // Returns an unused rule priority (randomized in available range)
 #RandomRulePriority: {
 
 	// AWS Config
 	config: aws.#Config
 
@@ -8,7 +8,8 @@ import (
 )
 
 // Creates a new Database on an existing RDS Instance
-#CreateDB: {
+#Database: {
 
 	// AWS Config
 	config: aws.#Config
 
@@ -83,7 +84,8 @@ import (
 }
 
 // Creates a new user credentials on an existing RDS Instance
-#CreateUser: {
+#User: {
 
 	// AWS Config
 	config: aws.#Config
 
@@ -107,7 +109,6 @@ import (
 
 	// Outputed username
 	out: {
-		@dagger(output)
 		string
 
 		#up: [
@@ -188,11 +189,12 @@ import (
 				format: "string"
 			},
 		]
-	}
+	} @dagger(output)
 }
 
 // Fetches information on an existing RDS Instance
 #Instance: {
 
 	// AWS Config
 	config: aws.#Config
 
@@ -7,115 +7,29 @@ import (
 	"dagger.io/aws"
 )
 
-// S3 Bucket upload (file or directory)
-#Put: {
+// S3 Bucket object(s) sync
+#Object: {
 
 	// AWS Config
 	config: aws.#Config
 
-	// Source Artifact to upload to S3
-	source?: dagger.#Artifact @dagger(input)
-
-	// Source inlined as a string to upload to S3
-	sourceInline?: string @dagger(input)
-
-	// Target S3 URL (eg. s3://<bucket-name>/<path>/<sub-path>)
-	target: string @dagger(input)
-
-	// Object content type
-	contentType: string | *"" @dagger(input)
-
-	// Always write the object to S3
-	always: bool | *true @dagger(input)
-
-	// URL of the uploaded S3 object
-	url: {
-		@dagger(output)
-		string
-
-		#up: [
-			op.#Load & {
-				from: aws.#CLI & {
-					"config": config
-				}
-			},
-
-			if sourceInline != _|_ {
-				op.#WriteFile & {
-					dest:    "/source"
-					content: sourceInline
-				}
-			},
-
-			op.#Exec & {
-				if always != _|_ {
-					"always": always
-				}
-				env: {
-					TARGET:       target
-					CONTENT_TYPE: contentType
-				}
-
-				if sourceInline == _|_ {
-					mount: "/source": from: source
-				}
-
-				args: [
-					"/bin/bash",
-					"--noprofile",
-					"--norc",
-					"-eo",
-					"pipefail",
-					"-c",
-					#"""
						opts=""
						op=cp
						if [ -d /source ]; then
							op=sync
						fi
						if [ -n "$CONTENT_TYPE" ]; then
							opts="--content-type $CONTENT_TYPE"
						fi
						aws s3 $op $opts /source "$TARGET"
						echo -n "$TARGET" \
							| sed -E 's=^s3://([^/]*)/=https://\1.s3.amazonaws.com/=' \
							> /url
						"""#,
-				]
-			},
-
-			op.#Export & {
-				source: "/url"
-				format: "string"
-			},
-		]
-	}
-}
-
-// S3 Bucket sync
-#Sync: {
-	// AWS Config
-	config: aws.#Config
-
 	// Source Artifact to upload to S3
 	source: dagger.#Artifact @dagger(input)
 
 	// Target S3 URL (eg. s3://<bucket-name>/<path>/<sub-path>)
 	target: string @dagger(input)
 
-	// Files that exist in the destination but not in the
-	// source are deleted during sync.
-	delete: *false | bool @dagger(input)
+	// Delete files that already exist on remote destination
+	delete: *false | true @dagger(input)
 
 	// Object content type
 	contentType: string | *"" @dagger(input)
 
 	// Always write the object to S3
-	always: bool | *true @dagger(input)
+	always: *true | false @dagger(input)
 
 	// URL of the uploaded S3 object
 	url: {
-		@dagger(output)
 		string
 
 		#up: [
@@ -166,5 +80,5 @@ import (
 				format: "string"
 			},
 		]
-	}
+	} @dagger(output)
 }
@@ -1,74 +0,0 @@
-// DEPRECATED: see dagger.io/os
-package file
-
-import (
-	"strings"
-	"dagger.io/dagger"
-	"dagger.io/dagger/op"
-)
-
-#Create: {
-	filename: !="" @dagger(input)
-	permissions: int | *0o644 @dagger(input)
-	contents: string | bytes @dagger(input)
-
-	#up: [
-		op.#WriteFile & {dest: filename, content: contents, mode: permissions},
-	]
-}
-
-#Append: {
-	filename: !="" @dagger(input)
-	permissions: int | *0o644 @dagger(input)
-	contents: string | bytes @dagger(input)
-	from: dagger.#Artifact @dagger(input)
-
-	orig: (#read & {path: filename, "from": from}).data @dagger(output)
-
-	#up: [
-		op.#WriteFile & {dest: filename, content: "\(orig)\(contents)", mode: permissions},
-	]
-}
-
-#Read: {
-	filename: !="" @dagger(input)
-	from: dagger.#Artifact @dagger(input)
-	contents: (#read & {path: filename, "from": from}).data @dagger(output)
-}
-
-#read: {
-	path: !="" @dagger(input)
-	from: dagger.#Artifact @dagger(input)
-	data: {
-		string
-		#up: [
-			op.#Load & {"from": from},
-			op.#Export & {source: path},
-		]
-	} @dagger(output)
-}
-
-#Glob: {
-	glob: !="" @dagger(input)
-	filenames: [...string] @dagger(input)
-	from: dagger.#Artifact @dagger(input)
-	files: (_#glob & {"glob": glob, "from": from}).data @dagger(output)
-	// trim suffix because ls always ends with newline
-	filenames: strings.Split(strings.TrimSuffix(files, "\n"), "\n") @dagger(output)
-}
-
-_#glob: {
-	glob: !=""
-	from: dagger.#Artifact
-	data: {
-		string
-		_tmppath: "/tmp/ls.out"
-		#up: [
-			op.#Load & {"from": from},
-			op.#Exec & {
-				args: ["sh", "-c", "ls \(glob) > \(_tmppath)"]
-			},
-			op.#Export & {source: _tmppath},
-		]
-	}
-}
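The deleted file package points at dagger.io/os as its replacement. A sketch of the equivalent read, following the os.#File pattern used in the ecr.cue hunk earlier in this diff; the artifact and path are placeholders.

```cue
package main

import (
	"dagger.io/dagger"
	"dagger.io/os"
)

source: dagger.#Artifact @dagger(input)

// Equivalent of the removed file.#Read: read a file from an artifact.
releaseInfo: {
	os.#File & {
		from: source
		path: "/etc/os-release"
	}
}.read.data
```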
@@ -11,8 +11,10 @@ import (
 	// GCP Config
 	config: gcp.#Config
 
-	// GCR credentials
-	username: "oauth2accesstoken"
+	// GCR registry username
+	username: "oauth2accesstoken" @dagger(output)
 
+	// GCR registry secret
 	secret: {
 		string
 
@@ -1,3 +1,4 @@
+// Helm package manager
 package helm
 
 import (
@@ -10,6 +11,7 @@ import (
 
 // Install a Helm chart
 #Chart: {
 
 	// Helm deployment name
 	name: string @dagger(input)
 
@@ -10,12 +10,6 @@ setup() {
     "$DAGGER" compute "$TESTDIR"/stdlib/go --input-dir TestData="$TESTDIR"/stdlib/go/testdata
 }
 
-# FIXME: move to universe/universe.bats
-# Assigned to: <ADD YOUR NAME HERE>
-@test "stdlib: file" {
-    "$DAGGER" compute "$TESTDIR"/stdlib/file
-}
-
 # FIXME: move to universe/universe.bats
 # Assigned to: <ADD YOUR NAME HERE>
 @test "stdlib: kubernetes" {
@@ -1,112 +0,0 @@
-package f
-
-import (
-	"dagger.io/dagger/op"
-	"dagger.io/alpine"
-	"dagger.io/file"
-)
-
-TestCreate: {
-	_content: "hello world"
-
-	write: file.#Create & {
-		filename: "/file.txt"
-		contents: _content
-	}
-
-	test: #up: [
-		op.#Load & {from: alpine.#Image},
-		op.#Exec & {
-			args: [
-				"sh",
-				"-ec",
-				"""
				test "$(cat /file.txt)" = "hello world"
				""",
-			]
-			mount: "/file.txt": {
-				from: write
-				path: "/file.txt"
-			}
-		},
-	]
-}
-
-TestRead: {
-	read: file.#Read & {
-		filename: "/etc/alpine-release"
-		from: alpine.#Image & {version: "3.10.6"}
-	}
-	test: #up: [
-		op.#Load & {from: alpine.#Image},
-		op.#Exec & {
-			args: [
-				"sh",
-				"-ec",
-				"""
				test "\(read.contents)" = "3.10.6\n"
				""",
-			]
-		},
-	]
-}
-
-TestRead2: {
-	write: file.#Create & {
-		_content: "hello world"
-		filename: "/file.txt"
-		contents: _content
-	}
-
-	read: file.#Read & {
-		filename: "/file.txt"
-		from: write
-	}
-
-	test: #up: [
-		op.#Load & {from: alpine.#Image},
-		op.#Exec & {
-			args: [
-				"sh",
-				"-ec",
-				"""
				test "\(read.contents)" = "hello world"
				""",
-			]
-		},
-	]
-}
-
-TestAppend: {
-	content1: "hello world"
-	content2: "foo bar"
-
-	write: file.#Create & {
-		filename: "/file.txt"
-		contents: content1
-	}
-	append: file.#Append & {
-		filename: "/file.txt"
-		contents: content2
-		from: write
-	}
-
-	orig: append.orig
-
-	read: file.#Read & {
-		filename: "/file.txt"
-		from: append
-	}
-
-	new: read.contents
-
-	test: new & "hello worldfoo bar"
-}
-
-TestGlob: {
-	list: file.#Glob & {
-		glob: "/etc/r*"
-		from: alpine.#Image
-	}
-	test: list.filenames & ["/etc/resolv.conf"]
-}