commit fa41177e1a
@@ -42,7 +42,7 @@ func TrackProjectCommand(ctx context.Context, cmd *cobra.Command, w *state.Proje
 Value: hash(gitRepoURL(w.Path)),
 },
 {
-// The project path might contain the username (e.g. /home/user/project), so we hash itfor privacy.
+// The project path might contain the username (e.g. /home/user/project), so we hash it for privacy.
 Name: "project_path_hash",
 Value: hash(w.Path),
 },
@@ -4,7 +4,7 @@ slug: /
 
 # What is Dagger?
 
-Dagger is a portable devkit for CICD. It helps you develop powerful CICD pipelines that can run anwyhere.
+Dagger is a portable devkit for CICD. It helps you develop powerful CICD pipelines that can run anywhere.
 
 * Choose from a wide catalog of reusable actions, or create your own in your favorite programming language.
 * Tie it all together with [CUE](https://cuelang.org), the next-generation declarative language invented at Google. No more YAML hell!
@@ -67,7 +67,7 @@ Dagger will load all `.cue` files recursively in the current Dagger project. Mor
 
 ### Write a Dagger Plan
 
-A Dagger _plan_ is written in CUE and defines the _resources_, _dependencies_, and _logic_ to deploy an application to an environment. Unlike traditional glue code written in a scripting language such as Bash or PowerShell, a Dagger plan is _declarative_ rather than _imperative_. This frees us from thinking about the order of operations, since Dagger will infer dependendencies and calculate correct order on its own.
+A Dagger _plan_ is written in CUE and defines the _resources_, _dependencies_, and _logic_ to deploy an application to an environment. Unlike traditional glue code written in a scripting language such as Bash or PowerShell, a Dagger plan is _declarative_ rather than _imperative_. This frees us from thinking about the order of operations, since Dagger will infer dependencies and calculate correct order on its own.
 
 Let’s first create a directory to hold our Dagger plan separately from our application code:
 
@@ -155,7 +155,7 @@ dockerSocket struct true Mount local docker socket
 
 Notice that Dagger now reports that both inputs have been set.
 
-### Deploy the Appplication
+### Deploy the Application
 
 With our plan in place, our environment set, and our inputs defined, we can deploy the application as simply as:
 
@@ -10,7 +10,7 @@ CUE is a powerful configuration language created by Marcel van Lohuizen who co-c
 
 For decades, developers, engineers, and system administrators alike have used some combination of `INI`, `ENV`, `YAML`, `XML`, and `JSON` (as well as custom formats such as those for Apache, Nginx, et al) to describe configurations, resources, operations, variables, parameters, state, etc. While these examples work fine for storing data, they are merely _data formats_, not languages, and as such they each lack the ability to execute logic and operate on data directly.
 
-Simple—yet powerful!—things like if statements, for loops, comprehensions, and string interpolation, among others are just not possible in these formats without the use of a separate process for execution. The result is that variables or parameters must be injected, and any logic executed by a templating language (such as Jinja) or by a separate engine instructed by a DSL (Domain-specific Language). Often templating languages and DSLs are used in conjuction and while this technically works, the results are that we end up with code bases, or even single files, that are overly verbose, that intersperse templating languages with various DSLs (and sometimes multiple DSLs that feed ouput from one to the input of another!), that create rigid structures without enforcing schemas (not without more effort), thereby making the code challenging to reason about, difficult to maintain, brittle, and perhaps worst of all, _prone to side effects_.
+Simple—yet powerful!—things like if statements, for loops, comprehensions, and string interpolation, among others are just not possible in these formats without the use of a separate process for execution. The result is that variables or parameters must be injected, and any logic executed by a templating language (such as Jinja) or by a separate engine instructed by a DSL (Domain-specific Language). Often templating languages and DSLs are used in conjunction and while this technically works, the results are that we end up with code bases, or even single files, that are overly verbose, that intersperse templating languages with various DSLs (and sometimes multiple DSLs that feed output from one to the input of another!), that create rigid structures without enforcing schemas (not without more effort), thereby making the code challenging to reason about, difficult to maintain, brittle, and perhaps worst of all, _prone to side effects_.
 
 A _configuration language_ such as CUE, allows us to both _specify_ data as well as _act_ upon that data with any logic necessary to achieve the desired output. Furthermore, and perhaps most importantly, CUE allows us to not only specify data as concrete values, but also specify the _types_ those concrete values must be as well as any _constraints_ such as min and max for example. It gives us the ability to define a _schema_ but unlike doing so with say JSON Schema, CUE can both _define_ and _enforce_ the schema, whereas JSON Schema is merely a definition that requires some other process to enforce it.
 
@@ -147,7 +147,7 @@ Bob:
 
 ```
 
-The output here is a product of _*unifying*_ the `#Person` _definition_ with an object that contains _concrete values_ each of which is the product of unifying the concrete value with the _types_ and _constraints_ declared by the field in the defintion. [Try it in the CUE playground](https://cuelang.org/play/?id=nAUx1-VlrY4#cue@export@yaml)
+The output here is a product of _*unifying*_ the `#Person` _definition_ with an object that contains _concrete values_ each of which is the product of unifying the concrete value with the _types_ and _constraints_ declared by the field in the definition. [Try it in the CUE playground](https://cuelang.org/play/?id=nAUx1-VlrY4#cue@export@yaml)
 
 ### Default Values and the Nature of Inheritance
 
@@ -179,7 +179,7 @@ The following `config.cue` defines:
 
 The below `config.cue` defines:
 
-- `kubeconfig` a generic value created to embbed this `gke.#KubeConfig` value
+- `kubeconfig` a generic value created to embed this `gke.#KubeConfig` value
 - `gcpConfig`: connection to Google using `alpha.dagger.io/gcp`
 - `gkeConfig`: transform a `gcpConfig` to a readable format for `kubernetes.#Resources.kubeconfig`
 using `alpha.dagger.io/gcp/gke`
@@ -194,7 +194,7 @@ The below `config.cue` defines:
 
 The below `config.cue` defines:
 
-- `kubeconfig`, a generic value created to embbed this `eksConfig.kubeconfig` value
+- `kubeconfig`, a generic value created to embed this `eksConfig.kubeconfig` value
 - `awsConfig`, connection to Amazon using `alpha.dagger.io/aws`
 - `eksConfig`, transform a `awsConfig` to a readable format for `kubernetes.#Resources.kubeconfig`
 using `alpha.dagger.io/aws/eks`
@@ -15,7 +15,7 @@ suffix: random.#String & {
 seed: ""
 }
 
-// Query the Cloudformation stackname, or create one with a random suffix to keep unicity
+// Query the Cloudformation stackname, or create one with a random suffix for uniqueness
 cfnStackName: *"stack-\(suffix.out)" | string & dagger.#Input
 
 // AWS Cloudformation stdlib
@@ -16,7 +16,7 @@ tag: "test-ecr"
 
 // Todoapp deployment pipeline
 todoApp: {
-// Build the image from repositoru artifact
+// Build the image from repository artifact
 image: docker.#Build & {
 source: repository
 }
@@ -37,7 +37,7 @@ todoApp: {
 image: remoteImage.ref
 }
 
-// Deploy the customized manifest to a kubernetes cluster
+// Deploy the customized manifest to a kubernetes cluster
 kubeSrc: kubernetes.#Resources & {
 "kubeconfig": kubeconfig
 manifest: deployment.manifest
@@ -23,7 +23,7 @@ manifest: dagger.#Artifact & dagger.#Input
 
 // Todoapp deployment pipeline
 todoApp: {
-// Build the image from repositoru artifact
+// Build the image from repository artifact
 image: docker.#Build & {
 source: repository
 }
@@ -18,7 +18,7 @@ import (
 // ArgoCD project
 project: *"default" | dagger.#Input & {string}
 
-// Basic authentification to login
+// Basic authentication to login
 basicAuth: {
 // Username
 username: dagger.#Input & {string}
@@ -5,7 +5,7 @@ import (
 "alpha.dagger.io/os"
 )
 
-// Sync an application to its targer state
+// Sync an application to its target state
 #Sync: {
 // ArgoCD configuration
 config: #Config
@@ -46,14 +46,14 @@ import (
 "pipefail",
 #"""
 echo "dbType: $DB_TYPE"
-
+
 sql="CREATE DATABASE \`"$NAME" \`"
 if [ "$DB_TYPE" = postgres ]; then
 sql="CREATE DATABASE \""$NAME"\""
 fi
-
+
 echo "$NAME" >> /db_created
-
+
 aws rds-data execute-statement \
 --resource-arn "$DB_ARN" \
 --secret-arn "$SECRET_ARN" \
@@ -107,7 +107,7 @@ import (
 // Database type MySQL or PostgreSQL (Aurora Serverless only)
 dbType: "mysql" | "postgres" @dagger(input)
 
-// Outputed username
+// Outputted username
 out: {
 string
 
@@ -127,14 +127,14 @@ import (
 "pipefail",
 #"""
 echo "dbType: $DB_TYPE"
-
+
 sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
 if [ "$DB_TYPE" = postgres ]; then
 sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 fi
-
+
 echo "$USERNAME" >> /username
-
+
 aws rds-data execute-statement \
 --resource-arn "$DB_ARN" \
 --secret-arn "$SECRET_ARN" \
@@ -146,24 +146,24 @@ import (
 if [ $exit_code -ne 0 ]; then
 grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
 fi
-
+
 sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
 if [ "$DB_TYPE" = postgres ]; then
 sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
 fi
-
+
 aws rds-data execute-statement \
 --resource-arn "$DB_ARN" \
 --secret-arn "$SECRET_ARN" \
 --sql "$sql" \
 --database "$DB_TYPE" \
 --no-include-result-metadata
-
+
 sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
 if [ "$DB_TYPE" = postgres ]; then
 sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
 fi
-
+
 if [ -s "$GRAND_DATABASE ]; then
 aws rds-data execute-statement \
 --resource-arn "$DB_ARN" \
@@ -103,7 +103,7 @@ for added readability, starting on the next line. This is called
 status : 1
 output (2 lines):
 Error! Something went terribly wrong!
-Our engineers are panicing... \`>`;/
+Our engineers are panicking... \`>`;/
 --
 ```
 
@@ -2,7 +2,7 @@
 
 package op
 
-// Full resolution schema enforciong the complete op spec
+// Full resolution schema enforcing the complete op spec
 #Op: (#Export |
 #FetchContainer |
 #PushContainer |
@@ -132,7 +132,7 @@ import (
 for registry in registries {
 op.#Exec & {
 args: ["/bin/bash", "-c", #"""
-echo "$TARGER_HOST" | docker login --username "$DOCKER_USERNAME" --password-stdin "$(cat /password)"
+echo "$TARGET_HOST" | docker login --username "$DOCKER_USERNAME" --password-stdin "$(cat /password)"
 """#,
 ]
 env: {
@@ -25,7 +25,7 @@ The Dagger Core API defines core types and utilities for programming Dagger:
 ### Low-level Engine API: `dagger.io/dagger/engine`
 
 * *Development import path (implemented subset): `alpha.dagger.io/europa/dagger/engine`*
-* *Development importa pth (full spec): `alpha.dagger.io/dagger/europa/dagger/engine/spec/engine`*
+* *Development import path (full spec): `alpha.dagger.io/dagger/europa/dagger/engine/spec/engine`*
 
 `engine` is a low-level API for accessing the raw capabilities of the Dagger Engine. Most developers should use the Dagger Core API instead (`dagger.io/dagger`), but experts and framework developers can target the engine API directly for maximum control.
 
@@ -37,5 +37,5 @@ In Europa, `engine` will deprecate the following implicit API:
 * Convention to embed pipelines in the Cue lattice with the special nested definition `#up`
 * Convention to reference filesystem state from the Cue lattice with `@dagger(artifact)`
 * Convention to reference external secrets from the Cue lattice with `@dagger(secret)`
-* Convention to reference external network endpoints from the Cue lattive with `@dagger(stream)`
+* Convention to reference external network endpoints from the Cue lattice with `@dagger(stream)`
 * Convention that some operations (specifically `op.#Local`) are meant to be generated at runtime rather than authored manually.
@@ -9,7 +9,7 @@ package engine
 path: string
 // Optionally exclude certain files
 include: [...string]
-// Optionall include certain files
+// Optionally include certain files
 exclude: [...string]
 
 output: #FS
@@ -53,7 +53,7 @@ The `docker` package is a native Cue API for Docker. You can use it to build, ru
 The Dagger container API defines the following types:
 
 * `#Image`: a container image
-* `#Run`: run a comand in a container
+* `#Run`: run a command in a container
 * `#Push`: upload an image to a repository
 * `#Pull`: download an image from a repository
 * `#Build`: build an image
@@ -10,7 +10,7 @@ import (
 steps: [#Step, ...#Step]
 output: #Image
 
-// Generate build DAG from linerar steps
+// Generate build DAG from linear steps
 _dag: {
 for idx, step in steps {
 "\(idx)": step & {
@@ -40,7 +40,7 @@ test_db_container_name: "changelog_test_postgres"
 // - caching is buildkit layers
 //
 // 3. Open Telemetry integration out-of-the-box
-// - visualise all steps in Jaeger UI
+// - visualize all steps in Jaeger UI
 
 // PIPELINE OVERVIEW ###########################################################
 //
@@ -174,7 +174,7 @@ func (s Solver) forwardEvents(ch chan *bk.SolveStatus) {
 }
 
 // Export will export `st` to `output`
-// FIXME: this is currently impleneted as a hack, starting a new Build session
+// FIXME: this is currently implemented as a hack, starting a new Build session
 // within buildkit from the Control API. Ideally the Gateway API should allow to
 // Export directly.
 func (s Solver) Export(ctx context.Context, st llb.State, img *dockerfile2llb.Image, output bk.ExportEntry, platform specs.Platform) (*bk.SolveResponse, error) {
@@ -6,7 +6,7 @@ import (
 
 engine.#Plan & {
 inputs: secrets: echo: command: {
-name: "rtyet" // should fail because command doesnt exist
+name: "rtyet" // should fail because command doesn't exist
 args: ["hello europa"]
 }
 
@@ -10,7 +10,7 @@ import (
 "github.com/docker/distribution/reference"
 )
 
-func getBuildkitInformation(ctx context.Context) (*BuilkitInformation, error) {
+func getBuildkitInformation(ctx context.Context) (*BuildkitInformation, error) {
 formatString := "{{.Config.Image}};{{.State.Running}};{{if index .NetworkSettings.Networks \"host\"}}{{\"true\"}}{{else}}{{\"false\"}}{{end}}"
 cmd := exec.CommandContext(ctx,
 "docker",
@@ -48,14 +48,14 @@ func getBuildkitInformation(ctx context.Context) (*BuilkitInformation, error) {
 return nil, err
 }
 
-return &BuilkitInformation{
+return &BuildkitInformation{
 Version: tag.Tag(),
 IsActive: isActive,
 HaveHostNetwork: haveHostNetwork,
 }, nil
 }
 
-type BuilkitInformation struct {
+type BuildkitInformation struct {
 Version string
 IsActive bool
 HaveHostNetwork bool