typo: Fix some minor typos.

Signed-off-by: Guillaume Coguiec <guillaume@logical.work>
This commit is contained in:
Guillaume Coguiec 2022-02-07 13:53:49 -05:00
parent c04d0cdd0b
commit bd0f276d30
No known key found for this signature in database
GPG Key ID: FE7148DFA5FA0DC5
22 changed files with 39 additions and 39 deletions

View File

@@ -42,7 +42,7 @@ func TrackProjectCommand(ctx context.Context, cmd *cobra.Command, w *state.Proje
Value: hash(gitRepoURL(w.Path)), Value: hash(gitRepoURL(w.Path)),
}, },
{ {
// The project path might contain the username (e.g. /home/user/project), so we hash itfor privacy. // The project path might contain the username (e.g. /home/user/project), so we hash it for privacy.
Name: "project_path_hash", Name: "project_path_hash",
Value: hash(w.Path), Value: hash(w.Path),
}, },

View File

@@ -4,7 +4,7 @@ slug: /
# What is Dagger? # What is Dagger?
Dagger is a portable devkit for CICD. It helps you develop powerful CICD pipelines that can run anwyhere. Dagger is a portable devkit for CICD. It helps you develop powerful CICD pipelines that can run anywhere.
* Choose from a wide catalog of reusable actions, or create your own in your favorite programming language. * Choose from a wide catalog of reusable actions, or create your own in your favorite programming language.
* Tie it all together with [CUE](https://cuelang.org), the next-generation declarative language invented at Google. No more YAML hell! * Tie it all together with [CUE](https://cuelang.org), the next-generation declarative language invented at Google. No more YAML hell!

View File

@@ -67,7 +67,7 @@ Dagger will load all `.cue` files recursively in the current Dagger project. Mor
### Write a Dagger Plan ### Write a Dagger Plan
A Dagger _plan_ is written in CUE and defines the _resources_, _dependencies_, and _logic_ to deploy an application to an environment. Unlike traditional glue code written in a scripting language such as Bash or PowerShell, a Dagger plan is _declarative_ rather than _imperative_. This frees us from thinking about the order of operations, since Dagger will infer dependendencies and calculate correct order on its own. A Dagger _plan_ is written in CUE and defines the _resources_, _dependencies_, and _logic_ to deploy an application to an environment. Unlike traditional glue code written in a scripting language such as Bash or PowerShell, a Dagger plan is _declarative_ rather than _imperative_. This frees us from thinking about the order of operations, since Dagger will infer dependencies and calculate correct order on its own.
Let&rsquo;s first create a directory to hold our Dagger plan separately from our application code: Let&rsquo;s first create a directory to hold our Dagger plan separately from our application code:
@@ -155,7 +155,7 @@ dockerSocket struct true Mount local docker socket
Notice that Dagger now reports that both inputs have been set. Notice that Dagger now reports that both inputs have been set.
### Deploy the Appplication ### Deploy the Application
With our plan in place, our environment set, and our inputs defined, we can deploy the application as simply as: With our plan in place, our environment set, and our inputs defined, we can deploy the application as simply as:

View File

@@ -10,7 +10,7 @@ CUE is a powerful configuration language created by Marcel van Lohuizen who co-c
For decades, developers, engineers, and system administrators alike have used some combination of `INI`, `ENV`, `YAML`, `XML`, and `JSON` (as well as custom formats such as those for Apache, Nginx, et al) to describe configurations, resources, operations, variables, parameters, state, etc. While these examples work fine for storing data, they are merely _data formats_, not languages, and as such they each lack the ability to execute logic and operate on data directly. For decades, developers, engineers, and system administrators alike have used some combination of `INI`, `ENV`, `YAML`, `XML`, and `JSON` (as well as custom formats such as those for Apache, Nginx, et al) to describe configurations, resources, operations, variables, parameters, state, etc. While these examples work fine for storing data, they are merely _data formats_, not languages, and as such they each lack the ability to execute logic and operate on data directly.
Simple&mdash;yet powerful!&mdash;things like if statements, for loops, comprehensions, and string interpolation, among others are just not possible in these formats without the use of a separate process for execution. The result is that variables or parameters must be injected, and any logic executed by a templating language (such as Jinja) or by a separate engine instructed by a DSL (Domain-specific Language). Often templating languages and DSLs are used in conjuction and while this technically works, the results are that we end up with code bases, or even single files, that are overly verbose, that intersperse templating languages with various DSLs (and sometimes multiple DSLs that feed ouput from one to the input of another!), that create rigid structures without enforcing schemas (not without more effort), thereby making the code challenging to reason about, difficult to maintain, brittle, and perhaps worst of all, _prone to side effects_. Simple&mdash;yet powerful!&mdash;things like if statements, for loops, comprehensions, and string interpolation, among others are just not possible in these formats without the use of a separate process for execution. The result is that variables or parameters must be injected, and any logic executed by a templating language (such as Jinja) or by a separate engine instructed by a DSL (Domain-specific Language). Often templating languages and DSLs are used in conjunction and while this technically works, the results are that we end up with code bases, or even single files, that are overly verbose, that intersperse templating languages with various DSLs (and sometimes multiple DSLs that feed output from one to the input of another!), that create rigid structures without enforcing schemas (not without more effort), thereby making the code challenging to reason about, difficult to maintain, brittle, and perhaps worst of all, _prone to side effects_.
A _configuration language_ such as CUE, allows us to both _specify_ data as well as _act_ upon that data with any logic necessary to achieve the desired output. Furthermore, and perhaps most importantly, CUE allows us to not only specify data as concrete values, but also specify the _types_ those concrete values must be as well as any _constraints_ such as min and max for example. It gives us the ability to define a _schema_ but unlike doing so with say JSON Schema, CUE can both _define_ and _enforce_ the schema, whereas JSON Schema is merely a definition that requires some other process to enforce it. A _configuration language_ such as CUE, allows us to both _specify_ data as well as _act_ upon that data with any logic necessary to achieve the desired output. Furthermore, and perhaps most importantly, CUE allows us to not only specify data as concrete values, but also specify the _types_ those concrete values must be as well as any _constraints_ such as min and max for example. It gives us the ability to define a _schema_ but unlike doing so with say JSON Schema, CUE can both _define_ and _enforce_ the schema, whereas JSON Schema is merely a definition that requires some other process to enforce it.
@@ -147,7 +147,7 @@ Bob:
``` ```
The output here is a product of _*unifying*_ the `#Person` _definition_ with an object that contains _concrete values_ each of which is the product of unifying the concrete value with the _types_ and _constraints_ declared by the field in the defintion. [Try it in the CUE playground](https://cuelang.org/play/?id=nAUx1-VlrY4#cue@export@yaml) The output here is a product of _*unifying*_ the `#Person` _definition_ with an object that contains _concrete values_ each of which is the product of unifying the concrete value with the _types_ and _constraints_ declared by the field in the definition. [Try it in the CUE playground](https://cuelang.org/play/?id=nAUx1-VlrY4#cue@export@yaml)
### Default Values and the Nature of Inheritance ### Default Values and the Nature of Inheritance

View File

@@ -179,7 +179,7 @@ The following `config.cue` defines:
The below `config.cue` defines: The below `config.cue` defines:
- `kubeconfig` a generic value created to embbed this `gke.#KubeConfig` value - `kubeconfig` a generic value created to embed this `gke.#KubeConfig` value
- `gcpConfig`: connection to Google using `alpha.dagger.io/gcp` - `gcpConfig`: connection to Google using `alpha.dagger.io/gcp`
- `gkeConfig`: transform a `gcpConfig` to a readable format for `kubernetes.#Resources.kubeconfig` - `gkeConfig`: transform a `gcpConfig` to a readable format for `kubernetes.#Resources.kubeconfig`
using `alpha.dagger.io/gcp/gke` using `alpha.dagger.io/gcp/gke`
@@ -194,7 +194,7 @@ The below `config.cue` defines:
The below `config.cue` defines: The below `config.cue` defines:
- `kubeconfig`, a generic value created to embbed this `eksConfig.kubeconfig` value - `kubeconfig`, a generic value created to embed this `eksConfig.kubeconfig` value
- `awsConfig`, connection to Amazon using `alpha.dagger.io/aws` - `awsConfig`, connection to Amazon using `alpha.dagger.io/aws`
- `eksConfig`, transform a `awsConfig` to a readable format for `kubernetes.#Resources.kubeconfig` - `eksConfig`, transform a `awsConfig` to a readable format for `kubernetes.#Resources.kubeconfig`
using `alpha.dagger.io/aws/eks` using `alpha.dagger.io/aws/eks`

View File

@@ -15,7 +15,7 @@ suffix: random.#String & {
seed: "" seed: ""
} }
// Query the Cloudformation stackname, or create one with a random suffix to keep unicity // Query the Cloudformation stackname, or create one with a random suffix for uniqueness
cfnStackName: *"stack-\(suffix.out)" | string & dagger.#Input cfnStackName: *"stack-\(suffix.out)" | string & dagger.#Input
// AWS Cloudformation stdlib // AWS Cloudformation stdlib

View File

@@ -16,7 +16,7 @@ tag: "test-ecr"
// Todoapp deployment pipeline // Todoapp deployment pipeline
todoApp: { todoApp: {
// Build the image from repositoru artifact // Build the image from repository artifact
image: docker.#Build & { image: docker.#Build & {
source: repository source: repository
} }
@@ -37,7 +37,7 @@ todoApp: {
image: remoteImage.ref image: remoteImage.ref
} }
// Deploy the customized manifest to a kubernetes cluster // Deploy the customized manifest to a kubernetes cluster
kubeSrc: kubernetes.#Resources & { kubeSrc: kubernetes.#Resources & {
"kubeconfig": kubeconfig "kubeconfig": kubeconfig
manifest: deployment.manifest manifest: deployment.manifest

View File

@@ -23,7 +23,7 @@ manifest: dagger.#Artifact & dagger.#Input
// Todoapp deployment pipeline // Todoapp deployment pipeline
todoApp: { todoApp: {
// Build the image from repositoru artifact // Build the image from repository artifact
image: docker.#Build & { image: docker.#Build & {
source: repository source: repository
} }

View File

@@ -18,7 +18,7 @@ import (
// ArgoCD project // ArgoCD project
project: *"default" | dagger.#Input & {string} project: *"default" | dagger.#Input & {string}
// Basic authentification to login // Basic authentication to login
basicAuth: { basicAuth: {
// Username // Username
username: dagger.#Input & {string} username: dagger.#Input & {string}

View File

@@ -5,7 +5,7 @@ import (
"alpha.dagger.io/os" "alpha.dagger.io/os"
) )
// Sync an application to its targer state // Sync an application to its target state
#Sync: { #Sync: {
// ArgoCD configuration // ArgoCD configuration
config: #Config config: #Config

View File

@@ -46,14 +46,14 @@ import (
"pipefail", "pipefail",
#""" #"""
echo "dbType: $DB_TYPE" echo "dbType: $DB_TYPE"
sql="CREATE DATABASE \`"$NAME" \`" sql="CREATE DATABASE \`"$NAME" \`"
if [ "$DB_TYPE" = postgres ]; then if [ "$DB_TYPE" = postgres ]; then
sql="CREATE DATABASE \""$NAME"\"" sql="CREATE DATABASE \""$NAME"\""
fi fi
echo "$NAME" >> /db_created echo "$NAME" >> /db_created
aws rds-data execute-statement \ aws rds-data execute-statement \
--resource-arn "$DB_ARN" \ --resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \ --secret-arn "$SECRET_ARN" \
@@ -107,7 +107,7 @@ import (
// Database type MySQL or PostgreSQL (Aurora Serverless only) // Database type MySQL or PostgreSQL (Aurora Serverless only)
dbType: "mysql" | "postgres" @dagger(input) dbType: "mysql" | "postgres" @dagger(input)
// Outputed username // Outputted username
out: { out: {
string string
@@ -127,14 +127,14 @@ import (
"pipefail", "pipefail",
#""" #"""
echo "dbType: $DB_TYPE" echo "dbType: $DB_TYPE"
sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'" sql="CREATE USER '"$USERNAME"'@'%' IDENTIFIED BY '"$PASSWORD"'"
if [ "$DB_TYPE" = postgres ]; then if [ "$DB_TYPE" = postgres ]; then
sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'" sql="CREATE USER \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi fi
echo "$USERNAME" >> /username echo "$USERNAME" >> /username
aws rds-data execute-statement \ aws rds-data execute-statement \
--resource-arn "$DB_ARN" \ --resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \ --secret-arn "$SECRET_ARN" \
@@ -146,24 +146,24 @@ import (
if [ $exit_code -ne 0 ]; then if [ $exit_code -ne 0 ]; then
grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code grep -q "Operation CREATE USER failed for\|ERROR" tmp/out || exit $exit_code
fi fi
sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')" sql="SET PASSWORD FOR '"$USERNAME"'@'%' = PASSWORD('"$PASSWORD"')"
if [ "$DB_TYPE" = postgres ]; then if [ "$DB_TYPE" = postgres ]; then
sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'" sql="ALTER ROLE \""$USERNAME"\" WITH PASSWORD '"$PASSWORD"'"
fi fi
aws rds-data execute-statement \ aws rds-data execute-statement \
--resource-arn "$DB_ARN" \ --resource-arn "$DB_ARN" \
--secret-arn "$SECRET_ARN" \ --secret-arn "$SECRET_ARN" \
--sql "$sql" \ --sql "$sql" \
--database "$DB_TYPE" \ --database "$DB_TYPE" \
--no-include-result-metadata --no-include-result-metadata
sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'" sql="GRANT ALL ON \`"$GRAND_DATABASE"\`.* to '"$USERNAME"'@'%'"
if [ "$DB_TYPE" = postgres ]; then if [ "$DB_TYPE" = postgres ]; then
sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";" sql="GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO \""$USERNAME"\"; GRANT ALL PRIVILEGES ON DATABASE \""$GRAND_DATABASE"\" to \""$USERNAME"\"; GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON TABLES TO \""$USERNAME"\"; ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL PRIVILEGES ON SEQUENCES TO \""$USERNAME"\"; GRANT USAGE ON SCHEMA public TO \""$USERNAME"\";"
fi fi
if [ -s "$GRAND_DATABASE ]; then if [ -s "$GRAND_DATABASE ]; then
aws rds-data execute-statement \ aws rds-data execute-statement \
--resource-arn "$DB_ARN" \ --resource-arn "$DB_ARN" \

View File

@@ -103,7 +103,7 @@ for added readability, starting on the next line. This is called
status : 1 status : 1
output (2 lines): output (2 lines):
Error! Something went terribly wrong! Error! Something went terribly wrong!
Our engineers are panicing... \`>`;/ Our engineers are panicking... \`>`;/
-- --
``` ```

View File

@@ -2,7 +2,7 @@
package op package op
// Full resolution schema enforciong the complete op spec // Full resolution schema enforcing the complete op spec
#Op: (#Export | #Op: (#Export |
#FetchContainer | #FetchContainer |
#PushContainer | #PushContainer |

View File

@@ -132,7 +132,7 @@ import (
for registry in registries { for registry in registries {
op.#Exec & { op.#Exec & {
args: ["/bin/bash", "-c", #""" args: ["/bin/bash", "-c", #"""
echo "$TARGER_HOST" | docker login --username "$DOCKER_USERNAME" --password-stdin "$(cat /password)" echo "$TARGET_HOST" | docker login --username "$DOCKER_USERNAME" --password-stdin "$(cat /password)"
"""#, """#,
] ]
env: { env: {

View File

@@ -25,7 +25,7 @@ The Dagger Core API defines core types and utilities for programming Dagger:
### Low-level Engine API: `dagger.io/dagger/engine` ### Low-level Engine API: `dagger.io/dagger/engine`
* *Development import path (implemented subset): `alpha.dagger.io/europa/dagger/engine`* * *Development import path (implemented subset): `alpha.dagger.io/europa/dagger/engine`*
* *Development importa pth (full spec): `alpha.dagger.io/dagger/europa/dagger/engine/spec/engine`* * *Development import path (full spec): `alpha.dagger.io/dagger/europa/dagger/engine/spec/engine`*
`engine` is a low-level API for accessing the raw capabilities of the Dagger Engine. Most developers should use the Dagger Core API instead (`dagger.io/dagger`), but experts and framework developers can target the engine API directly for maximum control. `engine` is a low-level API for accessing the raw capabilities of the Dagger Engine. Most developers should use the Dagger Core API instead (`dagger.io/dagger`), but experts and framework developers can target the engine API directly for maximum control.
@@ -37,5 +37,5 @@ In Europa, `engine` will deprecate the following implicit API:
* Convention to embed pipelines in the Cue lattice with the special nested definition `#up` * Convention to embed pipelines in the Cue lattice with the special nested definition `#up`
* Convention to reference filesystem state from the Cue lattice with `@dagger(artifact)` * Convention to reference filesystem state from the Cue lattice with `@dagger(artifact)`
* Convention to reference external secrets from the Cue lattice with `@dagger(secret)` * Convention to reference external secrets from the Cue lattice with `@dagger(secret)`
* Convention to reference external network endpoints from the Cue lattive with `@dagger(stream)` * Convention to reference external network endpoints from the Cue lattice with `@dagger(stream)`
* Convention that some operations (specifically `op.#Local`) are meant to be generated at runtime rather than authored manually. * Convention that some operations (specifically `op.#Local`) are meant to be generated at runtime rather than authored manually.

View File

@@ -9,7 +9,7 @@ package engine
path: string path: string
// Optionally exclude certain files // Optionally exclude certain files
include: [...string] include: [...string]
// Optionall include certain files // Optionally include certain files
exclude: [...string] exclude: [...string]
output: #FS output: #FS

View File

@@ -53,7 +53,7 @@ The `docker` package is a native Cue API for Docker. You can use it to build, ru
The Dagger container API defines the following types: The Dagger container API defines the following types:
* `#Image`: a container image * `#Image`: a container image
* `#Run`: run a comand in a container * `#Run`: run a command in a container
* `#Push`: upload an image to a repository * `#Push`: upload an image to a repository
* `#Pull`: download an image from a repository * `#Pull`: download an image from a repository
* `#Build`: build an image * `#Build`: build an image

View File

@@ -10,7 +10,7 @@ import (
steps: [#Step, ...#Step] steps: [#Step, ...#Step]
output: #Image output: #Image
// Generate build DAG from linerar steps // Generate build DAG from linear steps
_dag: { _dag: {
for idx, step in steps { for idx, step in steps {
"\(idx)": step & { "\(idx)": step & {

View File

@@ -40,7 +40,7 @@ test_db_container_name: "changelog_test_postgres"
// - caching is buildkit layers // - caching is buildkit layers
// //
// 3. Open Telemetry integration out-of-the-box // 3. Open Telemetry integration out-of-the-box
// - visualise all steps in Jaeger UI // - visualize all steps in Jaeger UI
// PIPELINE OVERVIEW ########################################################### // PIPELINE OVERVIEW ###########################################################
// //

View File

@@ -174,7 +174,7 @@ func (s Solver) forwardEvents(ch chan *bk.SolveStatus) {
} }
// Export will export `st` to `output` // Export will export `st` to `output`
// FIXME: this is currently impleneted as a hack, starting a new Build session // FIXME: this is currently implemented as a hack, starting a new Build session
// within buildkit from the Control API. Ideally the Gateway API should allow to // within buildkit from the Control API. Ideally the Gateway API should allow to
// Export directly. // Export directly.
func (s Solver) Export(ctx context.Context, st llb.State, img *dockerfile2llb.Image, output bk.ExportEntry, platform specs.Platform) (*bk.SolveResponse, error) { func (s Solver) Export(ctx context.Context, st llb.State, img *dockerfile2llb.Image, output bk.ExportEntry, platform specs.Platform) (*bk.SolveResponse, error) {

View File

@@ -6,7 +6,7 @@ import (
engine.#Plan & { engine.#Plan & {
inputs: secrets: echo: command: { inputs: secrets: echo: command: {
name: "rtyet" // should fail because command doesnt exist name: "rtyet" // should fail because command doesn't exist
args: ["hello europa"] args: ["hello europa"]
} }

View File

@@ -10,7 +10,7 @@ import (
"github.com/docker/distribution/reference" "github.com/docker/distribution/reference"
) )
func getBuildkitInformation(ctx context.Context) (*BuilkitInformation, error) { func getBuildkitInformation(ctx context.Context) (*BuildkitInformation, error) {
formatString := "{{.Config.Image}};{{.State.Running}};{{if index .NetworkSettings.Networks \"host\"}}{{\"true\"}}{{else}}{{\"false\"}}{{end}}" formatString := "{{.Config.Image}};{{.State.Running}};{{if index .NetworkSettings.Networks \"host\"}}{{\"true\"}}{{else}}{{\"false\"}}{{end}}"
cmd := exec.CommandContext(ctx, cmd := exec.CommandContext(ctx,
"docker", "docker",
@@ -48,14 +48,14 @@ func getBuildkitInformation(ctx context.Context) (*BuilkitInformation, error) {
return nil, err return nil, err
} }
return &BuilkitInformation{ return &BuildkitInformation{
Version: tag.Tag(), Version: tag.Tag(),
IsActive: isActive, IsActive: isActive,
HaveHostNetwork: haveHostNetwork, HaveHostNetwork: haveHostNetwork,
}, nil }, nil
} }
type BuilkitInformation struct { type BuildkitInformation struct {
Version string Version string
IsActive bool IsActive bool
HaveHostNetwork bool HaveHostNetwork bool