dagger compute auto-fetches standard library from external repo

- The standard library is hosted at github.com/blocklayerhq/dagger-stdlib
- Once the dagger repo is made public, the stdlib can be hosted from the
  same repo
- The vendored cue.mod is merged with the stdlib at file granularity
- When developing dagger, set DAGGER_DEV_STDLIB to a local stdlib
  directory (see the sketch below)

Signed-off-by: Solomon Hykes <sh.github.6811@hykes.org>
Solomon Hykes 2021-02-12 22:41:55 +00:00
parent 77bf0be306
commit bff4186bf4
7 changed files with 42 additions and 353 deletions
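
For illustration, the stdlib op that the new stdlibLoader (in the diff below) compiles in each mode looks roughly like the following CUE values; the labels and the dev-mode path are placeholders:

// Default: fetch the stdlib from the external repo, pinned to a commit
stdlibDefault: {
	do:     "fetch-git"
	remote: "https://github.com/blocklayerhq/dagger-stdlib"
	ref:    "0625677b5aec1162621ad18fbd7b90dc9d7d54e5"
}

// Dev mode: DAGGER_DEV_STDLIB points at a local stdlib checkout; only its
// cue.mod/pkg tree is loaded, and `dir` is filled in from the env var
stdlibDev: {
	do:  "local"
	dir: "./stdlib" // hypothetical value of DAGGER_DEV_STDLIB
	include: ["cue.mod/pkg"]
}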

View File

@@ -15,7 +15,7 @@ test:
 .PHONY: cuefmt
 cuefmt:
-	@(cue fmt -s ./... && cue trim -s ./...)
+	@(cue fmt -s ./...)
 .PHONY: lint
 lint: cuefmt

View File

@@ -2,6 +2,7 @@ package dagger
 import (
 	"context"
+	"os"
 	"cuelang.org/go/cue"
 	cueflow "cuelang.org/go/tools/flow"
@@ -90,13 +91,41 @@ func (env *Env) SetInput(i *cc.Value) error {
 	)
 }
 
+func stdlibLoader() (*cc.Value, error) {
+	if dev := os.Getenv("DAGGER_DEV_STDLIB"); dev != "" {
+		v, err := cc.Compile("stdlib.cue", `
+			do: "local"
+			dir: string
+			include: ["cue.mod/pkg"]
+		`)
+		if err != nil {
+			return nil, err
+		}
+		return v.MergeTarget(dev, "dir")
+	}
+	return cc.Compile("stdlib.cue", `
+		do: "fetch-git"
+		remote: "https://github.com/blocklayerhq/dagger-stdlib"
+		ref: "0625677b5aec1162621ad18fbd7b90dc9d7d54e5"
+	`)
+}
+
 // Update the base configuration
 func (env *Env) Update(ctx context.Context, s Solver) error {
-	// execute updater script
 	p := NewPipeline(s, nil)
+	// always inject stdlib in cue.mod/pkg
+	stdlib, err := stdlibLoader()
+	if err != nil {
+		return err
+	}
+	if err := p.Do(ctx, stdlib); err != nil {
+		return err
+	}
+	// execute updater script
 	if err := p.Do(ctx, env.updater); err != nil {
 		return err
 	}
 	// load cue files produced by updater
 	// FIXME: BuildAll() to force all files (no required package..)
 	base, err := CueBuild(ctx, p.FS())
@@ -160,6 +189,10 @@ func (env *Env) LocalDirs() map[string]string {
 	)
 	// 2. Scan the environment updater
 	localdirs(env.Updater())
+	// 3. In dev mode, always include dev stdlib directory
+	if dev := os.Getenv("DAGGER_DEV_STDLIB"); dev != "" {
+		dirs[dev] = dev
+	}
 	return dirs
 }

View File

@@ -231,7 +231,13 @@ func (p *Pipeline) Local(ctx context.Context, op *cc.Value) error {
 	if err := op.Get("include").Decode(&include); err != nil {
 		return err
 	}
-	p.fs = p.fs.Set(llb.Local(dir, llb.FollowPaths(include)))
+	p.fs = p.fs.Change(func(st llb.State) llb.State {
+		return st.File(llb.Copy(
+			llb.Local(dir, llb.FollowPaths(include)),
+			"/",
+			"/",
+		))
+	})
 	return nil
 }
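
After this change, a local op is copied on top of the pipeline's existing filesystem instead of replacing it. Combined with the stdlib injection in Env.Update above, an environment update conceptually runs a script like the CUE sketch below (the updater's local op is hypothetical; real updaters vary per environment):

[
	// 1. Injected first: the pinned stdlib populates cue.mod/pkg
	{
		do:     "fetch-git"
		remote: "https://github.com/blocklayerhq/dagger-stdlib"
		ref:    "0625677b5aec1162621ad18fbd7b90dc9d7d54e5"
	},
	// 2. The environment updater, typically a local op for the project dir;
	//    its files are copied over the stdlib, so a vendored cue.mod is
	//    merged with the stdlib file by file instead of clobbering it
	{
		do:  "local"
		dir: "." // hypothetical project directory
	},
]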

View File

@@ -1,119 +0,0 @@
package dagger

// A DAG is the basic unit of programming in dagger.
// It is a special kind of program which runs as a pipeline of computing nodes running in parallel,
// instead of a sequence of operations to be run by a single node.
//
// It is a powerful way to automate various parts of an application delivery workflow:
// build, test, deploy, generate configuration, enforce policies, publish artifacts, etc.
//
// The DAG architecture has many benefits:
// - Because DAGs are made of nodes executing in parallel, they are easy to scale.
// - Because all inputs and outputs are snapshotted and content-addressed, DAGs
//   can easily be made repeatable, can be cached aggressively, and can be replayed
//   at will.
// - Because nodes are executed by the same container engine as docker-build, DAGs
//   can be developed using any language or technology capable of running in a Docker container.
//   Dockerfiles and docker images are natively supported for maximum compatibility.
//
// - Because DAGs are programmed declaratively with a powerful configuration language,
//   they are much easier to test, debug and refactor than traditional programming languages.
//
// To execute a DAG, the dagger runtime JIT-compiles it to a low-level format called
// llb, and executes it with buildkit.
// Think of buildkit as a specialized VM for running compute graphs; and dagger as
// a complete programming environment for that VM.
//
// The tradeoff for all those wonderful features is that a DAG architecture cannot be used
// for all software: only software that can be run as a pipeline.

// A dagger component is a configuration value augmented
// by scripts defining how to compute it, present it to a user,
// encrypt it, etc.

// FIXME: #Component will not match embedded scalars.
// use Runtime.isComponent() for a reliable check
#Component: {
	#dagger: #ComponentConfig
	...
}

// The contents of a #dagger annotation
#ComponentConfig: {
	// script to compute the value
	compute?: #Script
}

// Any component can be referenced as a directory, since
// every dagger script outputs a filesystem state (aka a directory)
#Dir: #Component

#Script: [...#Op]

// One operation in a script
#Op: #FetchContainer | #FetchGit | #Export | #Exec | #Local | #Copy | #Load

// Export a value from fs state to cue
#Export: {
	do: "export"
	// Source path in the container
	source: string
	format: "json" | "yaml" | *"string" | "number" | "boolean"
}

#Local: {
	do:  "local"
	dir: string
	include?: [...string] | *[]
}

// FIXME: bring back load (more efficient than copy)
#Load: {
	do:   "load"
	from: #Component | #Script
}

#Exec: {
	do: "exec"
	args: [...string]
	env?: [string]: string
	always?: true | *false
	dir:     string | *"/"
	mount?: [string]: #MountTmp | #MountCache | #MountComponent | #MountScript
}

#MountTmp:   "tmpfs"
#MountCache: "cache"

#MountComponent: {
	input: #Component
	path:  string | *"/"
}

#MountScript: {
	input: #Script
	path:  string | *"/"
}

#FetchContainer: {
	do:  "fetch-container"
	ref: string
}

#FetchGit: {
	do:     "fetch-git"
	remote: string
	ref:    string
}

#Copy: {
	do:   "copy"
	from: #Script | #Component
	src:  string | *"/"
	dest: string | *"/"
}

#TestScript: #Script & [
	{do: "fetch-container", ref: "alpine:latest"},
	{do: "exec", args: ["echo", "hello", "world"]},
]
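
For reference, a minimal hypothetical component that satisfies the schema deleted above (names and paths are made up for illustration):

example: #Component & {
	#dagger: compute: [
		{do: "fetch-container", ref: "alpine:latest"},
		{
			do: "exec"
			args: ["sh", "-c", "echo hello > /out.txt"]
			mount: "/var/cache/apk": "cache"
		},
		{do: "export", source: "/out.txt", format: "string"},
	]
}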

View File

@@ -1,50 +0,0 @@
package alpine

// Default version pinned to digest. Manually updated.
let defaultDigest = "sha256:3c7497bf0c7af93428242d6176e8f7905f2201d8fc5861f45be7a346b5f23436"

ref: string

// Match a combination of inputs 'version' and 'digest':
*{
	// no version, no digest:
	ref: "index.docker.io/alpine@\(defaultDigest)"
} | {
	// version, no digest
	version: string
	ref:     "alpine:\(version)"
} | {
	// digest, no version
	digest: string
	ref:    "alpine@\(digest)"
} | {
	// version and digest
	version: string
	digest:  string
	ref:     "alpine:\(version)@\(digest)"
}

// Packages to install
package: [string]: true | false | string

#dagger: compute: [
	{
		do:    "fetch-container"
		"ref": ref
	},
	for pkg, info in package {
		if (info & true) != _|_ {
			do: "exec"
			args: ["apk", "add", "-U", "--no-cache", pkg]
			// https://github.com/blocklayerhq/dagger/issues/6
			mount: foo: {}
		}
		if (info & string) != _|_ {
			do: "exec"
			args: ["apk", "add", "-U", "--no-cache", "\(pkg)\(info)"]
			// https://github.com/blocklayerhq/dagger/issues/6
			mount: foo: {}
		}
	},
]
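
For illustration, a hypothetical 'package' input and the apk ops the comprehension above emits for it:

package: {
	bash: true      // -> args: ["apk", "add", "-U", "--no-cache", "bash"]
	jq:   "=1.6-r1" // -> args: ["apk", "add", "-U", "--no-cache", "jq=1.6-r1"]
	curl: false     // -> no op emitted
}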

View File

@@ -1,119 +0,0 @@
package dagger

// A DAG is the basic unit of programming in dagger.
// It is a special kind of program which runs as a pipeline of computing nodes running in parallel,
// instead of a sequence of operations to be run by a single node.
//
// It is a powerful way to automate various parts of an application delivery workflow:
// build, test, deploy, generate configuration, enforce policies, publish artifacts, etc.
//
// The DAG architecture has many benefits:
// - Because DAGs are made of nodes executing in parallel, they are easy to scale.
// - Because all inputs and outputs are snapshotted and content-addressed, DAGs
//   can easily be made repeatable, can be cached aggressively, and can be replayed
//   at will.
// - Because nodes are executed by the same container engine as docker-build, DAGs
//   can be developed using any language or technology capable of running in a Docker container.
//   Dockerfiles and docker images are natively supported for maximum compatibility.
//
// - Because DAGs are programmed declaratively with a powerful configuration language,
//   they are much easier to test, debug and refactor than traditional programming languages.
//
// To execute a DAG, the dagger runtime JIT-compiles it to a low-level format called
// llb, and executes it with buildkit.
// Think of buildkit as a specialized VM for running compute graphs; and dagger as
// a complete programming environment for that VM.
//
// The tradeoff for all those wonderful features is that a DAG architecture cannot be used
// for all software: only software that can be run as a pipeline.

// A dagger component is a configuration value augmented
// by scripts defining how to compute it, present it to a user,
// encrypt it, etc.

// FIXME: #Component will not match embedded scalars.
// use Runtime.isComponent() for a reliable check
#Component: {
	#dagger: #ComponentConfig
	...
}

// The contents of a #dagger annotation
#ComponentConfig: {
	// script to compute the value
	compute?: #Script
}

// Any component can be referenced as a directory, since
// every dagger script outputs a filesystem state (aka a directory)
#Dir: #Component

#Script: [...#Op]

// One operation in a script
#Op: #FetchContainer | #FetchGit | #Export | #Exec | #Local | #Copy | #Load

// Export a value from fs state to cue
#Export: {
	do: "export"
	// Source path in the container
	source: string
	format: "json" | "yaml" | *"string" | "number" | "boolean"
}

#Local: {
	do:  "local"
	dir: string
	include?: [...string] | *[]
}

// FIXME: bring back load (more efficient than copy)
#Load: {
	do:   "load"
	from: #Component | #Script
}

#Exec: {
	do: "exec"
	args: [...string]
	env?: [string]: string
	always?: true | *false
	dir:     string | *"/"
	mount?: [string]: #MountTmp | #MountCache | #MountComponent | #MountScript
}

#MountTmp:   "tmpfs"
#MountCache: "cache"

#MountComponent: {
	input: #Component
	path:  string | *"/"
}

#MountScript: {
	input: #Script
	path:  string | *"/"
}

#FetchContainer: {
	do:  "fetch-container"
	ref: string
}

#FetchGit: {
	do:     "fetch-git"
	remote: string
	ref:    string
}

#Copy: {
	do:   "copy"
	from: #Script | #Component
	src:  string | *"/"
	dest: string | *"/"
}

#TestScript: #Script & [
	{do: "fetch-container", ref: "alpine:latest"},
	{do: "exec", args: ["echo", "hello", "world"]},
]

View File

@@ -1,62 +0,0 @@
package dagger

// Any component can be referenced as a directory, since
// every dagger script outputs a filesystem state (aka a directory)
#Dir: #dagger: compute: [...#Op]

// One operation in a script
#Op: #FetchContainer | #FetchGit | #Export | #Exec | #Local | #Copy | #Load | #Subdir

// Export a value from fs state to cue
#Export: {
	do: "export"
	// Source path in the container
	source: string
	format: "json" | "yaml" | *"string"
}

#Local: {
	do:  "local"
	dir: string
	include: [...string] | *[]
}

// FIXME: bring back load (more efficient than copy)
#Load: {
	do:   "load"
	from: _
}

#Subdir: {
	do:  "subdir"
	dir: string | *"/"
}

#Exec: {
	do: "exec"
	args: [...string]
	env?: [string]: string
	always?: true | *false
	dir:     string | *"/"
	mount: [string]: "tmp" | "cache" | {from: _, path: string | *"/"}
}

#FetchContainer: {
	do:  "fetch-container"
	ref: string
}

#FetchGit: {
	do:     "fetch-git"
	remote: string
	ref:    string
}

#Copy: {
	do:   "copy"
	from: _
	src:  string | *"/"
	dest: string | *"/"
}
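
For reference, a minimal hypothetical value that satisfies this newer op schema (names and paths are made up for illustration):

www: #Dir & {
	#dagger: compute: [
		{do: "fetch-container", ref: "alpine:latest"},
		{do: "exec", args: ["sh", "-c", "echo hello > /index.html"]},
		{do: "subdir", dir: "/"},
		{do: "export", source: "/index.html", format: "string"},
	]
}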