Move prototype 69-dagger-archon to top-level
Signed-off-by: Solomon Hykes <sh.github.6811@hykes.org>
434
dagger/client.go
Normal file
@@ -0,0 +1,434 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/sync/errgroup"
|
||||
|
||||
// Cue
|
||||
"cuelang.org/go/cue"
|
||||
cueerrors "cuelang.org/go/cue/errors"
|
||||
cueformat "cuelang.org/go/cue/format"
|
||||
|
||||
// buildkit
|
||||
bk "github.com/moby/buildkit/client"
|
||||
_ "github.com/moby/buildkit/client/connhelper/dockercontainer"
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
bkgw "github.com/moby/buildkit/frontend/gateway/client"
|
||||
|
||||
// docker output
|
||||
"github.com/containerd/console"
|
||||
"github.com/moby/buildkit/util/progress/progressui"
|
||||
)
|
||||
|
||||
const (
|
||||
defaultBuildkitHost = "docker-container://buildkitd"
|
||||
|
||||
bkConfigKey = "context"
|
||||
bkInputKey = ":dagger:input:"
|
||||
bkActionKey = ":dagger:action:"
|
||||
)
|
||||
|
||||
type Client struct {
|
||||
c *bk.Client
|
||||
|
||||
inputs map[string]llb.State
|
||||
localdirs map[string]string
|
||||
|
||||
BKFrontend bkgw.BuildFunc
|
||||
}
|
||||
|
||||
func NewClient(ctx context.Context, host string) (*Client, error) {
|
||||
// buildkit client
|
||||
if host == "" {
|
||||
host = os.Getenv("BUILDKIT_HOST")
|
||||
}
|
||||
if host == "" {
|
||||
host = defaultBuildkitHost
|
||||
}
|
||||
c, err := bk.New(ctx, host)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "buildkit client")
|
||||
}
|
||||
return &Client{
|
||||
c: c,
|
||||
inputs: map[string]llb.State{},
|
||||
localdirs: map[string]string{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (c *Client) ConnectInput(target string, input interface{}) error {
|
||||
var st llb.State
|
||||
switch in := input.(type) {
|
||||
case llb.State:
|
||||
st = in
|
||||
case string:
|
||||
// Generate a random local input label for security
|
||||
st = c.AddLocalDir(in, target)
|
||||
default:
|
||||
return fmt.Errorf("unsupported input type")
|
||||
}
|
||||
c.inputs[bkInputKey+target] = st
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Client) AddLocalDir(dir, label string, opts ...llb.LocalOption) llb.State {
|
||||
c.localdirs[label] = dir
|
||||
return llb.Local(label, opts...)
|
||||
}
|
||||
|
||||
// Set cue config for future calls.
|
||||
// input can be:
|
||||
// - llb.State: valid cue config directory
|
||||
// - io.Reader: valid cue source
|
||||
// - string: local path to valid cue file or directory
|
||||
// - func(llb.State) llb.State: modify existing state
|
||||
|
||||
func (c *Client) SetConfig(inputs ...interface{}) error {
|
||||
for _, input := range inputs {
|
||||
if err := c.setConfig(input); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
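// Illustrative sketch, not part of this commit: two of the input forms
// accepted by SetConfig, as listed in the comment above. The "./config"
// path and the inline cue snippet are hypothetical. Inputs are applied in
// order: a string (or io.Reader / llb.State) sets the config state, and a
// func form modifies the state loaded so far.
func exampleSetConfig(c *Client) error {
	return c.SetConfig(
		// string: local path to a cue file or directory
		"./config",
		// func(llb.State) llb.State: add a file on top of the loaded config
		func(st llb.State) llb.State {
			return st.File(llb.Mkfile("extra.cue", 0660, []byte(`debug: true`)))
		},
	)
}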
|
||||
|
||||
func (c *Client) setConfig(input interface{}) error {
|
||||
var st llb.State
|
||||
switch in := input.(type) {
|
||||
case llb.State:
|
||||
st = in
|
||||
case func(llb.State) llb.State:
|
||||
// Modify previous state
|
||||
last, ok := c.inputs[bkConfigKey]
|
||||
if !ok {
|
||||
last = llb.Scratch()
|
||||
}
|
||||
st = in(last)
|
||||
case io.Reader:
|
||||
contents, err := ioutil.ReadAll(in)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
st = llb.Scratch().File(llb.Mkfile(
|
||||
"config.cue",
|
||||
0660,
|
||||
contents,
|
||||
))
|
||||
// Interpret string as a path (dir or file)
|
||||
case string:
|
||||
info, err := os.Stat(in)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if info.IsDir() {
|
||||
// FIXME: include pattern *.cue ooh yeah
|
||||
st = c.AddLocalDir(in, "config",
|
||||
//llb.IncludePatterns([]string{"*.cue", "cue.mod"})),
|
||||
llb.FollowPaths([]string{"*.cue", "cue.mod"}),
|
||||
)
|
||||
} else {
|
||||
f, err := os.Open(in)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
return c.SetConfig(f)
|
||||
}
|
||||
}
|
||||
c.inputs[bkConfigKey] = st
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *Client) Run(ctx context.Context, action string) (*Output, error) {
|
||||
// Spawn Build() goroutine
|
||||
eg, ctx := errgroup.WithContext(ctx)
|
||||
events := make(chan *bk.SolveStatus)
|
||||
outr, outw := io.Pipe()
|
||||
// Spawn build function
|
||||
eg.Go(c.buildfn(ctx, action, events, outw))
|
||||
// Spawn print function(s)
|
||||
dispCtx := context.TODO()
|
||||
var eventsdup chan *bk.SolveStatus
|
||||
if os.Getenv("DOCKER_OUTPUT") != "" {
|
||||
eventsdup = make(chan *bk.SolveStatus)
|
||||
eg.Go(c.dockerprintfn(dispCtx, eventsdup, os.Stderr))
|
||||
}
|
||||
eg.Go(c.printfn(dispCtx, events, eventsdup))
|
||||
// Retrieve output
|
||||
out := NewOutput()
|
||||
eg.Go(c.outputfn(ctx, outr, out))
|
||||
return out, eg.Wait()
|
||||
}
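// Illustrative end-to-end sketch, not part of this commit. It assumes a
// buildkit daemon reachable via BUILDKIT_HOST (or the default
// docker-container://buildkitd), a local ./config cue directory, and a
// hypothetical "www.source" input target defined by that config.
func exampleRun(ctx context.Context) error {
	c, err := NewClient(ctx, "")
	if err != nil {
		return err
	}
	if err := c.SetConfig("./config"); err != nil {
		return err
	}
	if err := c.ConnectInput("www.source", "./src"); err != nil {
		return err
	}
	out, err := c.Run(ctx, "compute")
	if err != nil {
		return err
	}
	return out.Print(os.Stdout)
}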
|
||||
|
||||
func (c *Client) buildfn(ctx context.Context, action string, ch chan *bk.SolveStatus, w io.WriteCloser) func() error {
|
||||
return func() error {
|
||||
defer debugf("buildfn complete")
|
||||
// Setup solve options
|
||||
opts := bk.SolveOpt{
|
||||
FrontendAttrs: map[string]string{
|
||||
bkActionKey: action,
|
||||
},
|
||||
LocalDirs: c.localdirs,
|
||||
FrontendInputs: c.inputs,
|
||||
// FIXME: catch output & return as cue value
|
||||
Exports: []bk.ExportEntry{
|
||||
{
|
||||
Type: bk.ExporterTar,
|
||||
Output: func(m map[string]string) (io.WriteCloser, error) {
|
||||
return w, nil
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
// Setup frontend
|
||||
bkFrontend := c.BKFrontend
|
||||
if bkFrontend == nil {
|
||||
r := &Runtime{}
|
||||
bkFrontend = r.BKFrontend
|
||||
}
|
||||
resp, err := c.c.Build(ctx, opts, "", bkFrontend, ch)
|
||||
if err != nil {
|
||||
// Close exporter pipe so that export processor can return
|
||||
w.Close()
|
||||
err = errors.New(bkCleanError(err.Error()))
|
||||
return errors.Wrap(err, "buildkit solve")
|
||||
}
|
||||
for k, v := range resp.ExporterResponse {
|
||||
// FIXME consume exporter response
|
||||
fmt.Printf("exporter response: %s=%s\n", k, v)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Read tar export stream from buildkit Build(), and extract cue output
|
||||
func (c *Client) outputfn(ctx context.Context, r io.Reader, out *Output) func() error {
|
||||
return func() error {
|
||||
defer debugf("outputfn complete")
|
||||
tr := tar.NewReader(r)
|
||||
for {
|
||||
debugf("outputfn: reading next tar entry")
|
||||
h, err := tr.Next()
|
||||
if err == io.EOF {
|
||||
break
|
||||
}
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "read tar stream")
|
||||
}
|
||||
if !strings.HasSuffix(h.Name, ".cue") {
|
||||
debugf("skipping non-cue file from exporter tar stream: %s", h.Name)
|
||||
continue
|
||||
}
|
||||
debugf("outputfn: compiling & merging %q", h.Name)
|
||||
// FIXME: only doing this for debug. you can pass tr directly as io.Reader.
|
||||
contents, err := ioutil.ReadAll(tr)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
//if err := out.FillSource(h.Name, tr); err != nil {
|
||||
if err := out.FillSource(h.Name, contents); err != nil {
|
||||
debugf("error with %s: contents=\n------\n%s\n-----\n", h.Name, contents)
|
||||
return errors.Wrap(err, h.Name)
|
||||
}
|
||||
debugf("outputfn: DONE: compiling & merging %q", h.Name)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// Status of a node in the config tree being computed
|
||||
// Node may be a component, or a value within a component
|
||||
// (eg. a script or individual operation in a script)
|
||||
type Node struct {
|
||||
Path cue.Path
|
||||
*bk.Vertex
|
||||
}
|
||||
|
||||
func (n Node) ComponentPath() cue.Path {
|
||||
var parts []cue.Selector
|
||||
for _, sel := range n.Path.Selectors() {
|
||||
if strings.HasPrefix(sel.String(), "#") {
|
||||
break
|
||||
}
|
||||
parts = append(parts, sel)
|
||||
}
|
||||
return cue.MakePath(parts...)
|
||||
}
|
||||
|
||||
func (n Node) Logf(msg string, args ...interface{}) {
|
||||
componentPath := n.ComponentPath().String()
|
||||
args = append([]interface{}{componentPath}, args...)
|
||||
if msg != "" && !strings.HasSuffix(msg, "\n") {
|
||||
msg += "\n"
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "[%s] "+msg, args...)
|
||||
}
|
||||
|
||||
func (n Node) LogStream(nStream int, data []byte) {
|
||||
var stream string
|
||||
switch nStream {
|
||||
case 1:
|
||||
stream = "stdout"
|
||||
case 2:
|
||||
stream = "stderr"
|
||||
default:
|
||||
stream = fmt.Sprintf("%d", nStream)
|
||||
}
|
||||
// FIXME: use bufio reader?
|
||||
lines := strings.Split(string(data), "\n")
|
||||
for _, line := range lines {
|
||||
n.Logf("[%s] %s", stream, line)
|
||||
}
|
||||
}
|
||||
|
||||
func (n Node) LogError(errmsg string) {
|
||||
n.Logf("ERROR: %s", bkCleanError(errmsg))
|
||||
}
|
||||
|
||||
func (c *Client) printfn(ctx context.Context, ch, ch2 chan *bk.SolveStatus) func() error {
|
||||
return func() error {
|
||||
// Node status mapped to buildkit vertex digest
|
||||
nodesByDigest := map[string]*Node{}
|
||||
// Node status mapped to cue path
|
||||
nodesByPath := map[string]*Node{}
|
||||
|
||||
defer debugf("printfn complete")
|
||||
if ch2 != nil {
|
||||
defer close(ch2)
|
||||
}
|
||||
ticker := time.NewTicker(150 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
case <-ticker.C:
|
||||
case status, ok := <-ch:
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
if ch2 != nil {
|
||||
ch2 <- status
|
||||
}
|
||||
debugf("status event: vertexes:%d statuses:%d logs:%d\n",
|
||||
len(status.Vertexes),
|
||||
len(status.Statuses),
|
||||
len(status.Logs),
|
||||
)
|
||||
for _, v := range status.Vertexes {
|
||||
p := cue.ParsePath(v.Name)
|
||||
if err := p.Err(); err != nil {
|
||||
debugf("ignoring buildkit vertex %q: not a valid cue path", p.String())
|
||||
continue
|
||||
}
|
||||
n := &Node{
|
||||
Path: p,
|
||||
Vertex: v,
|
||||
}
|
||||
nodesByPath[n.Path.String()] = n
|
||||
nodesByDigest[n.Digest.String()] = n
|
||||
if n.Error != "" {
|
||||
n.LogError(n.Error)
|
||||
}
|
||||
}
|
||||
for _, log := range status.Logs {
|
||||
if n, ok := nodesByDigest[log.Vertex.String()]; ok {
|
||||
n.LogStream(log.Stream, log.Data)
|
||||
}
|
||||
}
|
||||
// debugJSON(status)
|
||||
// FIXME: callbacks for extracting stream/result
|
||||
// see proto 67
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// A helper to remove noise from buildkit error messages.
|
||||
// FIXME: Obviously a cleaner solution would be nice.
|
||||
func bkCleanError(msg string) string {
|
||||
noise := []string{
|
||||
"executor failed running ",
|
||||
"buildkit-runc did not terminate successfully",
|
||||
"rpc error: code = Unknown desc =",
|
||||
"failed to solve: ",
|
||||
}
|
||||
for _, s := range noise {
|
||||
msg = strings.Replace(msg, s, "", -1)
|
||||
}
|
||||
return msg
|
||||
}
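// Illustrative sketch, not part of this commit: bkCleanError strips the
// noise fragments listed above, leaving only the useful part of the message.
func exampleCleanError() string {
	raw := "failed to solve: rpc error: code = Unknown desc = exit code 1"
	return bkCleanError(raw) // " exit code 1" (noise removed, leading space kept)
}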
|
||||
|
||||
func (c *Client) dockerprintfn(ctx context.Context, ch chan *bk.SolveStatus, out io.Writer) func() error {
|
||||
return func() error {
|
||||
defer debugf("dockerprintfn complete")
|
||||
var cons console.Console
|
||||
// FIXME: use smarter writer from blr
|
||||
return progressui.DisplaySolveStatus(ctx, "", cons, out, ch)
|
||||
}
|
||||
}
|
||||
|
||||
type Output struct {
|
||||
r *cue.Runtime
|
||||
inst *cue.Instance
|
||||
}
|
||||
|
||||
func NewOutput() *Output {
|
||||
r := &cue.Runtime{}
|
||||
inst, _ := r.Compile("", "")
|
||||
return &Output{
|
||||
r: r,
|
||||
inst: inst,
|
||||
}
|
||||
}
|
||||
|
||||
func (o *Output) Print(w io.Writer) error {
|
||||
v := o.Cue().Value().Eval()
|
||||
b, err := cueformat.Node(v.Syntax())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = w.Write(b)
|
||||
return err
|
||||
}
|
||||
|
||||
func (o *Output) JSON() JSON {
|
||||
return cueToJSON(o.Cue().Value())
|
||||
}
|
||||
|
||||
func (o *Output) Cue() *cue.Instance {
|
||||
return o.inst
|
||||
}
|
||||
|
||||
func (o *Output) FillSource(filename string, x interface{}) error {
|
||||
inst, err := o.r.Compile(filename, x)
|
||||
if err != nil {
|
||||
return fmt.Errorf("compile %s: %s", filename, cueerrors.Details(err, nil))
|
||||
}
|
||||
if err := o.FillValue(inst.Value()); err != nil {
|
||||
return fmt.Errorf("merge %s: %s", filename, cueerrors.Details(err, nil))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (o *Output) FillValue(x interface{}) error {
|
||||
inst, err := o.inst.Fill(x)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := inst.Value().Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
o.inst = inst
|
||||
return nil
|
||||
}
|
||||
158
dagger/gen.go
Normal file
@@ -0,0 +1,158 @@
|
||||
package dagger
|
||||
|
||||
// Generated by gen.sh. DO NOT EDIT.
|
||||
|
||||
var DaggerSpec = `
|
||||
package dagger
|
||||
|
||||
// A DAG is the basic unit of programming in dagger.
|
||||
// It is a special kind of program which runs as a pipeline of computing nodes running in parallel,
|
||||
// instead of a sequence of operations to be run by a single node.
|
||||
//
|
||||
// It is a powerful way to automate various parts of an application delivery workflow:
|
||||
// build, test, deploy, generate configuration, enforce policies, publish artifacts, etc.
|
||||
//
|
||||
// The DAG architecture has many benefits:
|
||||
// - Because DAGs are made of nodes executing in parallel, they are easy to scale.
|
||||
// - Because all inputs and outputs are snapshotted and content-addressed, DAGs
|
||||
// can easily be made repeatable, can be cached aggressively, and can be replayed
|
||||
// at will.
|
||||
// - Because nodes are executed by the same container engine as docker-build, DAGs
|
||||
// can be developed using any language or technology capable of running in a docker container.
|
||||
// Dockerfiles and docker images are natively supported for maximum compatibility.
|
||||
//
|
||||
// - Because DAGs are programmed declaratively with a powerful configuration language,
|
||||
// they are much easier to test, debug and refactor than traditional programming languages.
|
||||
//
|
||||
// To execute a DAG, the dagger runtime JIT-compiles it to a low-level format called
|
||||
// llb, and executes it with buildkit.
|
||||
// Think of buildkit as a specialized VM for running compute graphs; and dagger as
|
||||
// a complete programming environment for that VM.
|
||||
//
|
||||
// The tradeoff for all those wonderful features is that a DAG architecture cannot be used
|
||||
// for all software: only software that can be run as a pipeline.
|
||||
//
|
||||
|
||||
// A dagger component is a configuration value augmented
|
||||
// by scripts defining how to compute it, present it to a user,
|
||||
// encrypt it, etc.
|
||||
|
||||
// FIXME: #Component will not match embedded scalars.
|
||||
// use Runtime.isComponent() for a reliable check
|
||||
#Component: {
|
||||
#dagger: #ComponentConfig
|
||||
...
|
||||
}
|
||||
|
||||
// The contents of a #dagger annotation
|
||||
#ComponentConfig: {
|
||||
input?: bool
|
||||
|
||||
// script to compute the value
|
||||
compute?: #Script
|
||||
|
||||
terminal?: {
|
||||
// Display a message when opening a terminal session
|
||||
greeting?: string
|
||||
command: [string]: #Script
|
||||
}
|
||||
// Configure how the component is incorporated into user settings.
|
||||
// Configure how the end-user can configure this component
|
||||
settings?: {
|
||||
// If not specified, scrape from comments
|
||||
title?: string
|
||||
description?: string
|
||||
// Disable user input, even if incomplete?
|
||||
hidden: true | *false
|
||||
ui: _ // insert here something which can be compiled to react-jsonschema-form
|
||||
// Show the cue default value to the user, as a default input value?
|
||||
showDefault: true | *false
|
||||
|
||||
// Insert information needed by:
|
||||
// 1) clients to encrypt
|
||||
// ie. web wizard, cli
|
||||
// 2) middleware to implement deciphering in the cuellb pipeline
|
||||
// eg. integration with cloud KMS, Vault...
|
||||
//
|
||||
// 3) connectors to make sure secrets are preserved
|
||||
encrypt?: {
|
||||
pubkey: string
|
||||
cipher: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Any component can be referenced as a directory, since
|
||||
// every dagger script outputs a filesystem state (aka a directory)
|
||||
#Dir: #Component
|
||||
|
||||
#Script: [...#Op]
|
||||
|
||||
// One operation in a script
|
||||
#Op: #FetchContainer | #FetchGit | #Export | #Exec | #Load | #Copy
|
||||
|
||||
// Export a value from fs state to cue
|
||||
#Export: {
|
||||
do: "export"
|
||||
// Source path in the container
|
||||
source: string
|
||||
format: "json"|"yaml"|*"string"|"number"|"boolean"
|
||||
}
|
||||
|
||||
#Load: #LoadComponent | #LoadScript
|
||||
#LoadComponent: {
|
||||
do: "load"
|
||||
from: #Component
|
||||
}
|
||||
#LoadScript: {
|
||||
do: "load"
|
||||
from: #Script
|
||||
}
|
||||
|
||||
|
||||
#Exec: {
|
||||
do: "exec"
|
||||
args: [...string]
|
||||
env: [string]: string
|
||||
always: true | *false
|
||||
dir: string | *"/"
|
||||
mount?: [string]: #MountTmp | #MountCache | #MountComponent | #MountScript
|
||||
}
|
||||
|
||||
#MountTmp: "tmpfs"
|
||||
#MountCache: "cache"
|
||||
#MountComponent: {
|
||||
input: #Component
|
||||
path: string | *"/"
|
||||
}
|
||||
#MountScript: {
|
||||
input: #Script
|
||||
path: string | *"/"
|
||||
}
|
||||
|
||||
#FetchContainer: {
|
||||
do: "fetch-container"
|
||||
ref: string
|
||||
}
|
||||
|
||||
#FetchGit: {
|
||||
do: "fetch-git"
|
||||
remote: string
|
||||
ref: string
|
||||
}
|
||||
|
||||
#Copy: {
|
||||
do: "copy"
|
||||
from: #Script | #Component
|
||||
src: string | *"/"
|
||||
dest: string | *"/"
|
||||
}
|
||||
|
||||
|
||||
#TestScript: #Script & [
|
||||
{ do: "fetch-container", ref: "alpine:latest" },
|
||||
{ do: "exec", args: ["echo", "hello", "world" ] }
|
||||
]
|
||||
`
|
||||
17
dagger/gen.sh
Executable file
@@ -0,0 +1,17 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
cue eval spec.cue >/dev/null
|
||||
(
|
||||
cat <<'EOF'
|
||||
package dagger
|
||||
|
||||
// Generated by gen.sh. DO NOT EDIT.
|
||||
|
||||
var DaggerSpec = `
|
||||
EOF
|
||||
cat spec.cue
|
||||
cat <<'EOF'
|
||||
`
|
||||
EOF
|
||||
) > gen.go
|
||||
326
dagger/job.go
Normal file
@@ -0,0 +1,326 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
cueerrors "cuelang.org/go/cue/errors"
|
||||
cueload "cuelang.org/go/cue/load"
|
||||
cueflow "cuelang.org/go/tools/flow"
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
bkgw "github.com/moby/buildkit/frontend/gateway/client"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Solver interface {
|
||||
Solve(context.Context, llb.State) (bkgw.Reference, error)
|
||||
}
|
||||
|
||||
// 1 buildkit build = 1 job
|
||||
type Job struct {
|
||||
c bkgw.Client
|
||||
// needed for cue operations
|
||||
r *Runtime
|
||||
}
|
||||
|
||||
// Execute and wrap the result in a buildkit result
|
||||
func (job Job) BKExecute(ctx context.Context) (_r *bkgw.Result, _e error) {
|
||||
debugf("Executing bk frontend")
|
||||
// wrap errors to avoid crashing buildkit with cue error types (why??)
|
||||
defer func() {
|
||||
if _e != nil {
|
||||
_e = fmt.Errorf("%s", cueerrors.Details(_e, nil))
|
||||
debugf("execute returned an error. Wrapping...")
|
||||
}
|
||||
}()
|
||||
out, err := job.Execute(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// encode job output to buildkit result
|
||||
debugf("[runtime] job executed. Encoding output")
|
||||
// FIXME: we can't serialize result to standalone cue (with no imports).
|
||||
// So the client cannot safely compile output without access to the same cue.mod
|
||||
// as the runtime (which we don't want).
|
||||
// So for now we return the output as json, still parsed as cue on the client
|
||||
// to keep our options open. Once there is a "tree shake" primitive, we can
|
||||
// use that to return cue.
|
||||
//
|
||||
// Uncomment to return actual cue:
|
||||
// ----
|
||||
// outbytes, err := cueformat.Node(out.Value().Eval().Syntax())
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
// ----
|
||||
outbytes := cueToJSON(out.Value())
|
||||
debugf("[runtime] output encoded. Writing output to exporter")
|
||||
outref, err := job.Solve(ctx,
|
||||
llb.Scratch().File(llb.Mkfile("computed.cue", 0600, outbytes)),
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
debugf("[runtime] output written to exporter. returning to buildkit solver")
|
||||
res := bkgw.NewResult()
|
||||
res.SetRef(outref)
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (job Job) Execute(ctx context.Context) (_i *cue.Instance, _e error) {
|
||||
debugf("[runtime] Execute()")
|
||||
defer func() { debugf("[runtime] DONE Execute(): err=%v", _e) }()
|
||||
state, err := job.Config(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Merge input information into the cue config
|
||||
inputs, err := job.Inputs(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for target := range inputs {
|
||||
// FIXME: cleaner code generation, missing cue.Value.FillPath
|
||||
state, err = job.r.fill(state, `#dagger: input: true`, target)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "connect input %q", target)
|
||||
}
|
||||
}
|
||||
action := job.Action()
|
||||
switch action {
|
||||
case "compute":
|
||||
return job.doCompute(ctx, state)
|
||||
case "export":
|
||||
return job.doExport(ctx, state)
|
||||
default:
|
||||
return job.doExport(ctx, state)
|
||||
}
|
||||
}
|
||||
|
||||
func (job Job) doExport(ctx context.Context, state *cue.Instance) (*cue.Instance, error) {
|
||||
return state, nil
|
||||
}
|
||||
|
||||
func (job Job) doCompute(ctx context.Context, state *cue.Instance) (*cue.Instance, error) {
|
||||
out, err := job.r.Compile("computed.cue", "")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// Setup cueflow
|
||||
debugf("Setting up cueflow")
|
||||
flow := cueflow.New(
|
||||
&cueflow.Config{
|
||||
UpdateFunc: func(c *cueflow.Controller, t *cueflow.Task) error {
|
||||
debugf("cueflow event")
|
||||
if t == nil {
|
||||
return nil
|
||||
}
|
||||
debugf("cueflow task %q: %s", t.Path().String(), t.State().String())
|
||||
if t.State() == cueflow.Terminated {
|
||||
debugf("cueflow task %q: filling result", t.Path().String())
|
||||
out, err = out.Fill(t.Value(), cuePathToStrings(t.Path())...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// FIXME: catch merge errors early with state
|
||||
}
|
||||
return nil
|
||||
},
|
||||
},
|
||||
state,
|
||||
// Task match func
|
||||
func(v cue.Value) (cueflow.Runner, error) {
|
||||
// Is v a component (has #dagger) with a field 'compute' ?
|
||||
isComponent, err := job.r.isComponent(v, "compute")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !isComponent {
|
||||
return nil, nil
|
||||
}
|
||||
debugf("[%s] component detected\n", v.Path().String())
|
||||
// task runner func
|
||||
runner := cueflow.RunnerFunc(func(t *cueflow.Task) error {
|
||||
computeScript := t.Value().LookupPath(cue.ParsePath("#dagger.compute"))
|
||||
script, err := job.newScript(computeScript)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Run the script & fill the result into the task
|
||||
return script.Run(ctx, t)
|
||||
})
|
||||
return runner, nil
|
||||
},
|
||||
)
|
||||
debugf("Running cueflow")
|
||||
if err := flow.Run(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
debugf("Completed cueflow run. Merging result.")
|
||||
state, err = state.Fill(out)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
debugf("Result merged")
|
||||
// Return only the computed values
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (job Job) bk() bkgw.Client {
|
||||
return job.c
|
||||
}
|
||||
|
||||
func (job Job) Action() string {
|
||||
opts := job.bk().BuildOpts().Opts
|
||||
if action, ok := opts[bkActionKey]; ok {
|
||||
return action
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// Load the cue config for this job
|
||||
// (received as llb input)
|
||||
func (job Job) Config(ctx context.Context) (*cue.Instance, error) {
|
||||
src := llb.Local(bkConfigKey,
|
||||
llb.SessionID(job.bk().BuildOpts().SessionID),
|
||||
llb.SharedKeyHint(bkConfigKey),
|
||||
llb.WithCustomName("load config"),
|
||||
)
|
||||
|
||||
bkInputs, err := job.bk().Inputs(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if st, ok := bkInputs[bkConfigKey]; ok {
|
||||
src = st
|
||||
}
|
||||
// job.runDebug(ctx, src, "ls", "-la", "/mnt")
|
||||
return job.LoadCue(ctx, src)
|
||||
}
|
||||
|
||||
func (job Job) runDebug(ctx context.Context, mnt llb.State, args ...string) error {
|
||||
opts := []llb.RunOption{
|
||||
llb.Args(args),
|
||||
llb.AddMount("/mnt", mnt),
|
||||
}
|
||||
cmd := llb.Image("alpine").Run(opts...).Root()
|
||||
ref, err := job.Solve(ctx, cmd)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "debug")
|
||||
}
|
||||
// force non-lazy solve
|
||||
if _, err := ref.ReadDir(ctx, bkgw.ReadDirRequest{Path: "/"}); err != nil {
|
||||
return errors.Wrap(err, "debug")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (job Job) Inputs(ctx context.Context) (map[string]llb.State, error) {
|
||||
bkInputs, err := job.bk().Inputs(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
inputs := map[string]llb.State{}
|
||||
for key, input := range bkInputs {
|
||||
if !strings.HasPrefix(key, bkInputKey) {
|
||||
continue
|
||||
}
|
||||
target := strings.Replace(key, bkInputKey, "", 1)
|
||||
targetPath := cue.ParsePath(target)
|
||||
if err := targetPath.Err(); err != nil {
|
||||
return nil, errors.Wrapf(err, "input target %q", target)
|
||||
}
|
||||
// FIXME: check that the path can be passed to Fill
|
||||
// (eg. only regular fields, no array indexes, no defs)
|
||||
// see cuePathToStrings
|
||||
inputs[target] = input
|
||||
}
|
||||
return inputs, nil
|
||||
}
|
||||
|
||||
// loadFiles recursively loads all .cue files from a buildkit gateway
|
||||
// FIXME: this is highly inefficient.
|
||||
func loadFiles(ctx context.Context, ref bkgw.Reference, p, overlayPrefix string, overlay map[string]cueload.Source) error {
|
||||
// FIXME: we cannot use `IncludePattern` here, otherwise sub directories
|
||||
// (e.g. "cue.mod") will be skipped.
|
||||
files, err := ref.ReadDir(ctx, bkgw.ReadDirRequest{
|
||||
Path: p,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, f := range files {
|
||||
fPath := path.Join(p, f.GetPath())
|
||||
if f.IsDir() {
|
||||
if err := loadFiles(ctx, ref, fPath, overlayPrefix, overlay); err != nil {
|
||||
return err
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if filepath.Ext(fPath) != ".cue" {
|
||||
continue
|
||||
}
|
||||
|
||||
contents, err := ref.ReadFile(ctx, bkgw.ReadRequest{
|
||||
Filename: fPath,
|
||||
})
|
||||
if err != nil {
|
||||
return errors.Wrap(err, f.GetPath())
|
||||
}
|
||||
overlay[path.Join(overlayPrefix, fPath)] = cueload.FromBytes(contents)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (job Job) LoadCue(ctx context.Context, st llb.State, args ...string) (*cue.Instance, error) {
|
||||
// The CUE overlay needs to be prefixed with a path that does not conflict with the
|
||||
// local filesystem, otherwise Cue will merge the Overlay with whatever Cue
|
||||
// files it finds locally.
|
||||
const overlayPrefix = "/config"
|
||||
|
||||
buildConfig := &cueload.Config{
|
||||
Dir: overlayPrefix,
|
||||
Overlay: map[string]cueload.Source{},
|
||||
}
|
||||
buildArgs := args
|
||||
|
||||
// Inject cue files from llb state into overlay
|
||||
ref, err := job.Solve(ctx, st)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := loadFiles(ctx, ref, ".", overlayPrefix, buildConfig.Overlay); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
instances := cueload.Instances(buildArgs, buildConfig)
|
||||
if len(instances) != 1 {
|
||||
return nil, errors.New("only one package is supported at a time")
|
||||
}
|
||||
inst, err := job.r.Build(instances[0])
|
||||
if err != nil {
|
||||
return nil, cueErr(err)
|
||||
}
|
||||
return inst, nil
|
||||
}
|
||||
|
||||
func (job Job) Solve(ctx context.Context, st llb.State) (bkgw.Reference, error) {
|
||||
// marshal llb
|
||||
def, err := st.Marshal(ctx, llb.LinuxAmd64)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// call solve
|
||||
res, err := job.bk().Solve(ctx, bkgw.SolveRequest{Definition: def.ToPB()})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// always use single reference (ignore multiple outputs & metadata)
|
||||
return res.SingleRef()
|
||||
}
|
||||
138
dagger/json.go
Normal file
@@ -0,0 +1,138 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
cuejson "cuelang.org/go/encoding/json"
|
||||
"github.com/KromDaniel/jonson"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type JSON []byte
|
||||
|
||||
func (s JSON) Get(path ...string) ([]byte, error) {
|
||||
if s == nil {
|
||||
s = []byte("{}")
|
||||
}
|
||||
var (
|
||||
root *jonson.JSON
|
||||
)
|
||||
root, err := jonson.Parse(s)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "parse root json")
|
||||
}
|
||||
pointer := root
|
||||
for _, key := range path {
|
||||
// FIXME: we can traverse maps but not arrays (need to handle int keys)
|
||||
pointer = pointer.At(key)
|
||||
}
|
||||
// FIXME: use indent function from stdlib
|
||||
return pointer.ToJSON()
|
||||
}
|
||||
|
||||
func (s JSON) Unset(path ...string) (JSON, error) {
|
||||
if s == nil {
|
||||
s = []byte("{}")
|
||||
}
|
||||
var (
|
||||
root *jonson.JSON
|
||||
)
|
||||
root, err := jonson.Parse(s)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unset: parse root json")
|
||||
}
|
||||
var (
|
||||
pointer = root
|
||||
pathDir []string
|
||||
)
|
||||
if len(path) > 0 {
|
||||
pathDir = path[:len(path)-1]
|
||||
}
|
||||
for _, key := range pathDir {
|
||||
pointer = pointer.At(key)
|
||||
}
|
||||
if len(path) == 0 {
|
||||
pointer.Set(nil)
|
||||
} else {
|
||||
key := path[len(path)-1]
|
||||
pointer.DeleteMapKey(key)
|
||||
}
|
||||
return root.ToJSON()
|
||||
}
|
||||
|
||||
func (s JSON) Set(valueJSON []byte, path ...string) (JSON, error) {
|
||||
if s == nil {
|
||||
s = []byte("{}")
|
||||
}
|
||||
var (
|
||||
root *jonson.JSON
|
||||
value *jonson.JSON
|
||||
)
|
||||
root, err := jonson.Parse(s)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "parse root json")
|
||||
}
|
||||
value, err = jonson.Parse(valueJSON)
|
||||
if err != nil {
|
||||
return nil, errors.Wrapf(err, "Set: parse value json: |%s|", valueJSON)
|
||||
}
|
||||
var (
|
||||
pointer = root
|
||||
pathDir []string
|
||||
)
|
||||
if len(path) > 0 {
|
||||
pathDir = path[:len(path)-1]
|
||||
}
|
||||
for _, key := range pathDir {
|
||||
if !pointer.ObjectKeyExists(key) {
|
||||
pointer.MapSet(key, jonson.NewEmptyJSONMap())
|
||||
}
|
||||
pointer = pointer.At(key)
|
||||
}
|
||||
if len(path) == 0 {
|
||||
pointer.Set(value)
|
||||
} else {
|
||||
key := path[len(path)-1]
|
||||
pointer.MapSet(key, value)
|
||||
}
|
||||
return root.ToJSON()
|
||||
}
|
||||
|
||||
func (s JSON) Merge(layers ...JSON) (JSON, error) {
|
||||
r := new(cue.Runtime)
|
||||
var resultInst *cue.Instance
|
||||
for i, l := range append([]JSON{s}, layers...) {
|
||||
if l == nil {
|
||||
continue
|
||||
}
|
||||
filename := fmt.Sprintf("%d", i)
|
||||
inst, err := cuejson.Decode(r, filename, []byte(l))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resultInst == nil {
|
||||
resultInst = inst
|
||||
} else {
|
||||
resultInst, err = resultInst.Fill(inst.Value())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resultInst.Err != nil {
|
||||
return nil, resultInst.Err
|
||||
}
|
||||
}
|
||||
}
|
||||
b, err := resultInst.Value().MarshalJSON()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return JSON(b), nil
|
||||
}
|
||||
|
||||
func (s JSON) String() string {
|
||||
if s == nil {
|
||||
return "{}"
|
||||
}
|
||||
return string(s)
|
||||
}
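// Illustrative sketch, not part of this commit: the JSON helpers above
// address nested keys by path, and Merge unifies layers through cue. The
// keys used here are hypothetical.
func exampleJSON() (string, error) {
	var doc JSON
	doc, err := doc.Set([]byte(`"hello"`), "greeting", "en")
	if err != nil {
		return "", err
	}
	doc, err = doc.Merge(JSON(`{"greeting": {"fr": "bonjour"}}`))
	if err != nil {
		return "", err
	}
	// The merged document contains both greeting.en and greeting.fr.
	return doc.String(), nil
}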
|
||||
109
dagger/runtime.go
Normal file
@@ -0,0 +1,109 @@
|
||||
//go:generate sh gen.sh
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sync"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
cueerrors "cuelang.org/go/cue/errors"
|
||||
bkgw "github.com/moby/buildkit/frontend/gateway/client"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Runtime struct {
|
||||
l sync.Mutex
|
||||
|
||||
cue.Runtime
|
||||
}
|
||||
|
||||
func (r *Runtime) Cue() *cue.Runtime {
|
||||
return &(r.Runtime)
|
||||
}
|
||||
|
||||
func (r *Runtime) fill(inst *cue.Instance, v interface{}, target string) (*cue.Instance, error) {
|
||||
targetPath := cue.ParsePath(target)
|
||||
if err := targetPath.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
p := cuePathToStrings(targetPath)
|
||||
if src, ok := v.(string); ok {
|
||||
vinst, err := r.Compile(target, src)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return inst.Fill(vinst.Value(), p...)
|
||||
}
|
||||
return inst.Fill(v, p...)
|
||||
}
|
||||
|
||||
// func (r Runtime) Run(...)
|
||||
// Buildkit run entrypoint
|
||||
func (r *Runtime) BKFrontend(ctx context.Context, c bkgw.Client) (*bkgw.Result, error) {
|
||||
return r.newJob(c).BKExecute(ctx)
|
||||
}
|
||||
|
||||
func (r *Runtime) newJob(c bkgw.Client) Job {
|
||||
return Job{
|
||||
r: r,
|
||||
c: c,
|
||||
}
|
||||
}
|
||||
|
||||
// Check whether a value is a valid component
|
||||
// FIXME: calling matchSpec("#Component") is not enough because
|
||||
// it does not match embedded scalars.
|
||||
func (r *Runtime) isComponent(v cue.Value, fields ...string) (bool, error) {
|
||||
cfg := v.LookupPath(cue.ParsePath("#dagger"))
|
||||
if cfg.Err() != nil {
|
||||
// No "#dagger" -> not a component
|
||||
return false, nil
|
||||
}
|
||||
for _, field := range fields {
|
||||
if cfg.Lookup(field).Err() != nil {
|
||||
return false, nil
|
||||
}
|
||||
}
|
||||
if err := r.validateSpec(cfg, "#ComponentConfig"); err != nil {
|
||||
return true, errors.Wrap(err, "invalid #dagger")
|
||||
}
|
||||
return true, nil
|
||||
}
|
||||
|
||||
// eg. validateSpec(op, "#Op")
|
||||
// eg. validateSpec(dag, "#DAG")
|
||||
func (r *Runtime) validateSpec(v cue.Value, defpath string) (err error) {
|
||||
// Expand cue errors to get full details
|
||||
// FIXME: there is probably a cleaner way to do this.
|
||||
defer func() {
|
||||
if err != nil {
|
||||
err = fmt.Errorf("%s", cueerrors.Details(err, nil))
|
||||
}
|
||||
}()
|
||||
r.l.Lock()
|
||||
defer r.l.Unlock()
|
||||
|
||||
// FIXME cache spec instance
|
||||
spec, err := r.Compile("dagger.cue", DaggerSpec)
|
||||
if err != nil {
|
||||
panic("invalid spec")
|
||||
}
|
||||
def := spec.Value().LookupPath(cue.ParsePath(defpath))
|
||||
if err := def.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
v = v.Eval()
|
||||
if err := v.Validate(); err != nil {
|
||||
return err
|
||||
}
|
||||
res := def.Unify(v)
|
||||
if err := res.Validate(cue.Final()); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *Runtime) matchSpec(v cue.Value, def string) bool {
|
||||
return r.validateSpec(v, def) == nil
|
||||
}
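// Illustrative sketch, not part of this commit: checking a raw op value
// against the embedded spec with matchSpec and validateSpec.
func exampleMatchSpec(r *Runtime) error {
	inst, err := r.Compile("", `do: "fetch-container", ref: "alpine"`)
	if err != nil {
		return err
	}
	op := inst.Value()
	if !r.matchSpec(op, "#FetchContainer") {
		return fmt.Errorf("expected value to match #FetchContainer")
	}
	// #Op is the disjunction of all operations, so it should match as well.
	return r.validateSpec(op, "#Op")
}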
|
||||
326
dagger/script.go
Normal file
@@ -0,0 +1,326 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
type Script struct {
|
||||
v cue.Value
|
||||
job Job
|
||||
|
||||
// current state
|
||||
state *State
|
||||
}
|
||||
|
||||
func (job Job) newScript(v cue.Value) (*Script, error) {
|
||||
s := &Script{
|
||||
v: v,
|
||||
job: job,
|
||||
state: NewState(job),
|
||||
}
|
||||
if err := s.Validate(); err != nil {
|
||||
return nil, s.err(err, "invalid script")
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
type Action func(context.Context, cue.Value, Fillable) error
|
||||
|
||||
func (s *Script) Run(ctx context.Context, out Fillable) error {
|
||||
op, err := s.Cue().List()
|
||||
if err != nil {
|
||||
return s.err(err, "run")
|
||||
}
|
||||
i := 0
|
||||
for op.Next() {
|
||||
// If op is not concrete, interrupt execution without error.
|
||||
// This allows gradual resolution: compute what you can compute.. leave the rest incomplete.
|
||||
if !cueIsConcrete(op.Value()) {
|
||||
debugf("%s: non-concrete op. Leaving script unfinished", op.Value().Path().String())
|
||||
return nil
|
||||
}
|
||||
if err := s.Do(ctx, op.Value(), out); err != nil {
|
||||
return s.err(err, "run op %d", i+1)
|
||||
}
|
||||
i += 1
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Script) Do(ctx context.Context, op cue.Value, out Fillable) error {
|
||||
// Skip no-ops without error (allows more flexible use of if())
|
||||
// FIXME: maybe not needed once a clear pattern is established for
|
||||
// how to use if() in a script?
|
||||
if cueIsEmptyStruct(op) {
|
||||
return nil
|
||||
}
|
||||
actions := map[string]Action{
|
||||
// "#Copy": s.copy,
|
||||
"#Exec": s.exec,
|
||||
"#Export": s.export,
|
||||
"#FetchContainer": s.fetchContainer,
|
||||
"#FetchGit": s.fetchGit,
|
||||
"#Load": s.load,
|
||||
"#Copy": s.copy,
|
||||
}
|
||||
for def, action := range actions {
|
||||
if s.matchSpec(op, def) {
|
||||
debugf("OP MATCH: %s: %s: %v", def, op.Path().String(), op)
|
||||
return action(ctx, op, out)
|
||||
}
|
||||
}
|
||||
return fmt.Errorf("[%s] invalid operation: %s", s.v.Path().String(), cueToJSON(op))
|
||||
}
|
||||
|
||||
func (s *Script) copy(ctx context.Context, v cue.Value, out Fillable) error {
|
||||
// Decode copy options
|
||||
var op struct {
|
||||
Src string
|
||||
Dest string
|
||||
}
|
||||
if err := v.Decode(&op); err != nil {
|
||||
return err
|
||||
}
|
||||
from := v.Lookup("from")
|
||||
if isComponent, err := s.job.r.isComponent(from); err != nil {
|
||||
return err
|
||||
} else if isComponent {
|
||||
return s.copyComponent(ctx, from, op.Src, op.Dest)
|
||||
}
|
||||
if s.matchSpec(from, "#Script") {
|
||||
return s.copyScript(ctx, from, op.Src, op.Dest)
|
||||
}
|
||||
return fmt.Errorf("copy: invalid source")
|
||||
}
|
||||
|
||||
func (s *Script) copyScript(ctx context.Context, from cue.Value, src, dest string) error {
|
||||
// Load source script
|
||||
fromScript, err := s.job.newScript(from)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Execute source script
|
||||
if err := fromScript.Run(ctx, Discard()); err != nil {
|
||||
return err
|
||||
}
|
||||
return s.State().Change(ctx, func(st llb.State) llb.State {
|
||||
return st.File(llb.Copy(
|
||||
fromScript.State().LLB(),
|
||||
src,
|
||||
dest,
|
||||
// FIXME: allow more configurable llb options
|
||||
// For now we define the following convenience presets:
|
||||
&llb.CopyInfo{
|
||||
CopyDirContentsOnly: true,
|
||||
CreateDestPath: true,
|
||||
AllowWildcard: true,
|
||||
},
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Script) copyComponent(ctx context.Context, from cue.Value, src, dest string) error {
|
||||
return s.copyScript(ctx, from.LookupPath(cue.ParsePath("#dagger.compute")), src, dest)
|
||||
}
|
||||
|
||||
func (s *Script) load(ctx context.Context, op cue.Value, out Fillable) error {
|
||||
from := op.Lookup("from")
|
||||
isComponent, err := s.job.r.isComponent(from)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if isComponent {
|
||||
debugf("LOAD: from is a component")
|
||||
return s.loadScript(ctx, from.LookupPath(cue.ParsePath("#dagger.compute")))
|
||||
}
|
||||
if s.matchSpec(from, "#Script") {
|
||||
return s.loadScript(ctx, from)
|
||||
}
|
||||
return fmt.Errorf("load: invalid source")
|
||||
}
|
||||
|
||||
func (s *Script) loadScript(ctx context.Context, v cue.Value) error {
|
||||
from, err := s.job.newScript(v)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "load")
|
||||
}
|
||||
// NOTE we discard cue outputs from running the loaded script.
|
||||
// This means we load the LLB state but NOT the cue exports.
|
||||
// In other words: cue exports are always private to their original location.
|
||||
if err := from.Run(ctx, Discard()); err != nil {
|
||||
return errors.Wrap(err, "load/execute")
|
||||
}
|
||||
// overwrite buildkit state from loaded from
|
||||
s.state = from.state
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Script) exec(ctx context.Context, v cue.Value, out Fillable) error {
|
||||
var opts []llb.RunOption
|
||||
var cmd struct {
|
||||
Args []string
|
||||
Env map[string]string
|
||||
Dir string
|
||||
Always bool
|
||||
}
|
||||
v.Decode(&cmd)
|
||||
// marker for status events
|
||||
opts = append(opts, llb.WithCustomName(v.Path().String()))
|
||||
// args
|
||||
opts = append(opts, llb.Args(cmd.Args))
|
||||
// dir
|
||||
dir := cmd.Dir
|
||||
if dir == "" {
|
||||
dir = "/"
|
||||
}
|
||||
// env
|
||||
for k, v := range cmd.Env {
|
||||
opts = append(opts, llb.AddEnv(k, v))
|
||||
}
|
||||
// always?
|
||||
if cmd.Always {
|
||||
cacheBuster, err := randomID(8)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opts = append(opts, llb.AddEnv("DAGGER_CACHEBUSTER", cacheBuster))
|
||||
}
|
||||
// mounts
|
||||
mnt, _ := v.Lookup("mount").Fields()
|
||||
for mnt.Next() {
|
||||
opt, err := s.mount(ctx, mnt.Label(), mnt.Value())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
opts = append(opts, opt)
|
||||
}
|
||||
// --> Execute
|
||||
return s.State().Change(ctx, func(st llb.State) llb.State {
|
||||
return st.Run(opts...).Root()
|
||||
})
|
||||
}
|
||||
|
||||
func (s *Script) mount(ctx context.Context, dest string, source cue.Value) (llb.RunOption, error) {
|
||||
if s.matchSpec(source, "#MountTmp") {
|
||||
return llb.AddMount(dest, llb.Scratch(), llb.Tmpfs()), nil
|
||||
}
|
||||
if s.matchSpec(source, "#MountCache") {
|
||||
// FIXME: cache mount
|
||||
return nil, fmt.Errorf("FIXME: cache mount not yet implemented")
|
||||
}
|
||||
if s.matchSpec(source, "#MountScript") {
|
||||
return s.mountScript(ctx, dest, source)
|
||||
}
|
||||
if s.matchSpec(source, "#MountComponent") {
|
||||
return s.mountComponent(ctx, dest, source)
|
||||
}
|
||||
return nil, fmt.Errorf("mount %s to %s: invalid source", source.Path().String(), dest)
|
||||
}
|
||||
|
||||
// mount when the input is a script (see mountComponent, mountTmpfs, mountCache)
|
||||
func (s *Script) mountScript(ctx context.Context, dest string, source cue.Value) (llb.RunOption, error) {
|
||||
script, err := s.job.newScript(source)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// FIXME: this is where we re-run everything,
|
||||
// and rely on solver cache / dedup
|
||||
if err := script.Run(ctx, Discard()); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return llb.AddMount(dest, script.State().LLB()), nil
|
||||
}
|
||||
|
||||
func (s *Script) mountComponent(ctx context.Context, dest string, source cue.Value) (llb.RunOption, error) {
|
||||
return s.mountScript(ctx, dest, source.LookupPath(cue.ParsePath("from.#dagger.compute")))
|
||||
}
|
||||
|
||||
func (s *Script) fetchContainer(ctx context.Context, v cue.Value, out Fillable) error {
|
||||
var op struct {
|
||||
Ref string
|
||||
}
|
||||
if err := v.Decode(&op); err != nil {
|
||||
return errors.Wrap(err, "decode fetch-container")
|
||||
}
|
||||
return s.State().Change(ctx, llb.Image(op.Ref))
|
||||
}
|
||||
|
||||
func (s *Script) fetchGit(ctx context.Context, v cue.Value, out Fillable) error {
|
||||
// See #FetchGit in spec.cue
|
||||
var op struct {
|
||||
Remote string
|
||||
Ref string
|
||||
}
|
||||
if err := v.Decode(&op); err != nil {
|
||||
return errors.Wrap(err, "decode fetch-git")
|
||||
}
|
||||
return s.State().Change(ctx, llb.Git(op.Remote, op.Ref))
|
||||
}
|
||||
|
||||
func (s *Script) export(ctx context.Context, v cue.Value, out Fillable) error {
|
||||
// See #Export in spec.cue
|
||||
var op struct {
|
||||
Source string
|
||||
// FIXME: target
|
||||
// Target string
|
||||
Format string
|
||||
}
|
||||
v.Decode(&op)
|
||||
b, err := s.State().ReadFile(ctx, op.Source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
switch op.Format {
|
||||
case "string":
|
||||
return out.Fill(string(b))
|
||||
case "json":
|
||||
var o interface{}
|
||||
if err := json.Unmarshal(b, &o); err != nil {
|
||||
return err
|
||||
}
|
||||
return out.Fill(o)
|
||||
default:
|
||||
return fmt.Errorf("unsupported export format: %q", op.Format)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Script) Cue() cue.Value {
|
||||
return s.v
|
||||
}
|
||||
|
||||
func (s *Script) Location() string {
|
||||
return s.Cue().Path().String()
|
||||
}
|
||||
|
||||
func (s *Script) err(e error, msg string, args ...interface{}) error {
|
||||
return errors.Wrapf(e, s.Location()+": "+msg, args...)
|
||||
}
|
||||
|
||||
func (s *Script) Validate() error {
|
||||
return s.job.r.validateSpec(s.Cue(), "#Script")
|
||||
}
|
||||
|
||||
func (s *Script) State() *State {
|
||||
return s.state
|
||||
}
|
||||
|
||||
func (s *Script) matchSpec(v cue.Value, def string) bool {
|
||||
// FIXME: we manually filter out empty structs to avoid false positives
|
||||
// This is necessary because Runtime.ValidateSpec has a bug
|
||||
// where an empty struct matches everything.
|
||||
// see https://github.com/cuelang/cue/issues/566#issuecomment-749735878
|
||||
// Once the bug is fixed, the manual check can be removed.
|
||||
if st, err := v.Struct(); err == nil {
|
||||
if st.Len() == 0 {
|
||||
debugf("FIXME: manually filtering out empty struct from spec match")
|
||||
return false
|
||||
}
|
||||
}
|
||||
return s.job.r.matchSpec(v, def)
|
||||
}
|
||||
152
dagger/spec.cue
Normal file
@@ -0,0 +1,152 @@
|
||||
package dagger
|
||||
|
||||
// A DAG is the basic unit of programming in dagger.
|
||||
// It is a special kind of program which runs as a pipeline of computing nodes running in parallel,
|
||||
// instead of a sequence of operations to be run by a single node.
|
||||
//
|
||||
// It is a powerful way to automate various parts of an application delivery workflow:
|
||||
// build, test, deploy, generate configuration, enforce policies, publish artifacts, etc.
|
||||
//
|
||||
// The DAG architecture has many benefits:
|
||||
// - Because DAGs are made of nodes executing in parallel, they are easy to scale.
|
||||
// - Because all inputs and outputs are snapshotted and content-addressed, DAGs
|
||||
// can easily be made repeatable, can be cached aggressively, and can be replayed
|
||||
// at will.
|
||||
// - Because nodes are executed by the same container engine as docker-build, DAGs
|
||||
// can be developed using any language or technology capable of running in a docker container.
|
||||
// Dockerfiles and docker images are natively supported for maximum compatibility.
|
||||
//
|
||||
// - Because DAGs are programmed declaratively with a powerful configuration language,
|
||||
// they are much easier to test, debug and refactor than traditional programming languages.
|
||||
//
|
||||
// To execute a DAG, the dagger runtime JIT-compiles it to a low-level format called
|
||||
// llb, and executes it with buildkit.
|
||||
// Think of buildkit as a specialized VM for running compute graphs; and dagger as
|
||||
// a complete programming environment for that VM.
|
||||
//
|
||||
// The tradeoff for all those wonderful features is that a DAG architecture cannot be used
|
||||
// for all software: only software that can be run as a pipeline.
|
||||
//
|
||||
|
||||
// A dagger component is a configuration value augmented
|
||||
// by scripts defining how to compute it, present it to a user,
|
||||
// encrypt it, etc.
|
||||
|
||||
// FIXME: #Component will not match embedded scalars.
|
||||
// use Runtime.isComponent() for a reliable check
|
||||
#Component: {
|
||||
#dagger: #ComponentConfig
|
||||
...
|
||||
}
|
||||
|
||||
// The contents of a #dagger annotation
|
||||
#ComponentConfig: {
|
||||
input?: bool
|
||||
|
||||
// script to compute the value
|
||||
compute?: #Script
|
||||
|
||||
terminal?: {
|
||||
// Display a message when opening a terminal session
|
||||
greeting?: string
|
||||
command: [string]: #Script
|
||||
}
|
||||
// Configure how the component is incorporated into user settings.
|
||||
// Configure how the end-user can configure this component
|
||||
settings?: {
|
||||
// If not specified, scrape from comments
|
||||
title?: string
|
||||
description?: string
|
||||
// Disable user input, even if incomplete?
|
||||
hidden: true | *false
|
||||
ui: _ // insert here something which can be compiled to react-jsonschema-form
|
||||
// Show the cue default value to the user, as a default input value?
|
||||
showDefault: true | *false
|
||||
|
||||
// Insert information needed by:
|
||||
// 1) clients to encrypt
|
||||
// ie. web wizard, cli
|
||||
// 2) middleware to implement deciphering in the cuellb pipeline
|
||||
// eg. integration with cloud KMS, Vault...
|
||||
//
|
||||
// 3) connectors to make sure secrets are preserved
|
||||
encrypt?: {
|
||||
pubkey: string
|
||||
cipher: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Any component can be referenced as a directory, since
|
||||
// every dagger script outputs a filesystem state (aka a directory)
|
||||
#Dir: #Component
|
||||
|
||||
#Script: [...#Op]
|
||||
|
||||
// One operation in a script
|
||||
#Op: #FetchContainer | #FetchGit | #Export | #Exec | #Load | #Copy
|
||||
|
||||
// Export a value from fs state to cue
|
||||
#Export: {
|
||||
do: "export"
|
||||
// Source path in the container
|
||||
source: string
|
||||
format: "json"|"yaml"|*"string"|"number"|"boolean"
|
||||
}
|
||||
|
||||
#Load: #LoadComponent | #LoadScript
|
||||
#LoadComponent: {
|
||||
do: "load"
|
||||
from: #Component
|
||||
}
|
||||
#LoadScript: {
|
||||
do: "load"
|
||||
from: #Script
|
||||
}
|
||||
|
||||
|
||||
#Exec: {
|
||||
do: "exec"
|
||||
args: [...string]
|
||||
env: [string]: string
|
||||
always: true | *false
|
||||
dir: string | *"/"
|
||||
mount?: [string]: #MountTmp | #MountCache | #MountComponent | #MountScript
|
||||
}
|
||||
|
||||
#MountTmp: "tmpfs"
|
||||
#MountCache: "cache"
|
||||
#MountComponent: {
|
||||
input: #Component
|
||||
path: string | *"/"
|
||||
}
|
||||
#MountScript: {
|
||||
input: #Script
|
||||
path: string | *"/"
|
||||
}
|
||||
|
||||
#FetchContainer: {
|
||||
do: "fetch-container"
|
||||
ref: string
|
||||
}
|
||||
|
||||
#FetchGit: {
|
||||
do: "fetch-git"
|
||||
remote: string
|
||||
ref: string
|
||||
}
|
||||
|
||||
#Copy: {
|
||||
do: "copy"
|
||||
from: #Script | #Component
|
||||
src: string | *"/"
|
||||
dest: string | *"/"
|
||||
}
|
||||
|
||||
|
||||
#TestScript: #Script & [
|
||||
{ do: "fetch-container", ref: "alpine:latest" },
|
||||
{ do: "exec", args: ["echo", "hello", "world" ] }
|
||||
]
|
||||
94
dagger/spec_test.go
Normal file
@@ -0,0 +1,94 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
)
|
||||
|
||||
func TestMatch(t *testing.T) {
|
||||
var data = []struct {
|
||||
Src string
|
||||
Def string
|
||||
}{
|
||||
{
|
||||
Src: `do: "exec", args: ["echo", "hello"]`,
|
||||
Def: "#Exec",
|
||||
},
|
||||
{
|
||||
Src: `do: "fetch-git", remote: "github.com/shykes/tests"`,
|
||||
Def: "#FetchGit",
|
||||
},
|
||||
{
|
||||
Src: `do: "load", from: [{do: "exec", args: ["echo", "hello"]}]`,
|
||||
Def: "#Load",
|
||||
},
|
||||
{
|
||||
Src: `do: "load", from: #dagger: compute: [{do: "exec", args: ["echo", "hello"]}]`,
|
||||
Def: "#Load",
|
||||
},
|
||||
// Make sure an empty op does NOT match
|
||||
{
|
||||
Src: ``,
|
||||
Def: "",
|
||||
},
|
||||
{
|
||||
Src: `do: "load"
|
||||
let package={bash:">3.0"}
|
||||
from: foo
|
||||
let foo={#dagger: compute: [
|
||||
{do: "fetch-container", ref: "alpine"},
|
||||
for pkg, info in package {
|
||||
if (info & true) != _|_ {
|
||||
do: "exec"
|
||||
args: ["echo", "hello", pkg]
|
||||
}
|
||||
if (info & string) != _|_ {
|
||||
do: "exec"
|
||||
args: ["echo", "hello", pkg, info]
|
||||
}
|
||||
}
|
||||
]}
|
||||
`,
|
||||
Def: "#Load",
|
||||
},
|
||||
}
|
||||
for _, d := range data {
|
||||
testMatch(t, d.Src, d.Def)
|
||||
}
|
||||
}
|
||||
|
||||
// Test an example op for false positives and negatives
|
||||
func testMatch(t *testing.T, src interface{}, def string) {
|
||||
r := &Runtime{}
|
||||
op := compile(t, r, src)
|
||||
if def != "" {
|
||||
if !r.matchSpec(op, def) {
|
||||
t.Errorf("false negative: %s: %q", def, src)
|
||||
}
|
||||
}
|
||||
for _, cmpDef := range []string{
|
||||
"#Exec",
|
||||
"#FetchGit",
|
||||
"#FetchContainer",
|
||||
"#Export",
|
||||
"#Load",
|
||||
"#Copy",
|
||||
} {
|
||||
if cmpDef == def {
|
||||
continue
|
||||
}
|
||||
if r.matchSpec(op, cmpDef) {
|
||||
t.Errorf("false positive: %s: %q", cmpDef, src)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func compile(t *testing.T, r *Runtime, src interface{}) cue.Value {
|
||||
inst, err := r.Compile("", src)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
return inst.Value()
|
||||
}
|
||||
53
dagger/state.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
bkgw "github.com/moby/buildkit/frontend/gateway/client"
|
||||
)
|
||||
|
||||
type State struct {
|
||||
// Before last solve
|
||||
input llb.State
|
||||
// After last solve
|
||||
output bkgw.Reference
|
||||
// How to produce the output
|
||||
s Solver
|
||||
}
|
||||
|
||||
func NewState(s Solver) *State {
|
||||
return &State{
|
||||
input: llb.Scratch(),
|
||||
s: s,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *State) ReadFile(ctx context.Context, filename string) ([]byte, error) {
|
||||
if s.output == nil {
|
||||
return nil, os.ErrNotExist
|
||||
}
|
||||
return s.output.ReadFile(ctx, bkgw.ReadRequest{Filename: filename})
|
||||
}
|
||||
|
||||
func (s *State) Change(ctx context.Context, op interface{}) error {
|
||||
input := s.input
|
||||
switch OP := op.(type) {
|
||||
case llb.State:
|
||||
input = OP
|
||||
case func(llb.State) llb.State:
|
||||
input = OP(input)
|
||||
}
|
||||
output, err := s.s.Solve(ctx, input)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
s.input = input
|
||||
s.output = output
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *State) LLB() llb.State {
|
||||
return s.input
|
||||
}
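// Illustrative sketch, not part of this commit: Change accepts either a raw
// llb.State or a func transforming the previous state. nopSolver is a
// hypothetical stand-in for the real buildkit-backed solver (see Job.Solve).
type nopSolver struct{}

func (nopSolver) Solve(ctx context.Context, st llb.State) (bkgw.Reference, error) {
	return nil, nil
}

func exampleState(ctx context.Context) error {
	st := NewState(nopSolver{})
	if err := st.Change(ctx, llb.Image("alpine")); err != nil {
		return err
	}
	return st.Change(ctx, func(prev llb.State) llb.State {
		return prev.Run(llb.Args([]string{"echo", "hello"})).Root()
	})
}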
|
||||
26
dagger/ui/ui.go
Normal file
@@ -0,0 +1,26 @@
|
||||
package ui
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func Fatalf(msg string, args ...interface{}) {
|
||||
if !strings.HasSuffix(msg, "\n") {
|
||||
msg = msg + "\n"
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, msg, args...)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
func Fatal(msg interface{}) {
|
||||
Fatalf("%s\n", msg)
|
||||
}
|
||||
|
||||
func Info(msg string, args ...interface{}) {
|
||||
if !strings.HasSuffix(msg, "\n") {
|
||||
msg = msg + "\n"
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "[info] "+msg, args...)
|
||||
}
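// Illustrative sketch, not part of this commit: typical CLI-side usage of
// the helpers above. Fatal/Fatalf exit the process, so they are shown
// commented out.
func exampleUsage() {
	Info("connected to %s", "buildkitd")
	// Fatalf("cannot reach buildkit at %s", host)
}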
|
||||
288
dagger/utils.go
Normal file
@@ -0,0 +1,288 @@
|
||||
package dagger
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"cuelang.org/go/cue"
|
||||
cueAst "cuelang.org/go/cue/ast"
|
||||
cueerrors "cuelang.org/go/cue/errors"
|
||||
cueformat "cuelang.org/go/cue/format"
|
||||
cueload "cuelang.org/go/cue/load"
|
||||
cueParser "cuelang.org/go/cue/parser"
|
||||
"github.com/moby/buildkit/client/llb"
|
||||
"github.com/moby/buildkit/client/llb/imagemetaresolver"
|
||||
"github.com/pkg/errors"
|
||||
)
|
||||
|
||||
// A nil equivalent for cue.Value (when returning errors)
|
||||
var qnil cue.Value
|
||||
|
||||
type Fillable interface {
|
||||
Fill(interface{}) error
|
||||
}
|
||||
|
||||
func Discard() Fillable {
|
||||
return discard{}
|
||||
}
|
||||
|
||||
type discard struct{}
|
||||
|
||||
func (d discard) Fill(x interface{}) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type fillableValue struct {
|
||||
root cue.Value
|
||||
}
|
||||
|
||||
func cuePrint(v cue.Value) (string, error) {
|
||||
b, err := cueformat.Node(v.Syntax())
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(b), nil
|
||||
}
|
||||
|
||||
func (f *fillableValue) Fill(v interface{}) error {
|
||||
root2 := f.root.Fill(v)
|
||||
if err := root2.Err(); err != nil {
|
||||
return err
|
||||
}
|
||||
f.root = root2
|
||||
return nil
|
||||
}
|
||||
|
||||
func cueScratch(r *cue.Runtime) Fillable {
|
||||
f := &fillableValue{}
|
||||
if inst, err := r.Compile("", ""); err == nil {
|
||||
f.root = inst.Value()
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func cueErr(err error) error {
|
||||
return fmt.Errorf("%s", cueerrors.Details(err, &cueerrors.Config{}))
|
||||
}
|
||||
|
||||
func cueDecodeArray(a cue.Value, idx int, out interface{}) {
|
||||
a.LookupPath(cue.MakePath(cue.Index(idx))).Decode(out)
|
||||
}
|
||||
|
||||
func cueToJSON(v cue.Value) JSON {
|
||||
var out JSON
|
||||
v.Walk(
|
||||
func(v cue.Value) bool {
|
||||
b, err := v.MarshalJSON()
|
||||
if err == nil {
|
||||
newOut, err := out.Set(b, cuePathToStrings(v.Path())...)
|
||||
if err == nil {
|
||||
out = newOut
|
||||
}
|
||||
return false
|
||||
}
|
||||
return true
|
||||
},
|
||||
nil,
|
||||
)
|
||||
return out
|
||||
}
|
||||
|
||||
// Build a cue instance from a directory and args
|
||||
func cueBuild(r *cue.Runtime, cueRoot string, buildArgs ...string) (*cue.Instance, error) {
|
||||
var err error
|
||||
cueRoot, err = filepath.Abs(cueRoot)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buildConfig := &cueload.Config{
|
||||
ModuleRoot: cueRoot,
|
||||
Dir: cueRoot,
|
||||
}
|
||||
instances := cueload.Instances(buildArgs, buildConfig)
|
||||
if len(instances) != 1 {
|
||||
return nil, errors.New("only one package is supported at a time")
|
||||
}
|
||||
return r.Build(instances[0])
|
||||
}
|
||||
|
||||
func debugJSON(v interface{}) {
|
||||
if os.Getenv("DEBUG") != "" {
|
||||
e := json.NewEncoder(os.Stderr)
|
||||
e.SetIndent("", " ")
|
||||
e.Encode(v)
|
||||
}
|
||||
}
|
||||
|
||||
func debugf(msg string, args ...interface{}) {
|
||||
if !strings.HasSuffix(msg, "\n") {
|
||||
msg = msg + "\n"
|
||||
}
|
||||
if os.Getenv("DEBUG") != "" {
|
||||
fmt.Fprintf(os.Stderr, msg, args...)
|
||||
}
|
||||
}
|
||||
|
||||
func debug(msg string) {
|
||||
if os.Getenv("DEBUG") != "" {
|
||||
fmt.Fprintln(os.Stderr, msg)
|
||||
}
|
||||
}
|
||||
|
||||
func randomID(size int) (string, error) {
|
||||
b := make([]byte, size)
|
||||
_, err := rand.Read(b)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return fmt.Sprintf("%x", b), nil
|
||||
}
|
||||
|
||||
func cueWrapExpr(p string, v cueAst.Expr) (cueAst.Expr, error) {
|
||||
pExpr, err := cueParser.ParseExpr("path", p)
|
||||
if err != nil {
|
||||
return v, err
|
||||
}
|
||||
out := v
|
||||
cursor := pExpr
|
||||
walk:
|
||||
for {
|
||||
switch c := cursor.(type) {
|
||||
case *cueAst.SelectorExpr:
|
||||
out = cueAst.NewStruct(
|
||||
&cueAst.Field{
|
||||
Value: out,
|
||||
Label: c.Sel,
|
||||
},
|
||||
)
|
||||
cursor = c.X
|
||||
case *cueAst.Ident:
|
||||
out = cueAst.NewStruct(
|
||||
&cueAst.Field{
|
||||
Value: out,
|
||||
Label: c,
|
||||
},
|
||||
)
|
||||
break walk
|
||||
default:
|
||||
return out, fmt.Errorf("invalid path expression: %q", p)
|
||||
}
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func cueWrapFile(p string, v interface{}) (*cueAst.File, error) {
|
||||
f, err := cueParser.ParseFile("value", v)
|
||||
if err != nil {
|
||||
return f, err
|
||||
}
|
||||
decls := make([]cueAst.Decl, 0, len(f.Decls))
|
||||
for _, decl := range f.Decls {
|
||||
switch d := decl.(type) {
|
||||
case *cueAst.Field:
|
||||
wrappedExpr, err := cueWrapExpr(p, cueAst.NewStruct(d))
|
||||
if err != nil {
|
||||
return f, err
|
||||
}
|
||||
decls = append(decls, &cueAst.EmbedDecl{Expr: wrappedExpr})
|
||||
case *cueAst.EmbedDecl:
|
||||
wrappedExpr, err := cueWrapExpr(p, d.Expr)
|
||||
if err != nil {
|
||||
return f, err
|
||||
}
|
||||
d.Expr = wrappedExpr
|
||||
decls = append(decls, d)
|
||||
case *cueAst.ImportDecl:
|
||||
decls = append(decls, decl)
|
||||
default:
|
||||
fmt.Printf("skipping unsupported decl type %#v\n\n", decl)
|
||||
continue
|
||||
}
|
||||
}
|
||||
f.Decls = decls
|
||||
return f, nil
|
||||
}
|
||||
|
||||
func cueIsEmptyStruct(v cue.Value) bool {
|
||||
if st, err := v.Struct(); err == nil {
|
||||
if st.Len() == 0 {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Return false if v is not concrete, or contains any
|
||||
// non-concrete fields or items.
|
||||
func cueIsConcrete(v cue.Value) bool {
|
||||
// FIXME: use Value.Walk?
|
||||
if it, err := v.Fields(); err == nil {
|
||||
for it.Next() {
|
||||
if !cueIsConcrete(it.Value()) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
if it, err := v.List(); err == nil {
|
||||
for it.Next() {
|
||||
if !cueIsConcrete(it.Value()) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
dv, _ := v.Default()
|
||||
return v.IsConcrete() || dv.IsConcrete()
|
||||
}
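// Illustrative sketch, not part of this commit: cueIsConcrete recurses into
// struct fields and list items, so a single unresolved field makes the whole
// value non-concrete.
func exampleIsConcrete(r *cue.Runtime) (bool, bool, error) {
	complete, err := r.Compile("", `a: 1, b: "x"`)
	if err != nil {
		return false, false, err
	}
	partial, err := r.Compile("", `a: 1, b: string`)
	if err != nil {
		return false, false, err
	}
	// Expected: true for the fully resolved value, false for the partial one.
	return cueIsConcrete(complete.Value()), cueIsConcrete(partial.Value()), nil
}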
|
||||
|
||||
// LLB Helper to pull a Docker image + all its metadata
|
||||
func llbDockerImage(ref string) llb.State {
|
||||
return llb.Image(
|
||||
ref,
|
||||
llb.WithMetaResolver(imagemetaresolver.Default()),
|
||||
)
|
||||
}
|
||||
|
||||
func cueStringsToCuePath(parts ...string) cue.Path {
|
||||
selectors := make([]cue.Selector, 0, len(parts))
|
||||
for _, part := range parts {
|
||||
selectors = append(selectors, cue.Str(part))
|
||||
}
|
||||
return cue.MakePath(selectors...)
|
||||
}
|
||||
|
||||
func cuePathToStrings(p cue.Path) []string {
|
||||
selectors := p.Selectors()
|
||||
out := make([]string, len(selectors))
|
||||
for i, sel := range selectors {
|
||||
out[i] = sel.String()
|
||||
}
|
||||
return out
|
||||
}
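// Illustrative sketch, not part of this commit: the two path helpers above
// round-trip between a cue.Path and its string segments.
func examplePathHelpers() []string {
	p := cueStringsToCuePath("www", "source") // p.String() == "www.source"
	return cuePathToStrings(p)                // []string{"www", "source"}
}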
|
||||
|
||||
// Validate a cue path, and return a canonical version
|
||||
func cueCleanPath(p string) (string, error) {
|
||||
cp := cue.ParsePath(p)
|
||||
return cp.String(), cp.Err()
|
||||
}
|
||||
|
||||
func autoMarshal(value interface{}) ([]byte, error) {
|
||||
switch v := value.(type) {
|
||||
case []byte:
|
||||
return v, nil
|
||||
case string:
|
||||
return []byte(v), nil
|
||||
case io.Reader:
|
||||
return ioutil.ReadAll(v)
|
||||
default:
|
||||
return nil, fmt.Errorf("unsupported marshal input type")
|
||||
}
|
||||
return []byte(fmt.Sprintf("%v", value)), nil
|
||||
}
|
||||