From 5c9f7b8ff0af2ae6578d3313aa4b61fb236bc0a5 Mon Sep 17 00:00:00 2001 From: Akihiro Suda Date: Fri, 1 Feb 2019 01:30:39 +0900 Subject: [PATCH] buildctl: new CLI ("Option C+") See https://github.com/moby/buildkit/pull/807#issuecomment-468146089 Close #774 Signed-off-by: Akihiro Suda --- README.md | 30 +-- cache/remotecache/local/local.go | 11 +- client/build_test.go | 8 +- client/client_test.go | 277 ++++++++++++++------ client/solve.go | 58 +++-- cmd/buildctl/build.go | 322 ++++------------------- cmd/buildctl/build/allow.go | 18 ++ cmd/buildctl/build/build.go | 19 ++ cmd/buildctl/build/exportcache.go | 82 ++++++ cmd/buildctl/build/exportcache_test.go | 60 +++++ cmd/buildctl/build/importcache.go | 62 +++++ cmd/buildctl/build/importcache_test.go | 61 +++++ cmd/buildctl/build/local.go | 6 + cmd/buildctl/build/opt.go | 27 ++ cmd/buildctl/build/output.go | 121 +++++++++ cmd/buildctl/build/secret.go | 61 +++++ cmd/buildctl/build/ssh.go | 23 ++ cmd/buildctl/build_test.go | 106 -------- examples/build-using-dockerfile/main.go | 18 +- frontend/dockerfile/dockerfile_test.go | 326 +++++++++++++++++------- 20 files changed, 1098 insertions(+), 598 deletions(-) create mode 100644 cmd/buildctl/build/allow.go create mode 100644 cmd/buildctl/build/build.go create mode 100644 cmd/buildctl/build/exportcache.go create mode 100644 cmd/buildctl/build/exportcache_test.go create mode 100644 cmd/buildctl/build/importcache.go create mode 100644 cmd/buildctl/build/importcache_test.go create mode 100644 cmd/buildctl/build/local.go create mode 100644 cmd/buildctl/build/opt.go create mode 100644 cmd/buildctl/build/output.go create mode 100644 cmd/buildctl/build/secret.go create mode 100644 cmd/buildctl/build/ssh.go diff --git a/README.md b/README.md index ada424d9f655..1db2e841e87c 100644 --- a/README.md +++ b/README.md @@ -122,7 +122,7 @@ During development, Dockerfile frontend (dockerfile.v0) is also part of the Buil ``` buildctl build --frontend=dockerfile.v0 --local context=. 
--local dockerfile=. -buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=. --frontend-opt target=foo --frontend-opt build-arg:foo=bar +buildctl build --frontend=dockerfile.v0 --local context=. --local dockerfile=. --opt target=foo --opt build-arg:foo=bar ``` `--local` exposes local source files from client to the builder. `context` and `dockerfile` are the names Dockerfile frontend looks for build context and Dockerfile location. @@ -146,31 +146,31 @@ docker inspect myimage External versions of the Dockerfile frontend are pushed to https://hub.docker.com/r/docker/dockerfile-upstream and https://hub.docker.com/r/docker/dockerfile and can be used with the gateway frontend. The source for the external frontend is currently located in `./frontend/dockerfile/cmd/dockerfile-frontend` but will move out of this repository in the future ([#163](https://github.com/moby/buildkit/issues/163)). For automatic build from master branch of this repository `docker/dockerfile-upsteam:master` or `docker/dockerfile-upstream:master-experimental` image can be used. ``` -buildctl build --frontend=gateway.v0 --frontend-opt=source=docker/dockerfile --local context=. --local dockerfile=. -buildctl build --frontend gateway.v0 --frontend-opt=source=docker/dockerfile --frontend-opt=context=git://github.com/moby/moby --frontend-opt build-arg:APT_MIRROR=cdn-fastly.deb.debian.org +buildctl build --frontend gateway.v0 --opt source=docker/dockerfile --local context=. --local dockerfile=. +buildctl build --frontend gateway.v0 --opt source=docker/dockerfile --opt context=git://github.com/moby/moby --opt build-arg:APT_MIRROR=cdn-fastly.deb.debian.org ```` ##### Building a Dockerfile with experimental features like `RUN --mount=type=(bind|cache|tmpfs|secret|ssh)` See [`frontend/dockerfile/docs/experimental.md`](frontend/dockerfile/docs/experimental.md). -### Exporters +### Output -By default, the build result and intermediate cache will only remain internally in BuildKit. 
Exporter needs to be specified to retrieve the result. +By default, the build result and intermediate cache will only remain internally in BuildKit. An output needs to be specified to retrieve the result. ##### Exporting resulting image to containerd The containerd worker needs to be used ``` -buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image +buildctl build ... --output type=image,name=docker.io/username/image ctr --namespace=buildkit images ls ``` ##### Push resulting image to registry ``` -buildctl build ... --exporter=image --exporter-opt name=docker.io/username/image --exporter-opt push=true +buildctl build ... --output type=image,name=docker.io/username/image,push=true ``` If credentials are required, `buildctl` will attempt to read Docker configuration file. @@ -181,21 +181,21 @@ If credentials are required, `buildctl` will attempt to read Docker configuratio The local client will copy the files directly to the client. This is useful if BuildKit is being used for building something else than container images. ``` -buildctl build ... --exporter=local --exporter-opt output=path/to/output-dir +buildctl build ... --output type=local,dest=path/to/output-dir ``` ##### Exporting built image to Docker ``` # exported tarball is also compatible with OCI spec -buildctl build ... --exporter=docker --exporter-opt name=myimage | docker load +buildctl build ... --output type=docker,name=myimage | docker load ``` ##### Exporting [OCI Image Format](https://github.com/opencontainers/image-spec) tarball to client ``` -buildctl build ... --exporter=oci --exporter-opt output=path/to/output.tar -buildctl build ... --exporter=oci > output.tar +buildctl build ... --output type=oci,dest=path/to/output.tar +buildctl build ... --output type=oci > output.tar ``` ### Exporting/Importing build cache (not image itself) @@ -210,8 +210,8 @@ buildctl build ... 
--import-cache type=registry,ref=localhost:5000/myrepo:buildc #### To/From local filesystem ``` -buildctl build ... --export-cache type=local,store=path/to/input-dir -buildctl build ... --import-cache type=local,store=path/to/output-dir +buildctl build ... --export-cache type=local,dest=path/to/output-dir +buildctl build ... --import-cache type=local,src=path/to/input-dir ``` The directory layout conforms to OCI Image Spec v1.0. @@ -220,11 +220,11 @@ The directory layout conforms to OCI Image Spec v1.0. * `mode=min` (default): only export layers for the resulting image * `mode=max`: export all the layers of all intermediate steps * `ref=docker.io/user/image:tag`: reference for `registry` cache exporter -* `store=path/to/output-dir`: directory for `local` cache exporter +* `dest=path/to/output-dir`: directory for `local` cache exporter #### `--import-cache` options * `ref=docker.io/user/image:tag`: reference for `registry` cache importer -* `store=path/to/input-dir`: directory for `local` cache importer +* `src=path/to/input-dir`: directory for `local` cache importer * `digest=sha256:deadbeef`: digest of the manifest list to import for `local` cache importer. Defaults to the digest of "latest" tag in `index.json` ### Other diff --git a/cache/remotecache/local/local.go b/cache/remotecache/local/local.go index 741521237fa4..f66d5b4ac767 100644 --- a/cache/remotecache/local/local.go +++ b/cache/remotecache/local/local.go @@ -15,16 +15,17 @@ import ( const ( attrDigest = "digest" - attrStore = "store" + attrSrc = "src" + attrDest = "dest" contentStoreIDPrefix = "local:" ) // ResolveCacheExporterFunc for "local" cache exporter. 
func ResolveCacheExporterFunc(sm *session.Manager) remotecache.ResolveCacheExporterFunc { return func(ctx context.Context, attrs map[string]string) (remotecache.Exporter, error) { - store := attrs[attrStore] + store := attrs[attrDest] if store == "" { - return nil, errors.New("local cache exporter requires store") + return nil, errors.New("local cache exporter requires dest") } csID := contentStoreIDPrefix + store cs, err := getContentStore(ctx, sm, csID) @@ -43,9 +44,9 @@ func ResolveCacheImporterFunc(sm *session.Manager) remotecache.ResolveCacheImpor return nil, specs.Descriptor{}, errors.New("local cache importer requires explicit digest") } dgst := digest.Digest(dgstStr) - store := attrs[attrStore] + store := attrs[attrSrc] if store == "" { - return nil, specs.Descriptor{}, errors.New("local cache importer requires store") + return nil, specs.Descriptor{}, errors.New("local cache importer requires src") } csID := contentStoreIDPrefix + store cs, err := getContentStore(ctx, sm, csID) diff --git a/client/build_test.go b/client/build_test.go index 95f4c995551f..2071e585c2c2 100644 --- a/client/build_test.go +++ b/client/build_test.go @@ -85,8 +85,12 @@ func testClientGatewaySolve(t *testing.T, sb integration.Sandbox) { testStr := "This is a test" _, err = c.Build(ctx, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: tmpdir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: tmpdir, + }, + }, FrontendAttrs: map[string]string{ optKey: testStr, }, diff --git a/client/client_test.go b/client/client_test.go index d37b928c439d..568e1306ecae 100644 --- a/client/client_test.go +++ b/client/client_test.go @@ -192,9 +192,13 @@ func testSSHMount(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, - Session: []session.Attachable{ssh}, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, + Session: 
[]session.Attachable{ssh}, }, nil) require.NoError(t, err) @@ -220,9 +224,13 @@ func testSSHMount(t *testing.T, sb integration.Sandbox) { require.NoError(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, - Session: []session.Attachable{ssh}, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, + Session: []session.Attachable{ssh}, }, nil) require.NoError(t, err) @@ -267,9 +275,13 @@ func testSSHMount(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, - Session: []session.Attachable{ssh}, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, + Session: []session.Attachable{ssh}, }, nil) require.NoError(t, err) @@ -424,8 +436,12 @@ func testFrontendImageNaming(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) so := SolveOpt{ - Exporter: exp, - ExporterAttrs: map[string]string{}, + Exports: []ExportEntry{ + { + Type: exp, + Attrs: map[string]string{}, + }, + }, } out := filepath.Join(destDir, "out.tar") @@ -438,19 +454,19 @@ func testFrontendImageNaming(t *testing.T, sb integration.Sandbox) { case ExporterDocker: outW, err := os.Create(out) require.NoError(t, err) - so.ExporterOutput = outW + so.Exports[0].Output = outW case ExporterImage: imageName = registry + "/" + imageName - so.ExporterAttrs["push"] = "true" + so.Exports[0].Attrs["push"] = "true" } feName := imageName switch winner { case "caller": feName = "loser:latest" - so.ExporterAttrs["name"] = imageName + so.Exports[0].Attrs["name"] = imageName case "frontend": - so.ExporterAttrs["name"] = "*" + so.Exports[0].Attrs["name"] = "*" } frontend := func(ctx context.Context, c gateway.Client) (*gateway.Result, error) { @@ -623,8 +639,12 @@ func testRelativeWorkDir(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = 
c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -707,8 +727,12 @@ func testBuildHTTPSource(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(tmpdir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: tmpdir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: tmpdir, + }, + }, }, nil) require.NoError(t, err) @@ -726,8 +750,12 @@ func testBuildHTTPSource(t *testing.T, sb integration.Sandbox) { require.NoError(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: tmpdir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: tmpdir, + }, + }, }, nil) require.NoError(t, err) @@ -772,8 +800,12 @@ func testResolveAndHosts(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -815,8 +847,12 @@ func testUser(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -873,9 +909,13 @@ func testOCIExporter(t *testing.T, sb integration.Sandbox) { attrs["name"] = target } _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: exp, - ExporterAttrs: attrs, - ExporterOutput: outW, + Exports: []ExportEntry{ + { + Type: exp, + Attrs: attrs, + Output: outW, + }, + }, }, nil) require.NoError(t, err) @@ -953,9 +993,13 @@ func testFrontendMetadataReturn(t *testing.T, sb integration.Sandbox) { } res, err := 
c.Build(context.TODO(), SolveOpt{ - Exporter: ExporterOCI, - ExporterAttrs: map[string]string{}, - ExporterOutput: nopWriteCloser{ioutil.Discard}, + Exports: []ExportEntry{ + { + Type: ExporterOCI, + Attrs: map[string]string{}, + Output: nopWriteCloser{ioutil.Discard}, + }, + }, }, "", frontend, nil) require.NoError(t, err) require.Contains(t, res.ExporterResponse, "frontend.returned") @@ -994,10 +1038,14 @@ func testBuildPushAndValidate(t *testing.T, sb integration.Sandbox) { target := registry + "/buildkit/testpush:latest" _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, - "push": "true", + Exports: []ExportEntry{ + { + Type: ExporterImage, + Attrs: map[string]string{ + "name": target, + "push": "true", + }, + }, }, }, nil) require.NoError(t, err) @@ -1013,8 +1061,12 @@ func testBuildPushAndValidate(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -1158,7 +1210,7 @@ func testBuildPushAndValidate(t *testing.T, sb integration.Sandbox) { require.False(t, ok) } -func testBasicCacheImportExport(t *testing.T, sb integration.Sandbox, cacheOptionsEntry CacheOptionsEntry) { +func testBasicCacheImportExport(t *testing.T, sb integration.Sandbox, cacheOptionsEntryImport, cacheOptionsEntryExport CacheOptionsEntry) { requiresLinux(t) c, err := New(context.TODO(), sb.Address()) require.NoError(t, err) @@ -1182,10 +1234,14 @@ func testBasicCacheImportExport(t *testing.T, sb integration.Sandbox, cacheOptio defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, CacheExports: []CacheOptionsEntry{ - 
cacheOptionsEntry, + cacheOptionsEntryExport, }, }, nil) require.NoError(t, err) @@ -1207,10 +1263,13 @@ func testBasicCacheImportExport(t *testing.T, sb integration.Sandbox, cacheOptio defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }}, CacheImports: []CacheOptionsEntry{ - cacheOptionsEntry, + cacheOptionsEntryImport, }, }, nil) require.NoError(t, err) @@ -1237,20 +1296,26 @@ func testBasicRegistryCacheImportExport(t *testing.T, sb integration.Sandbox) { "ref": target, }, } - testBasicCacheImportExport(t, sb, o) + testBasicCacheImportExport(t, sb, o, o) } func testBasicLocalCacheImportExport(t *testing.T, sb integration.Sandbox) { dir, err := ioutil.TempDir("", "buildkit") require.NoError(t, err) defer os.RemoveAll(dir) - o := CacheOptionsEntry{ + im := CacheOptionsEntry{ Type: "local", Attrs: map[string]string{ - "store": dir, + "src": dir, }, } - testBasicCacheImportExport(t, sb, o) + ex := CacheOptionsEntry{ + Type: "local", + Attrs: map[string]string{ + "dest": dir, + }, + } + testBasicCacheImportExport(t, sb, im, ex) } func testBasicInlineCacheImportExport(t *testing.T, sb integration.Sandbox) { @@ -1281,10 +1346,14 @@ func testBasicInlineCacheImportExport(t *testing.T, sb integration.Sandbox) { target := registry + "/buildkit/testexportinline:latest" resp, err := c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, - "push": "true", + Exports: []ExportEntry{ + { + Type: ExporterImage, + Attrs: map[string]string{ + "name": target, + "push": "true", + }, + }, }, CacheExports: []CacheOptionsEntry{ { @@ -1306,9 +1375,13 @@ func testBasicInlineCacheImportExport(t *testing.T, sb integration.Sandbox) { checkAllRemoved(t, c, sb) resp, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterImage, // specifying inline cache exporter 
is needed for reproducing containerimage.digest // (not needed for reproducing rootfs/unique) + Exports: []ExportEntry{ + { + Type: ExporterImage, + }, + }, CacheExports: []CacheOptionsEntry{ { Type: "inline", @@ -1336,7 +1409,11 @@ func testBasicInlineCacheImportExport(t *testing.T, sb integration.Sandbox) { checkAllRemoved(t, c, sb) resp, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterImage, + Exports: []ExportEntry{ + { + Type: ExporterImage, + }, + }, CacheImports: []CacheOptionsEntry{ { Type: "registry", @@ -1369,8 +1446,12 @@ func readFileInImage(c *Client, ref, path string) ([]byte, error) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) if err != nil { return nil, err @@ -1419,8 +1500,12 @@ func testCachedMounts(t *testing.T, sb integration.Sandbox) { require.NoError(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -1535,8 +1620,12 @@ func testDuplicateWhiteouts(t *testing.T, sb integration.Sandbox) { require.NoError(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterOCI, - ExporterOutput: outW, + Exports: []ExportEntry{ + { + Type: ExporterOCI, + Output: outW, + }, + }, }, nil) require.NoError(t, err) @@ -1601,8 +1690,12 @@ func testWhiteoutParentDir(t *testing.T, sb integration.Sandbox) { outW, err := os.Create(out) require.NoError(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterOCI, - ExporterOutput: outW, + Exports: []ExportEntry{ + { + Type: ExporterOCI, + Output: outW, + }, + }, }, nil) require.NoError(t, err) @@ -1732,8 +1825,12 @@ func testProxyEnv(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = 
c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -1756,8 +1853,12 @@ func testProxyEnv(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -1886,23 +1987,35 @@ func testInvalidExporter(t *testing.T, sb integration.Sandbox) { } for _, exp := range []string{ExporterOCI, ExporterDocker} { _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: exp, - ExporterAttrs: attrs, + Exports: []ExportEntry{ + { + Type: exp, + Attrs: attrs, + }, + }, }, nil) // output file writer is required require.Error(t, err) _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: exp, - ExporterAttrs: attrs, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: exp, + Attrs: attrs, + OutputDir: destDir, + }, + }, }, nil) // output directory is not supported require.Error(t, err) } _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterAttrs: attrs, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + Attrs: attrs, + }, + }, }, nil) // output directory is required require.Error(t, err) @@ -1911,9 +2024,13 @@ func testInvalidExporter(t *testing.T, sb integration.Sandbox) { require.NoError(t, err) defer f.Close() _, err = c.Solve(context.TODO(), def, SolveOpt{ - Exporter: ExporterLocal, - ExporterAttrs: attrs, - ExporterOutput: f, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + Attrs: attrs, + Output: f, + }, + }, }, nil) // output file writer is not supported require.Error(t, err) @@ -1950,8 +2067,12 @@ func testParallelLocalBuilds(t *testing.T, sb integration.Sandbox) { defer os.RemoveAll(destDir) _, err = c.Solve(ctx, def, 
SolveOpt{ - Exporter: ExporterLocal, - ExporterOutputDir: destDir, + Exports: []ExportEntry{ + { + Type: ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ "source": srcDir, }, diff --git a/client/solve.go b/client/solve.go index 92a03857197e..9d2238af42ea 100644 --- a/client/solve.go +++ b/client/solve.go @@ -30,10 +30,7 @@ import ( ) type SolveOpt struct { - Exporter string - ExporterAttrs map[string]string - ExporterOutput io.WriteCloser // for ExporterOCI and ExporterDocker - ExporterOutputDir string // for ExporterLocal + Exports []ExportEntry LocalDirs map[string]string SharedKey string Frontend string @@ -44,6 +41,13 @@ type SolveOpt struct { AllowedEntitlements []entitlements.Entitlement } +type ExportEntry struct { + Type string + Attrs map[string]string + Output io.WriteCloser // for ExporterOCI and ExporterDocker + OutputDir string // for ExporterLocal +} + type CacheOptionsEntry struct { Type string Attrs map[string]string @@ -103,29 +107,37 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG s.Allow(a) } - switch opt.Exporter { + var ex ExportEntry + if len(opt.Exports) > 1 { + return nil, errors.New("currently only single Exports can be specified") + } + if len(opt.Exports) == 1 { + ex = opt.Exports[0] + } + + switch ex.Type { case ExporterLocal: - if opt.ExporterOutput != nil { + if ex.Output != nil { return nil, errors.New("output file writer is not supported by local exporter") } - if opt.ExporterOutputDir == "" { + if ex.OutputDir == "" { return nil, errors.New("output directory is required for local exporter") } - s.Allow(filesync.NewFSSyncTargetDir(opt.ExporterOutputDir)) + s.Allow(filesync.NewFSSyncTargetDir(ex.OutputDir)) case ExporterOCI, ExporterDocker: - if opt.ExporterOutputDir != "" { - return nil, errors.Errorf("output directory %s is not supported by %s exporter", opt.ExporterOutputDir, opt.Exporter) + if ex.OutputDir != "" { + return nil, errors.Errorf("output directory %s is 
not supported by %s exporter", ex.OutputDir, ex.Type) } - if opt.ExporterOutput == nil { - return nil, errors.Errorf("output file writer is required for %s exporter", opt.Exporter) + if ex.Output == nil { + return nil, errors.Errorf("output file writer is required for %s exporter", ex.Type) } - s.Allow(filesync.NewFSSyncTarget(opt.ExporterOutput)) + s.Allow(filesync.NewFSSyncTarget(ex.Output)) default: - if opt.ExporterOutput != nil { - return nil, errors.Errorf("output file writer is not supported by %s exporter", opt.Exporter) + if ex.Output != nil { + return nil, errors.Errorf("output file writer is not supported by %s exporter", ex.Type) } - if opt.ExporterOutputDir != "" { - return nil, errors.Errorf("output directory %s is not supported by %s exporter", opt.ExporterOutputDir, opt.Exporter) + if ex.OutputDir != "" { + return nil, errors.Errorf("output directory %s is not supported by %s exporter", ex.OutputDir, ex.Type) } } @@ -165,8 +177,8 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG resp, err := c.controlClient().Solve(ctx, &controlapi.SolveRequest{ Ref: ref, Definition: pbd, - Exporter: opt.Exporter, - ExporterAttrs: opt.ExporterAttrs, + Exporter: ex.Type, + ExporterAttrs: ex.Attrs, Session: s.ID(), Frontend: opt.Frontend, FrontendAttrs: opt.FrontendAttrs, @@ -347,9 +359,9 @@ func parseCacheOptions(opt SolveOpt) (*cacheOptions, error) { frontendAttrs := make(map[string]string) for _, ex := range opt.CacheExports { if ex.Type == "local" { - csDir := ex.Attrs["store"] + csDir := ex.Attrs["dest"] if csDir == "" { - return nil, errors.New("local cache exporter requires store") + return nil, errors.New("local cache exporter requires dest") } if err := os.MkdirAll(csDir, 0755); err != nil { return nil, err @@ -380,9 +392,9 @@ func parseCacheOptions(opt SolveOpt) (*cacheOptions, error) { for _, im := range opt.CacheImports { attrs := im.Attrs if im.Type == "local" { - csDir := im.Attrs["store"] + csDir := im.Attrs["src"] if 
csDir == "" { - return nil, errors.New("local cache importer requires store") + return nil, errors.New("local cache importer requires src") } if err := os.MkdirAll(csDir, 0755); err != nil { return nil, err diff --git a/cmd/buildctl/build.go b/cmd/buildctl/build.go index 5826d6f3d47f..1b26bb46a072 100644 --- a/cmd/buildctl/build.go +++ b/cmd/buildctl/build.go @@ -2,22 +2,19 @@ package main import ( "context" - "encoding/csv" "encoding/json" "io" "os" - "strings" "github.com/containerd/console" "github.com/moby/buildkit/client" "github.com/moby/buildkit/client/llb" + "github.com/moby/buildkit/cmd/buildctl/build" bccommon "github.com/moby/buildkit/cmd/buildctl/common" "github.com/moby/buildkit/session" "github.com/moby/buildkit/session/auth/authprovider" - "github.com/moby/buildkit/session/secrets/secretsprovider" "github.com/moby/buildkit/session/sshforward/sshprovider" "github.com/moby/buildkit/solver/pb" - "github.com/moby/buildkit/util/entitlements" "github.com/moby/buildkit/util/progress/progressui" "github.com/opencontainers/go-digest" "github.com/pkg/errors" @@ -30,15 +27,25 @@ var buildCommand = cli.Command{ Name: "build", Aliases: []string{"b"}, Usage: "build", - Action: build, + UsageText: ` + To build and push an image using Dockerfile: + $ buildctl build --frontend dockerfile.v0 --opt target=foo --opt build-arg:foo=bar --local context=. --local dockerfile=. --output type=image,name=docker.io/username/image,push=true + `, + Action: buildAction, Flags: []cli.Flag{ + cli.StringSliceFlag{ + Name: "output,o", + Usage: "Define exports for build result, e.g. 
--output type=image,name=docker.io/username/image,push=true", }, cli.StringFlag{ - Name: "exporter", - Usage: "Define exporter for build result", + Name: "exporter", + Usage: "Define exporter for build result (DEPRECATED: use --output type=[,=]", + Hidden: true, }, cli.StringSliceFlag{ - Name: "exporter-opt", - Usage: "Define custom options for exporter", + Name: "exporter-opt", + Usage: "Define custom options for exporter (DEPRECATED: use --output type=[,=]", + Hidden: true, }, cli.StringFlag{ Name: "progress", @@ -58,8 +65,13 @@ var buildCommand = cli.Command{ Usage: "Define frontend used for build", }, cli.StringSliceFlag{ - Name: "frontend-opt", - Usage: "Define custom options for frontend", + Name: "opt", + Usage: "Define custom options for frontend, e.g. --opt target=foo --opt build-arg:foo=bar", + }, + cli.StringSliceFlag{ + Name: "frontend-opt", + Usage: "Define custom options for frontend, e.g. --frontend-opt target=foo --frontend-opt build-arg:foo=bar (DEPRECATED: use --opt)", + Hidden: true, }, cli.BoolFlag{ Name: "no-cache", @@ -67,7 +79,7 @@ var buildCommand = cli.Command{ }, cli.StringSliceFlag{ Name: "export-cache", - Usage: "Export build cache, e.g. type=registry,ref=example.com/foo/bar, or type=local,store=path/to/dir", + Usage: "Export build cache, e.g. type=registry,ref=example.com/foo/bar, or type=local,dest=path/to/dir", }, cli.StringSliceFlag{ Name: "export-cache-opt", @@ -76,7 +88,7 @@ var buildCommand = cli.Command{ }, cli.StringSliceFlag{ Name: "import-cache", - Usage: "Import build cache", + Usage: "Import build cache, e.g. 
type=registry,ref=example.com/foo/bar, or type=local,src=path/to/dir", }, cli.StringSliceFlag{ Name: "secret", @@ -124,7 +136,7 @@ func openTraceFile(clicontext *cli.Context) (*os.File, error) { return nil, nil } -func build(clicontext *cli.Context) error { +func buildAction(clicontext *cli.Context) error { c, err := bccommon.ResolveClient(clicontext) if err != nil { return err @@ -145,7 +157,7 @@ func build(clicontext *cli.Context) error { attachable := []session.Attachable{authprovider.NewDockerAuthProvider()} if ssh := clicontext.StringSlice("ssh"); len(ssh) > 0 { - configs, err := parseSSHSpecs(ssh) + configs, err := build.ParseSSH(ssh) if err != nil { return err } @@ -157,23 +169,37 @@ func build(clicontext *cli.Context) error { } if secrets := clicontext.StringSlice("secret"); len(secrets) > 0 { - secretProvider, err := parseSecretSpecs(secrets) + secretProvider, err := build.ParseSecret(secrets) if err != nil { return err } attachable = append(attachable, secretProvider) } - allowed, err := parseEntitlements(clicontext.StringSlice("allow")) + allowed, err := build.ParseAllow(clicontext.StringSlice("allow")) if err != nil { return err } - cacheExports, err := parseExportCache(clicontext.StringSlice("export-cache"), clicontext.StringSlice("export-cache-opt")) + var exports []client.ExportEntry + if legacyExporter := clicontext.String("exporter"); legacyExporter != "" { + logrus.Warnf("--exporter is deprecated. 
Please use --output type=[,=] instead.") + if len(clicontext.StringSlice("output")) > 0 { + return errors.New("--exporter cannot be used with --output") + } + exports, err = build.ParseLegacyExporter(clicontext.String("exporter"), clicontext.StringSlice("exporter-opt")) + } else { + exports, err = build.ParseOutput(clicontext.StringSlice("output")) + } if err != nil { return err } - cacheImports, err := parseImportCache(clicontext.StringSlice("import-cache")) + + cacheExports, err := build.ParseExportCache(clicontext.StringSlice("export-cache"), clicontext.StringSlice("export-cache-opt")) + if err != nil { + return err + } + cacheImports, err := build.ParseImportCache(clicontext.StringSlice("import-cache")) if err != nil { return err } @@ -182,8 +208,7 @@ func build(clicontext *cli.Context) error { eg, ctx := errgroup.WithContext(bccommon.CommandContext(clicontext)) solveOpt := client.SolveOpt{ - Exporter: clicontext.String("exporter"), - // ExporterAttrs is set later + Exports: exports, // LocalDirs is set later Frontend: clicontext.String("frontend"), // FrontendAttrs is set later @@ -192,24 +217,13 @@ func build(clicontext *cli.Context) error { Session: attachable, AllowedEntitlements: allowed, } - solveOpt.ExporterAttrs, err = attrMap(clicontext.StringSlice("exporter-opt")) - if err != nil { - return errors.Wrap(err, "invalid exporter-opt") - } - solveOpt.ExporterOutput, solveOpt.ExporterOutputDir, err = resolveExporterOutput(solveOpt.Exporter, solveOpt.ExporterAttrs["output"]) - if err != nil { - return errors.Wrap(err, "invalid exporter-opt: output") - } - if solveOpt.ExporterOutput != nil || solveOpt.ExporterOutputDir != "" { - delete(solveOpt.ExporterAttrs, "output") - } - solveOpt.FrontendAttrs, err = attrMap(clicontext.StringSlice("frontend-opt")) + solveOpt.FrontendAttrs, err = build.ParseOpt(clicontext.StringSlice("opt"), clicontext.StringSlice("frontend-opt")) if err != nil { - return errors.Wrap(err, "invalid frontend-opt") + return errors.Wrap(err, 
"invalid opt") } - solveOpt.LocalDirs, err = attrMap(clicontext.StringSlice("local")) + solveOpt.LocalDirs, err = build.ParseLocal(clicontext.StringSlice("local")) if err != nil { return errors.Wrap(err, "invalid local") } @@ -280,241 +294,3 @@ func build(clicontext *cli.Context) error { return eg.Wait() } - -func parseExportCacheCSV(s string) (client.CacheOptionsEntry, error) { - ex := client.CacheOptionsEntry{ - Type: "", - Attrs: map[string]string{}, - } - csvReader := csv.NewReader(strings.NewReader(s)) - fields, err := csvReader.Read() - if err != nil { - return ex, err - } - for _, field := range fields { - parts := strings.SplitN(field, "=", 2) - key := strings.ToLower(parts[0]) - value := parts[1] - switch key { - case "type": - ex.Type = value - default: - ex.Attrs[key] = value - } - } - if ex.Type == "" { - return ex, errors.New("--export-cache requires type=") - } - if _, ok := ex.Attrs["mode"]; !ok { - ex.Attrs["mode"] = "min" - } - return ex, nil -} - -func parseExportCache(exportCaches, legacyExportCacheOpts []string) ([]client.CacheOptionsEntry, error) { - var exports []client.CacheOptionsEntry - if len(legacyExportCacheOpts) > 0 { - if len(exportCaches) != 1 { - return nil, errors.New("--export-cache-opt requires exactly single --export-cache") - } - } - for _, exportCache := range exportCaches { - legacy := !strings.Contains(exportCache, "type=") - if legacy { - logrus.Warnf("--export-cache --export-cache-opt = is deprecated. Please use --export-cache type=registry,ref=,=[,=] instead.") - attrs, err := attrMap(legacyExportCacheOpts) - if err != nil { - return nil, err - } - if _, ok := attrs["mode"]; !ok { - attrs["mode"] = "min" - } - attrs["ref"] = exportCache - exports = append(exports, client.CacheOptionsEntry{ - Type: "registry", - Attrs: attrs, - }) - } else { - if len(legacyExportCacheOpts) > 0 { - return nil, errors.New("--export-cache-opt is not supported for the specified --export-cache. 
Please use --export-cache type=,=[,=] instead.") - } - ex, err := parseExportCacheCSV(exportCache) - if err != nil { - return nil, err - } - exports = append(exports, ex) - } - } - return exports, nil - -} - -func parseImportCacheCSV(s string) (client.CacheOptionsEntry, error) { - im := client.CacheOptionsEntry{ - Type: "", - Attrs: map[string]string{}, - } - csvReader := csv.NewReader(strings.NewReader(s)) - fields, err := csvReader.Read() - if err != nil { - return im, err - } - for _, field := range fields { - parts := strings.SplitN(field, "=", 2) - key := strings.ToLower(parts[0]) - value := parts[1] - switch key { - case "type": - im.Type = value - default: - im.Attrs[key] = value - } - } - if im.Type == "" { - return im, errors.New("--import-cache requires type=") - } - return im, nil -} - -func parseImportCache(importCaches []string) ([]client.CacheOptionsEntry, error) { - var imports []client.CacheOptionsEntry - for _, importCache := range importCaches { - legacy := !strings.Contains(importCache, "type=") - if legacy { - logrus.Warnf("--import-cache is deprecated. 
Please use --import-cache type=registry,ref=,=[,=] instead.") - imports = append(imports, client.CacheOptionsEntry{ - Type: "registry", - Attrs: map[string]string{"ref": importCache}, - }) - } else { - im, err := parseImportCacheCSV(importCache) - if err != nil { - return nil, err - } - imports = append(imports, im) - } - } - return imports, nil -} - -func attrMap(sl []string) (map[string]string, error) { - m := map[string]string{} - for _, v := range sl { - parts := strings.SplitN(v, "=", 2) - if len(parts) != 2 { - return nil, errors.Errorf("invalid value %s", v) - } - m[parts[0]] = parts[1] - } - return m, nil -} - -func parseSecretSpecs(sl []string) (session.Attachable, error) { - fs := make([]secretsprovider.FileSource, 0, len(sl)) - for _, v := range sl { - s, err := parseSecret(v) - if err != nil { - return nil, err - } - fs = append(fs, *s) - } - store, err := secretsprovider.NewFileStore(fs) - if err != nil { - return nil, err - } - return secretsprovider.NewSecretProvider(store), nil -} - -func parseSecret(value string) (*secretsprovider.FileSource, error) { - csvReader := csv.NewReader(strings.NewReader(value)) - fields, err := csvReader.Read() - if err != nil { - return nil, errors.Wrap(err, "failed to parse csv secret") - } - - fs := secretsprovider.FileSource{} - - for _, field := range fields { - parts := strings.SplitN(field, "=", 2) - key := strings.ToLower(parts[0]) - - if len(parts) != 2 { - return nil, errors.Errorf("invalid field '%s' must be a key=value pair", field) - } - - value := parts[1] - switch key { - case "type": - if value != "file" { - return nil, errors.Errorf("unsupported secret type %q", value) - } - case "id": - fs.ID = value - case "source", "src": - fs.FilePath = value - default: - return nil, errors.Errorf("unexpected key '%s' in '%s'", key, field) - } - } - return &fs, nil -} - -// resolveExporterOutput returns at most either one of io.WriteCloser (single file) or a string (directory path). 
-func resolveExporterOutput(exporter, output string) (io.WriteCloser, string, error) { - switch exporter { - case client.ExporterLocal: - if output == "" { - return nil, "", errors.New("output directory is required for local exporter") - } - return nil, output, nil - case client.ExporterOCI, client.ExporterDocker: - if output != "" { - fi, err := os.Stat(output) - if err != nil && !os.IsNotExist(err) { - return nil, "", errors.Wrapf(err, "invalid destination file: %s", output) - } - if err == nil && fi.IsDir() { - return nil, "", errors.Errorf("destination file is a directory") - } - w, err := os.Create(output) - return w, "", err - } - // if no output file is specified, use stdout - if _, err := console.ConsoleFromFile(os.Stdout); err == nil { - return nil, "", errors.Errorf("output file is required for %s exporter. refusing to write to console", exporter) - } - return os.Stdout, "", nil - default: // e.g. client.ExporterImage - if output != "" { - return nil, "", errors.Errorf("output %s is not supported by %s exporter", output, exporter) - } - return nil, "", nil - } -} - -func parseEntitlements(inp []string) ([]entitlements.Entitlement, error) { - ent := make([]entitlements.Entitlement, 0, len(inp)) - for _, v := range inp { - e, err := entitlements.Parse(v) - if err != nil { - return nil, err - } - ent = append(ent, e) - } - return ent, nil -} - -func parseSSHSpecs(inp []string) ([]sshprovider.AgentConfig, error) { - configs := make([]sshprovider.AgentConfig, 0, len(inp)) - for _, v := range inp { - parts := strings.SplitN(v, "=", 2) - cfg := sshprovider.AgentConfig{ - ID: parts[0], - } - if len(parts) > 1 { - cfg.Paths = strings.Split(parts[1], ",") - } - configs = append(configs, cfg) - } - return configs, nil -} diff --git a/cmd/buildctl/build/allow.go b/cmd/buildctl/build/allow.go new file mode 100644 index 000000000000..fe43e5676dad --- /dev/null +++ b/cmd/buildctl/build/allow.go @@ -0,0 +1,18 @@ +package build + +import ( + 
"github.com/moby/buildkit/util/entitlements" +) + +// ParseAllow parses --allow +func ParseAllow(inp []string) ([]entitlements.Entitlement, error) { + ent := make([]entitlements.Entitlement, 0, len(inp)) + for _, v := range inp { + e, err := entitlements.Parse(v) + if err != nil { + return nil, err + } + ent = append(ent, e) + } + return ent, nil +} diff --git a/cmd/buildctl/build/build.go b/cmd/buildctl/build/build.go new file mode 100644 index 000000000000..738da597dcd9 --- /dev/null +++ b/cmd/buildctl/build/build.go @@ -0,0 +1,19 @@ +package build + +import ( + "strings" + + "github.com/pkg/errors" +) + +func attrMap(sl []string) (map[string]string, error) { + m := map[string]string{} + for _, v := range sl { + parts := strings.SplitN(v, "=", 2) + if len(parts) != 2 { + return nil, errors.Errorf("invalid value %s", v) + } + m[parts[0]] = parts[1] + } + return m, nil +} diff --git a/cmd/buildctl/build/exportcache.go b/cmd/buildctl/build/exportcache.go new file mode 100644 index 000000000000..080dd8582784 --- /dev/null +++ b/cmd/buildctl/build/exportcache.go @@ -0,0 +1,82 @@ +package build + +import ( + "encoding/csv" + "strings" + + "github.com/moby/buildkit/client" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +func parseExportCacheCSV(s string) (client.CacheOptionsEntry, error) { + ex := client.CacheOptionsEntry{ + Type: "", + Attrs: map[string]string{}, + } + csvReader := csv.NewReader(strings.NewReader(s)) + fields, err := csvReader.Read() + if err != nil { + return ex, err + } + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + if len(parts) != 2 { + return ex, errors.Errorf("invalid value %s", field) + } + key := strings.ToLower(parts[0]) + value := parts[1] + switch key { + case "type": + ex.Type = value + default: + ex.Attrs[key] = value + } + } + if ex.Type == "" { + return ex, errors.New("--export-cache requires type=") + } + if _, ok := ex.Attrs["mode"]; !ok { + ex.Attrs["mode"] = "min" + } + return ex, nil +} + 
+// ParseExportCache parses --export-cache (and legacy --export-cache-opt) +func ParseExportCache(exportCaches, legacyExportCacheOpts []string) ([]client.CacheOptionsEntry, error) { + var exports []client.CacheOptionsEntry + if len(legacyExportCacheOpts) > 0 { + if len(exportCaches) != 1 { + return nil, errors.New("--export-cache-opt requires exactly single --export-cache") + } + } + for _, exportCache := range exportCaches { + legacy := !strings.Contains(exportCache, "type=") + if legacy { + logrus.Warnf("--export-cache --export-cache-opt = is deprecated. Please use --export-cache type=registry,ref=,=[,=] instead.") + attrs, err := attrMap(legacyExportCacheOpts) + if err != nil { + return nil, err + } + if _, ok := attrs["mode"]; !ok { + attrs["mode"] = "min" + } + attrs["ref"] = exportCache + exports = append(exports, client.CacheOptionsEntry{ + Type: "registry", + Attrs: attrs, + }) + } else { + if len(legacyExportCacheOpts) > 0 { + return nil, errors.New("--export-cache-opt is not supported for the specified --export-cache. 
Please use --export-cache type=,=[,=] instead.") + } + ex, err := parseExportCacheCSV(exportCache) + if err != nil { + return nil, err + } + exports = append(exports, ex) + } + } + return exports, nil + +} diff --git a/cmd/buildctl/build/exportcache_test.go b/cmd/buildctl/build/exportcache_test.go new file mode 100644 index 000000000000..77a3b8aa255c --- /dev/null +++ b/cmd/buildctl/build/exportcache_test.go @@ -0,0 +1,60 @@ +package build + +import ( + "testing" + + "github.com/moby/buildkit/client" + "github.com/stretchr/testify/require" +) + +func TestParseExportCache(t *testing.T) { + type testCase struct { + exportCaches []string // --export-cache + legacyExportCacheOpts []string // --export-cache-opt (legacy) + expected []client.CacheOptionsEntry + expectedErr string + } + testCases := []testCase{ + { + exportCaches: []string{"type=registry,ref=example.com/foo/bar"}, + expected: []client.CacheOptionsEntry{ + { + Type: "registry", + Attrs: map[string]string{ + "ref": "example.com/foo/bar", + "mode": "min", + }, + }, + }, + }, + { + exportCaches: []string{"example.com/foo/bar"}, + legacyExportCacheOpts: []string{"mode=max"}, + expected: []client.CacheOptionsEntry{ + { + Type: "registry", + Attrs: map[string]string{ + "ref": "example.com/foo/bar", + "mode": "max", + }, + }, + }, + }, + { + exportCaches: []string{"type=registry,ref=example.com/foo/bar"}, + legacyExportCacheOpts: []string{"mode=max"}, + expectedErr: "--export-cache-opt is not supported for the specified --export-cache", + }, + // TODO: test multiple exportCaches (valid for CLI but not supported by solver) + + } + for _, tc := range testCases { + ex, err := ParseExportCache(tc.exportCaches, tc.legacyExportCacheOpts) + if tc.expectedErr == "" { + require.EqualValues(t, tc.expected, ex) + } else { + require.Error(t, err) + require.Contains(t, err.Error(), tc.expectedErr) + } + } +} diff --git a/cmd/buildctl/build/importcache.go b/cmd/buildctl/build/importcache.go new file mode 100644 index 
000000000000..a300f327bbae --- /dev/null +++ b/cmd/buildctl/build/importcache.go @@ -0,0 +1,62 @@ +package build + +import ( + "encoding/csv" + "strings" + + "github.com/moby/buildkit/client" + "github.com/pkg/errors" + "github.com/sirupsen/logrus" +) + +func parseImportCacheCSV(s string) (client.CacheOptionsEntry, error) { + im := client.CacheOptionsEntry{ + Type: "", + Attrs: map[string]string{}, + } + csvReader := csv.NewReader(strings.NewReader(s)) + fields, err := csvReader.Read() + if err != nil { + return im, err + } + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + if len(parts) != 2 { + return im, errors.Errorf("invalid value %s", field) + } + key := strings.ToLower(parts[0]) + value := parts[1] + switch key { + case "type": + im.Type = value + default: + im.Attrs[key] = value + } + } + if im.Type == "" { + return im, errors.New("--import-cache requires type=") + } + return im, nil +} + +// ParseImportCache parses --import-cache +func ParseImportCache(importCaches []string) ([]client.CacheOptionsEntry, error) { + var imports []client.CacheOptionsEntry + for _, importCache := range importCaches { + legacy := !strings.Contains(importCache, "type=") + if legacy { + logrus.Warn("--import-cache is deprecated. 
Please use --import-cache type=registry,ref=,=[,=] instead.") + imports = append(imports, client.CacheOptionsEntry{ + Type: "registry", + Attrs: map[string]string{"ref": importCache}, + }) + } else { + im, err := parseImportCacheCSV(importCache) + if err != nil { + return nil, err + } + imports = append(imports, im) + } + } + return imports, nil +} diff --git a/cmd/buildctl/build/importcache_test.go b/cmd/buildctl/build/importcache_test.go new file mode 100644 index 000000000000..8dd18bcf16ab --- /dev/null +++ b/cmd/buildctl/build/importcache_test.go @@ -0,0 +1,61 @@ +package build + +import ( + "testing" + + "github.com/moby/buildkit/client" + "github.com/stretchr/testify/require" +) + +func TestParseImportCache(t *testing.T) { + type testCase struct { + importCaches []string // --import-cache + expected []client.CacheOptionsEntry + expectedErr string + } + testCases := []testCase{ + { + importCaches: []string{"type=registry,ref=example.com/foo/bar", "type=local,src=/path/to/store"}, + expected: []client.CacheOptionsEntry{ + { + Type: "registry", + Attrs: map[string]string{ + "ref": "example.com/foo/bar", + }, + }, + { + Type: "local", + Attrs: map[string]string{ + "src": "/path/to/store", + }, + }, + }, + }, + { + importCaches: []string{"example.com/foo/bar", "example.com/baz/qux"}, + expected: []client.CacheOptionsEntry{ + { + Type: "registry", + Attrs: map[string]string{ + "ref": "example.com/foo/bar", + }, + }, + { + Type: "registry", + Attrs: map[string]string{ + "ref": "example.com/baz/qux", + }, + }, + }, + }, + } + for _, tc := range testCases { + im, err := ParseImportCache(tc.importCaches) + if tc.expectedErr == "" { + require.EqualValues(t, tc.expected, im) + } else { + require.Error(t, err) + require.Contains(t, err.Error(), tc.expectedErr) + } + } +} diff --git a/cmd/buildctl/build/local.go b/cmd/buildctl/build/local.go new file mode 100644 index 000000000000..098953d422ab --- /dev/null +++ b/cmd/buildctl/build/local.go @@ -0,0 +1,6 @@ +package build 
+ +// ParseLocal parses --local +func ParseLocal(locals []string) (map[string]string, error) { + return attrMap(locals) +} diff --git a/cmd/buildctl/build/opt.go b/cmd/buildctl/build/opt.go new file mode 100644 index 000000000000..93acc7dd23ac --- /dev/null +++ b/cmd/buildctl/build/opt.go @@ -0,0 +1,27 @@ +package build + +import ( + "github.com/sirupsen/logrus" +) + +func ParseOpt(opts, legacyFrontendOpts []string) (map[string]string, error) { + m := make(map[string]string) + if len(legacyFrontendOpts) > 0 { + logrus.Warn("--frontend-opt = is deprecated. Please use --opt = instead.") + legacy, err := attrMap(legacyFrontendOpts) + if err != nil { + return nil, err + } + for k, v := range legacy { + m[k] = v + } + } + modern, err := attrMap(opts) + if err != nil { + return nil, err + } + for k, v := range modern { + m[k] = v + } + return m, nil +} diff --git a/cmd/buildctl/build/output.go b/cmd/buildctl/build/output.go new file mode 100644 index 000000000000..61584077d74a --- /dev/null +++ b/cmd/buildctl/build/output.go @@ -0,0 +1,121 @@ +package build + +import ( + "encoding/csv" + "io" + "os" + "strings" + + "github.com/containerd/console" + "github.com/moby/buildkit/client" + "github.com/pkg/errors" +) + +// parseOutputCSV parses a single --output CSV string +func parseOutputCSV(s string) (client.ExportEntry, error) { + ex := client.ExportEntry{ + Type: "", + Attrs: map[string]string{}, + } + csvReader := csv.NewReader(strings.NewReader(s)) + fields, err := csvReader.Read() + if err != nil { + return ex, err + } + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + if len(parts) != 2 { + return ex, errors.Errorf("invalid value %s", field) + } + key := strings.ToLower(parts[0]) + value := parts[1] + switch key { + case "type": + ex.Type = value + default: + ex.Attrs[key] = value + } + } + if ex.Type == "" { + return ex, errors.New("--output requires type=") + } + if v, ok := ex.Attrs["output"]; ok { + return ex, errors.Errorf("output=%s not 
supported for --output, you meant dest=%s?", v, v) + } + ex.Output, ex.OutputDir, err = resolveExporterDest(ex.Type, ex.Attrs["dest"]) + if err != nil { + return ex, errors.Wrap(err, "invalid output option: output") + } + if ex.Output != nil || ex.OutputDir != "" { + delete(ex.Attrs, "dest") + } + return ex, nil +} + +// ParseOutput parses --output +func ParseOutput(exports []string) ([]client.ExportEntry, error) { + var entries []client.ExportEntry + for _, s := range exports { + e, err := parseOutputCSV(s) + if err != nil { + return nil, err + } + entries = append(entries, e) + } + return entries, nil +} + +// ParseLegacyExporter parses legacy --exporter --exporter-opt = +func ParseLegacyExporter(legacyExporter string, legacyExporterOpts []string) ([]client.ExportEntry, error) { + var ex client.ExportEntry + ex.Type = legacyExporter + var err error + ex.Attrs, err = attrMap(legacyExporterOpts) + if err != nil { + return nil, errors.Wrap(err, "invalid exporter-opt") + } + if v, ok := ex.Attrs["dest"]; ok { + return nil, errors.Errorf("dest=%s not supported for --exporter-opt, you meant output=%s?", v, v) + } + ex.Output, ex.OutputDir, err = resolveExporterDest(ex.Type, ex.Attrs["output"]) + if err != nil { + return nil, errors.Wrap(err, "invalid exporter option: output") + } + if ex.Output != nil || ex.OutputDir != "" { + delete(ex.Attrs, "output") + } + return []client.ExportEntry{ex}, nil +} + +// resolveExporterDest returns at most either one of io.WriteCloser (single file) or a string (directory path). 
+func resolveExporterDest(exporter, dest string) (io.WriteCloser, string, error) { + switch exporter { + case client.ExporterLocal: + if dest == "" { + return nil, "", errors.New("output directory is required for local exporter") + } + return nil, dest, nil + case client.ExporterOCI, client.ExporterDocker: + if dest != "" { + fi, err := os.Stat(dest) + if err != nil && !os.IsNotExist(err) { + return nil, "", errors.Wrapf(err, "invalid destination file: %s", dest) + } + if err == nil && fi.IsDir() { + return nil, "", errors.Errorf("destination file is a directory") + } + w, err := os.Create(dest) + return w, "", err + } + // if no output file is specified, use stdout + if _, err := console.ConsoleFromFile(os.Stdout); err == nil { + return nil, "", errors.Errorf("output file is required for %s exporter. refusing to write to console", exporter) + } + return os.Stdout, "", nil + default: // e.g. client.ExporterImage + if dest != "" { + return nil, "", errors.Errorf("output %s is not supported by %s exporter", dest, exporter) + } + return nil, "", nil + } +} diff --git a/cmd/buildctl/build/secret.go b/cmd/buildctl/build/secret.go new file mode 100644 index 000000000000..1019eac3cd71 --- /dev/null +++ b/cmd/buildctl/build/secret.go @@ -0,0 +1,61 @@ +package build + +import ( + "encoding/csv" + "strings" + + "github.com/moby/buildkit/session" + "github.com/moby/buildkit/session/secrets/secretsprovider" + "github.com/pkg/errors" +) + +// ParseSecret parses --secret +func ParseSecret(sl []string) (session.Attachable, error) { + fs := make([]secretsprovider.FileSource, 0, len(sl)) + for _, v := range sl { + s, err := parseSecret(v) + if err != nil { + return nil, err + } + fs = append(fs, *s) + } + store, err := secretsprovider.NewFileStore(fs) + if err != nil { + return nil, err + } + return secretsprovider.NewSecretProvider(store), nil +} + +func parseSecret(value string) (*secretsprovider.FileSource, error) { + csvReader := csv.NewReader(strings.NewReader(value)) + 
fields, err := csvReader.Read() + if err != nil { + return nil, errors.Wrap(err, "failed to parse csv secret") + } + + fs := secretsprovider.FileSource{} + + for _, field := range fields { + parts := strings.SplitN(field, "=", 2) + key := strings.ToLower(parts[0]) + + if len(parts) != 2 { + return nil, errors.Errorf("invalid field '%s' must be a key=value pair", field) + } + + value := parts[1] + switch key { + case "type": + if value != "file" { + return nil, errors.Errorf("unsupported secret type %q", value) + } + case "id": + fs.ID = value + case "source", "src": + fs.FilePath = value + default: + return nil, errors.Errorf("unexpected key '%s' in '%s'", key, field) + } + } + return &fs, nil +} diff --git a/cmd/buildctl/build/ssh.go b/cmd/buildctl/build/ssh.go new file mode 100644 index 000000000000..05e71712df3c --- /dev/null +++ b/cmd/buildctl/build/ssh.go @@ -0,0 +1,23 @@ +package build + +import ( + "strings" + + "github.com/moby/buildkit/session/sshforward/sshprovider" +) + +// ParseSSH parses --ssh +func ParseSSH(inp []string) ([]sshprovider.AgentConfig, error) { + configs := make([]sshprovider.AgentConfig, 0, len(inp)) + for _, v := range inp { + parts := strings.SplitN(v, "=", 2) + cfg := sshprovider.AgentConfig{ + ID: parts[0], + } + if len(parts) > 1 { + cfg.Paths = strings.Split(parts[1], ",") + } + configs = append(configs, cfg) + } + return configs, nil +} diff --git a/cmd/buildctl/build_test.go b/cmd/buildctl/build_test.go index a68f11af44ab..f54312bdb023 100644 --- a/cmd/buildctl/build_test.go +++ b/cmd/buildctl/build_test.go @@ -14,7 +14,6 @@ import ( "github.com/containerd/containerd" "github.com/containerd/containerd/namespaces" "github.com/containerd/continuity/fs/fstest" - "github.com/moby/buildkit/client" "github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/util/testutil/integration" "github.com/stretchr/testify/require" @@ -120,108 +119,3 @@ func tmpdir(appliers ...fstest.Applier) (string, error) { } return tmpdir, nil } - -func 
TestParseExportCache(t *testing.T) { - type testCase struct { - exportCaches []string // --export-cache - legacyExportCacheOpts []string // --export-cache-opt (legacy) - expected []client.CacheOptionsEntry - expectedErr string - } - testCases := []testCase{ - { - exportCaches: []string{"type=registry,ref=example.com/foo/bar"}, - expected: []client.CacheOptionsEntry{ - { - Type: "registry", - Attrs: map[string]string{ - "ref": "example.com/foo/bar", - "mode": "min", - }, - }, - }, - }, - { - exportCaches: []string{"example.com/foo/bar"}, - legacyExportCacheOpts: []string{"mode=max"}, - expected: []client.CacheOptionsEntry{ - { - Type: "registry", - Attrs: map[string]string{ - "ref": "example.com/foo/bar", - "mode": "max", - }, - }, - }, - }, - { - exportCaches: []string{"type=registry,ref=example.com/foo/bar"}, - legacyExportCacheOpts: []string{"mode=max"}, - expectedErr: "--export-cache-opt is not supported for the specified --export-cache", - }, - // TODO: test multiple exportCaches (valid for CLI but not supported by solver) - - } - for _, tc := range testCases { - ex, err := parseExportCache(tc.exportCaches, tc.legacyExportCacheOpts) - if tc.expectedErr == "" { - require.EqualValues(t, tc.expected, ex) - } else { - require.Error(t, err) - require.Contains(t, err.Error(), tc.expectedErr) - } - } -} - -func TestParseImportCache(t *testing.T) { - type testCase struct { - importCaches []string // --import-cache - expected []client.CacheOptionsEntry - expectedErr string - } - testCases := []testCase{ - { - importCaches: []string{"type=registry,ref=example.com/foo/bar", "type=local,store=/path/to/store"}, - expected: []client.CacheOptionsEntry{ - { - Type: "registry", - Attrs: map[string]string{ - "ref": "example.com/foo/bar", - }, - }, - { - Type: "local", - Attrs: map[string]string{ - "store": "/path/to/store", - }, - }, - }, - }, - { - importCaches: []string{"example.com/foo/bar", "example.com/baz/qux"}, - expected: []client.CacheOptionsEntry{ - { - Type: 
"registry", - Attrs: map[string]string{ - "ref": "example.com/foo/bar", - }, - }, - { - Type: "registry", - Attrs: map[string]string{ - "ref": "example.com/baz/qux", - }, - }, - }, - }, - } - for _, tc := range testCases { - im, err := parseImportCache(tc.importCaches) - if tc.expectedErr == "" { - require.EqualValues(t, tc.expected, im) - } else { - require.Error(t, err) - require.Contains(t, err.Error(), tc.expectedErr) - } - } -} diff --git a/examples/build-using-dockerfile/main.go b/examples/build-using-dockerfile/main.go index 12fa84398fb4..68df8402a190 100644 --- a/examples/build-using-dockerfile/main.go +++ b/examples/build-using-dockerfile/main.go @@ -160,14 +160,18 @@ func newSolveOpt(clicontext *cli.Context, w io.WriteCloser) (*client.SolveOpt, e frontendAttrs["build-arg:"+kv[0]] = kv[1] } return &client.SolveOpt{ - Exporter: "docker", // TODO: use containerd image store when it is integrated to Docker - ExporterAttrs: map[string]string{ - "name": clicontext.String("tag"), + Exports: []client.ExportEntry{ + { + Type: "docker", // TODO: use containerd image store when it is integrated to Docker + Attrs: map[string]string{ + "name": clicontext.String("tag"), + }, + Output: w, + }, }, - ExporterOutput: w, - LocalDirs: localDirs, - Frontend: frontend, - FrontendAttrs: frontendAttrs, + LocalDirs: localDirs, + Frontend: frontend, + FrontendAttrs: frontendAttrs, }, nil } diff --git a/frontend/dockerfile/dockerfile_test.go b/frontend/dockerfile/dockerfile_test.go index 5fe8591b650b..34c3892912f5 100644 --- a/frontend/dockerfile/dockerfile_test.go +++ b/frontend/dockerfile/dockerfile_test.go @@ -235,8 +235,12 @@ COPY link/foo . 
defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -275,8 +279,12 @@ COPY --from=build /sub2/foo bar defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -350,8 +358,12 @@ COPY --from=build /out . builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -397,8 +409,12 @@ COPY arch-$TARGETARCH whoami FrontendAttrs: map[string]string{ "platform": "windows/amd64,linux/arm,linux/s390x", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -432,8 +448,12 @@ COPY arch-$TARGETARCH whoami FrontendAttrs: map[string]string{ "platform": "windows/amd64,linux/arm/v6,linux/ppc64le", }, - Exporter: client.ExporterOCI, - ExporterOutput: outW, + Exports: []client.ExportEntry{ + { + Type: client.ExporterOCI, + Output: outW, + }, + }, }, nil) require.NoError(t, err) @@ -532,8 +552,12 @@ COPY foo / defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: 
destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -667,8 +691,12 @@ COPY --from=0 /foo /foo "context": server.URL + "/df", "filename": "mydockerfile", // this is bogus, any name should work }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -707,9 +735,13 @@ CMD ["test"] target := "docker.io/moby/cmdoverridetest:latest" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -732,9 +764,13 @@ ENTRYPOINT my entrypoint target = "docker.io/moby/cmdoverridetest2:latest" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -795,9 +831,13 @@ LABEL foo=bar target := "docker.io/moby/testpullscratch:latest" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -821,9 +861,13 @@ COPY foo . 
target = "docker.io/moby/testpullscratch2:latest" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -872,8 +916,12 @@ COPY foo . defer os.RemoveAll(destDir) _, err = c.Solve(context.TODO(), def, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1400,9 +1448,13 @@ EXPOSE 5000 target := "example.com/moby/dockerfileexpansion:test" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -1488,8 +1540,12 @@ Dockerfile defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1720,8 +1776,12 @@ USER nobody defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1740,9 +1800,13 @@ 
USER nobody // test user in exported target := "example.com/moby/dockerfileuser:test" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -1814,8 +1878,12 @@ COPY --from=base /out / defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1865,8 +1933,12 @@ COPY files dest defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1909,8 +1981,12 @@ COPY $FOO baz defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -1960,8 +2036,12 @@ COPY sub/dir1 subdest6 defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: 
dir, @@ -2066,8 +2146,12 @@ COPY --from=build foo bar2 FrontendAttrs: map[string]string{ "context": server.URL + "/.git#first", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -2088,8 +2172,12 @@ COPY --from=build foo bar2 FrontendAttrs: map[string]string{ "context": server.URL + "/.git", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -2151,8 +2239,12 @@ COPY foo bar "context": server.URL + "/myurl", "filename": "mydockerfile", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, }, nil) require.NoError(t, err) @@ -2184,8 +2276,12 @@ COPY --from=busybox /etc/passwd test defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2218,8 +2314,12 @@ COPY --from=golang /usr/bin/go go defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2259,8 +2359,12 @@ COPY --from=stage1 baz bax defer os.RemoveAll(destDir) _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: 
client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2302,9 +2406,13 @@ LABEL foo=bar FrontendAttrs: map[string]string{ "label:bar": "baz", }, - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -2387,8 +2495,12 @@ COPY --from=base unique / target := registry + "/buildkit/testexportdf:latest" _, err = f.Solve(context.TODO(), c, client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, CacheExports: []client.CacheOptionsEntry{ { Type: "registry", @@ -2422,8 +2534,12 @@ COPY --from=base unique / FrontendAttrs: map[string]string{ "cache-from": target, }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2471,9 +2587,13 @@ RUN echo bar > bar target := "example.com/moby/dockerfileids:test" opt := client.SolveOpt{ FrontendAttrs: map[string]string{}, - Exporter: client.ExporterImage, - ExporterAttrs: map[string]string{ - "name": target, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, @@ -2485,7 +2605,7 @@ RUN echo bar > bar require.NoError(t, err) target2 := "example.com/moby/dockerfileids2:test" - opt.ExporterAttrs["name"] = target2 + opt.Exports[0].Attrs["name"] = target2 _, err = f.Solve(context.TODO(), c, opt, nil) require.NoError(t, 
err) @@ -2557,16 +2677,20 @@ RUN echo bar > bar cacheTarget := registry + "/test/dockerfileexpids:cache" opt := client.SolveOpt{ FrontendAttrs: map[string]string{}, - Exporter: client.ExporterImage, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + }, + }, + }, CacheExports: []client.CacheOptionsEntry{ { Type: "registry", Attrs: map[string]string{"ref": cacheTarget}, }, }, - ExporterAttrs: map[string]string{ - "name": target, - }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2595,7 +2719,7 @@ RUN echo bar > bar target2 := "example.com/moby/dockerfileexpids2:test" - opt.ExporterAttrs["name"] = target2 + opt.Exports[0].Attrs["name"] = target2 opt.FrontendAttrs["cache-from"] = cacheTarget _, err = f.Solve(context.TODO(), c, opt, nil) @@ -2634,9 +2758,13 @@ COPY --from=s1 unique2 / defer os.RemoveAll(destDir) opt := client.SolveOpt{ - FrontendAttrs: map[string]string{}, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + FrontendAttrs: map[string]string{}, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2651,7 +2779,7 @@ COPY --from=s1 unique2 / defer os.RemoveAll(destDir) opt.FrontendAttrs["no-cache"] = "" - opt.ExporterOutputDir = destDir2 + opt.Exports[0].OutputDir = destDir2 _, err = f.Solve(context.TODO(), c, opt, nil) require.NoError(t, err) @@ -2676,7 +2804,7 @@ COPY --from=s1 unique2 / defer os.RemoveAll(destDir) opt.FrontendAttrs["no-cache"] = "s1" - opt.ExporterOutputDir = destDir3 + opt.Exports[0].OutputDir = destDir3 _, err = f.Solve(context.TODO(), c, opt, nil) require.NoError(t, err) @@ -2718,8 +2846,12 @@ COPY foo2 bar2 defer os.RemoveAll(destDir) opt := client.SolveOpt{ - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: 
[]client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2765,12 +2897,16 @@ COPY --from=build out . defer os.RemoveAll(destDir) opt := client.SolveOpt{ - Exporter: client.ExporterLocal, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, FrontendAttrs: map[string]string{ "platform": "darwin/ppc64le", "build-arg:TARGETOS": "freebsd", }, - ExporterOutputDir: destDir, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2822,8 +2958,12 @@ COPY --from=build /out / "build-arg:http_proxy": "hpvalue", "build-arg:NO_PROXY": "npvalue", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2847,8 +2987,12 @@ COPY --from=build /out / "build-arg:FOO": "foocontents", "build-arg:http_proxy": "hpvalue2", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir, @@ -2872,8 +3016,12 @@ COPY --from=build /out / "build-arg:FOO": "foocontents2", "build-arg:http_proxy": "hpvalue2", }, - Exporter: client.ExporterLocal, - ExporterOutputDir: destDir, + Exports: []client.ExportEntry{ + { + Type: client.ExporterLocal, + OutputDir: destDir, + }, + }, LocalDirs: map[string]string{ builder.DefaultLocalNameDockerfile: dir, builder.DefaultLocalNameContext: dir,