From b262c3874d373f95ab6d27d27a9f31ed06c04537 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 20:13:03 +0300 Subject: [PATCH 01/16] feat: Add support for zstd compression of slim binaries --- Makefile | 1 + scripts/build_go_slim.sh | 23 ++++++++++++++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index e7b5fbe4668fd..5600bbb90ae21 100644 --- a/Makefile +++ b/Makefile @@ -35,6 +35,7 @@ build: site/out/index.html $(shell find . -not -path './vendor/*' -type f -name # build slim artifacts and copy them to the site output directory ./scripts/build_go_slim.sh \ --version "$(VERSION)" \ + --compress 22 \ --output ./dist/ \ linux:amd64,armv7,arm64 \ windows:amd64,arm64 \ diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 851aecd85ebbb..e7bdcf8db100b 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -23,8 +23,9 @@ source "$(dirname "${BASH_SOURCE[0]}")/lib.sh" version="" output_path="" +compress="" -args="$(getopt -o "" -l version:,output: -- "$@")" +args="$(getopt -o "" -l version:,output:,compress: -- "$@")" eval set -- "$args" while true; do case "$1" in @@ -36,6 +37,10 @@ while true; do output_path="$2" shift 2 ;; + --compress) + compress="$2" + shift 2 + ;; --) shift break @@ -48,6 +53,13 @@ done # Check dependencies dependencies go +if [[ -n $compress ]]; then + dependencies tar zstd + + if [[ ! $compress == [0-9]* ]] || ((compress > 22)) || ((compress < 1)); then + error "Invalid value for compress, must in in the range of [1, 22]" + fi +fi # Remove the "v" prefix. version="${version#v}" @@ -92,3 +104,12 @@ for f in ./coder-slim_*; do dest="$dest_dir/$hyphenated" cp "$f" "$dest" done + +if [[ -n $compress ]]; then + ( + cd "$dest_dir" + tar cf coder.tar coder-* + rm coder-* + zstd --ultra --long -"${compress}" --rm --no-progress coder.tar -o coder.tar.zst + ) +fi From fb24c029549aa1ba011522eb21064fcb2f17d534 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 20:16:12 +0300 Subject: [PATCH 02/16] feat: Add support for extract and serve of zstd bins --- cli/server.go | 1 + coderd/coderd.go | 15 +++- site/site.go | 158 +++++++++++++++++++++++++++++++++++++-- site/site_test.go | 185 ++++++++++++++++++++++++++++++++++++++++++++-- 4 files changed, 345 insertions(+), 14 deletions(-) diff --git a/cli/server.go b/cli/server.go index 65a9be4ebfe53..3230031c4f927 100644 --- a/cli/server.go +++ b/cli/server.go @@ -254,6 +254,7 @@ func server() *cobra.Command { Logger: logger.Named("coderd"), Database: databasefake.New(), Pubsub: database.NewPubsubInMemory(), + CacheDir: cacheDir, GoogleTokenValidator: googleTokenValidator, SecureAuthCookie: secureAuthCookie, SSHKeygenAlgorithm: sshKeygenAlgorithm, diff --git a/coderd/coderd.go b/coderd/coderd.go index c4822145b5a60..ce610c06b66f2 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -7,6 +7,7 @@ import ( "io" "net/http" "net/url" + "path/filepath" "sync" "time" @@ -42,6 +43,9 @@ type Options struct { Database database.Store Pubsub database.Pubsub + // CacheDir is used for caching files served by the API. + CacheDir string + AgentConnectionUpdateFrequency time.Duration // APIRateLimit is the minutely throughput rate limit per user or ip. 
// Setting a rate limit <0 will disable the rate limiter across the entire @@ -78,11 +82,20 @@ func New(options *Options) *API { } } + siteCacheDir := options.CacheDir + if siteCacheDir != "" { + siteCacheDir = filepath.Join(siteCacheDir, "site") + } + binFS, err := site.ExtractOrReadBinFS(siteCacheDir, site.FS()) + if err != nil { + panic(xerrors.Errorf("read site bin failed: %w", err)) + } + r := chi.NewRouter() api := &API{ Options: options, Handler: r, - siteHandler: site.Handler(site.FS()), + siteHandler: site.Handler(site.FS(), binFS), } api.workspaceAgentCache = wsconncache.New(api.dialWorkspaceAgent, 0) diff --git a/site/site.go b/site/site.go index af302f997b814..7c98d5fae128c 100644 --- a/site/site.go +++ b/site/site.go @@ -1,13 +1,15 @@ package site import ( + "archive/tar" "bytes" "context" - + "errors" "fmt" "io" "io/fs" "net/http" + "os" "path" "path/filepath" "strings" @@ -15,7 +17,9 @@ import ( "time" "github.com/justinas/nosurf" + "github.com/klauspost/compress/zstd" "github.com/unrolled/secure" + "golang.org/x/exp/slices" "golang.org/x/xerrors" ) @@ -29,22 +33,25 @@ func WithAPIResponse(ctx context.Context, apiResponse APIResponse) context.Conte } // Handler returns an HTTP handler for serving the static site. -func Handler(fileSystem fs.FS) http.Handler { +func Handler(siteFS fs.FS, binFS http.FileSystem) http.Handler { // html files are handled by a text/template. Non-html files // are served by the default file server. // // REMARK: text/template is needed to inject values on each request like // CSRF. - files, err := htmlFiles(fileSystem) - + files, err := htmlFiles(siteFS) if err != nil { panic(xerrors.Errorf("Failed to return handler for static files. Html files failed to load: %w", err)) } + mux := http.NewServeMux() + mux.Handle("/bin/", http.StripPrefix("/bin", http.FileServer(binFS))) + mux.Handle("/", http.FileServer(http.FS(siteFS))) // All other non-html static files. + return secureHeaders(&handler{ - fs: fileSystem, + fs: siteFS, htmlFiles: files, - h: http.FileServer(http.FS(fileSystem)), // All other non-html static files + h: mux, }) } @@ -146,8 +153,13 @@ func (h *handler) ServeHTTP(resp http.ResponseWriter, req *http.Request) { return } + switch { + // If requesting binaries, serve straight up. + case reqFile == "bin" || strings.HasPrefix(reqFile, "bin/"): + h.h.ServeHTTP(resp, req) + return // If the original file path exists we serve it. - if h.exists(reqFile) { + case h.exists(reqFile): if ShouldCacheFile(reqFile) { resp.Header().Add("Cache-Control", "public, max-age=31536000, immutable") } @@ -357,7 +369,6 @@ func htmlFiles(files fs.FS) (*htmlTemplates, error) { return nil }) - if err != nil { return nil, err } @@ -366,3 +377,134 @@ func htmlFiles(files fs.FS) (*htmlTemplates, error) { tpls: root, }, nil } + +// ExtractOrReadBinFS checks the provided fs for compressed coder +// binaries and extracts them into dest/bin if found. As a fallback, +// the provided FS is checked for a /bin directory, if it is non-empty +// it is returned. Finally dest/bin is returned as a fallback allowing +// binaries to be manually placed in dest (usually +// ${CODER_CACHE_DIRECTORY}/site/bin). +func ExtractOrReadBinFS(dest string, siteFS fs.FS) (http.FileSystem, error) { + if dest == "" { + // No destionation on fs, embedd fs is the only option. 
+ binFS, err := fs.Sub(siteFS, "bin") + if err != nil { + return nil, xerrors.Errorf("cache path is empty and embedd fs does not have /bin: %w", err) + } + return http.FS(binFS), nil + } + + dest = filepath.Join(dest, "bin") + mkdest := func() (http.FileSystem, error) { + err := os.MkdirAll(dest, 0o700) + if err != nil { + return nil, xerrors.Errorf("mkdir failed: %w", err) + } + return http.Dir(dest), nil + } + + archive, err := siteFS.Open("bin/coder.tar.zst") + if err != nil { + if xerrors.Is(err, fs.ErrNotExist) { + files, err := fs.ReadDir(siteFS, "bin") + if err != nil { + if xerrors.Is(err, fs.ErrNotExist) { + // Given fs does not have a bin directory, + // serve from cache directory. + return mkdest() + } + return nil, xerrors.Errorf("site fs read dir failed: %w", err) + } + + if len(filterFiles(files, "GITKEEP")) > 0 { + // If there are other files than bin/GITKEEP, + // serve the files. + binFS, err := fs.Sub(siteFS, "bin") + if err != nil { + return nil, xerrors.Errorf("site fs sub dir failed: %w", err) + } + return http.FS(binFS), nil + } + + // Nothing we can do, serve the cache directory, + // thus allowing binaries to be places there. + return mkdest() + } + return nil, xerrors.Errorf("open coder binary archive failed: %w", err) + } + defer archive.Close() + + dir, err := mkdest() + if err != nil { + return nil, err + } + + n, err := extractBin(dest, archive) + if err != nil { + return nil, xerrors.Errorf("extract coder binaries failed: %w", err) + } + if n == 0 { + return nil, xerrors.New("no files were extracted from coder binaries archive") + } + + return dir, nil +} + +func filterFiles(files []fs.DirEntry, names ...string) []fs.DirEntry { + var filtered []fs.DirEntry + for _, f := range files { + if slices.Contains(names, f.Name()) { + continue + } + filtered = append(filtered, f) + } + return filtered +} + +func extractBin(dest string, r io.Reader) (n int, err error) { + opts := []zstd.DOption{ + // Concurrency doesn't help us when decoding the tar and + // can actually slow us down. + zstd.WithDecoderConcurrency(1), + // Ignoring checksums give us a slight performance + // increase, we assume no corruption due to embedding. + zstd.IgnoreChecksum(true), + // Allow the decoder to use more memory giving us a 2-3x + // performance boost. + zstd.WithDecoderLowmem(false), + } + zr, err := zstd.NewReader(r, opts...) + if err != nil { + return 0, xerrors.Errorf("open zstd archive failed: %w", err) + } + defer zr.Close() + + tr := tar.NewReader(zr) + for { + h, err := tr.Next() + if err != nil { + if errors.Is(err, io.EOF) { + return n, nil + } + return n, xerrors.Errorf("read tar archive failed: %w", err) + } + + name := filepath.Join(dest, filepath.Base(h.Name)) + f, err := os.Create(name) + if err != nil { + return n, xerrors.Errorf("create file failed: %w", err) + } + //#nosec // We created this tar, no risk of decompression bomb. 
+ _, err = io.Copy(f, tr) + if err != nil { + _ = f.Close() + return n, xerrors.Errorf("write file contents failed: %w", err) + } + err = f.Close() + if err != nil { + return n, xerrors.Errorf("close file failed: %w", err) + } + + n++ + } +} diff --git a/site/site_test.go b/site/site_test.go index 86c3e22fa3484..e5331664aa58d 100644 --- a/site/site_test.go +++ b/site/site_test.go @@ -7,10 +7,13 @@ import ( "io" "net/http" "net/http/httptest" + "os" + "strings" "testing" "testing/fstest" "time" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/site" @@ -36,8 +39,9 @@ func TestCaching(t *testing.T) { Data: []byte("folderFile"), }, } + binFS := http.FS(fstest.MapFS{}) - srv := httptest.NewServer(site.Handler(rootFS)) + srv := httptest.NewServer(site.Handler(rootFS, binFS)) defer srv.Close() // Create a context @@ -95,15 +99,16 @@ func TestServingFiles(t *testing.T) { Data: []byte("dashboard-css-bytes"), }, } + binFS := http.FS(fstest.MapFS{}) - srv := httptest.NewServer(site.Handler(rootFS)) + srv := httptest.NewServer(site.Handler(rootFS, binFS)) defer srv.Close() // Create a context ctx, cancelFunc := context.WithTimeout(context.Background(), 1*time.Second) defer cancelFunc() - var testCases = []struct { + testCases := []struct { path string expected string }{ @@ -150,7 +155,7 @@ func TestServingFiles(t *testing.T) { func TestShouldCacheFile(t *testing.T) { t.Parallel() - var testCases = []struct { + testCases := []struct { reqFile string expected bool }{ @@ -171,6 +176,175 @@ func TestShouldCacheFile(t *testing.T) { } } +func readFile(t *testing.T, name string) []byte { + t.Helper() + b, err := os.ReadFile(name) + if err != nil { + t.Fatal(err) + } + return b +} + +func TestServingBin(t *testing.T) { + t.Parallel() + + // Create a misc rootfs for realistic test. 
+ rootFS := fstest.MapFS{ + "index.html": &fstest.MapFile{ + Data: []byte("index-bytes"), + }, + "favicon.ico": &fstest.MapFile{ + Data: []byte("favicon-bytes"), + }, + "dashboard.js": &fstest.MapFile{ + Data: []byte("dashboard-js-bytes"), + }, + "dashboard.css": &fstest.MapFile{ + Data: []byte("dashboard-css-bytes"), + }, + } + + type req struct { + url string + wantStatus int + wantBody []byte + } + tests := []struct { + name string + fs fstest.MapFS + reqs []req + wantErr bool + }{ + { + name: "Extract and serve bin", + fs: fstest.MapFS{ + "bin/coder.tar.zst": &fstest.MapFile{ + // echo iamcoder >coder-linux-amd64 + // tar cf coder.tar coder-linux-amd64 + // zstd --long --ultra -22 coder.tar + Data: []byte{ + 0x28, 0xb5, 0x2f, 0xfd, 0x64, 0x00, 0x27, 0xf5, 0x02, 0x00, 0x12, 0xc4, + 0x0e, 0x16, 0xa0, 0xb5, 0x39, 0x00, 0xe8, 0x67, 0x59, 0xaf, 0xe3, 0xdd, + 0x8d, 0xfe, 0x47, 0xe8, 0x9d, 0x9c, 0x44, 0x0b, 0x75, 0x70, 0x61, 0x52, + 0x0d, 0x56, 0xaa, 0x16, 0xb9, 0x5a, 0x0a, 0x4b, 0x40, 0xd2, 0x7a, 0x05, + 0xd1, 0xd7, 0xe3, 0xf9, 0xf9, 0x07, 0xef, 0xda, 0x77, 0x04, 0xff, 0xe8, + 0x7a, 0x94, 0x56, 0x9a, 0x40, 0x3b, 0x94, 0x61, 0x18, 0x91, 0x90, 0x21, + 0x0c, 0x00, 0xf3, 0xc5, 0xe5, 0xd8, 0x80, 0x10, 0x06, 0x0a, 0x08, 0x86, + 0xb2, 0x00, 0x60, 0x12, 0x70, 0xd3, 0x51, 0x05, 0x04, 0x20, 0x16, 0x2c, + 0x79, 0xad, 0x01, 0xc0, 0xf5, 0x28, 0x08, 0x03, 0x1c, 0x4c, 0x84, 0xf4, + }, + }, + }, + reqs: []req{ + {url: "/bin/coder-linux-amd64", wantStatus: http.StatusOK, wantBody: []byte("iamcoder\n")}, + {url: "/bin/GITKEEP", wantStatus: http.StatusNotFound}, + }, + }, + { + name: "Error on invalid archive", + fs: fstest.MapFS{ + "bin/coder.tar.zst": &fstest.MapFile{ + Data: []byte{ + 0x28, 0xb5, 0x2f, 0xfd, 0x64, 0x00, 0x27, 0xf5, 0x02, 0x00, 0x12, 0xc4, + 0x0e, 0x16, 0xa0, 0xb5, 0x39, 0x00, 0xe8, 0x67, 0x59, 0xaf, 0xe3, 0xdd, + 0x8d, 0xfe, 0x47, 0xe8, 0x9d, 0x9c, 0x44, 0x0b, 0x75, 0x70, 0x61, 0x52, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Zeroed from above test. + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Zeroed from above test. + 0x7a, 0x94, 0x56, 0x9a, 0x40, 0x3b, 0x94, 0x61, 0x18, 0x91, 0x90, 0x21, + 0x0c, 0x00, 0xf3, 0xc5, 0xe5, 0xd8, 0x80, 0x10, 0x06, 0x0a, 0x08, 0x86, + 0xb2, 0x00, 0x60, 0x12, 0x70, 0xd3, 0x51, 0x05, 0x04, 0x20, 0x16, 0x2c, + 0x79, 0xad, 0x01, 0xc0, 0xf5, 0x28, 0x08, 0x03, 0x1c, 0x4c, 0x84, 0xf4, + }, + }, + }, + wantErr: true, + }, + { + name: "Error on empty archive", + fs: fstest.MapFS{ + "bin/coder.tar.zst": &fstest.MapFile{Data: []byte{}}, + }, + wantErr: true, + }, + { + name: "Serve local fs", + fs: fstest.MapFS{ + // Only GITKEEP file on embedded fs, won't be served. + "bin/GITKEEP": &fstest.MapFile{}, + }, + reqs: []req{ + {url: "/bin/coder-linux-amd64", wantStatus: http.StatusNotFound}, + {url: "/bin/GITKEEP", wantStatus: http.StatusNotFound}, + }, + }, + { + name: "Serve local fs when embedd fs empty", + fs: fstest.MapFS{}, + reqs: []req{ + {url: "/bin/coder-linux-amd64", wantStatus: http.StatusNotFound}, + {url: "/bin/GITKEEP", wantStatus: http.StatusNotFound}, + }, + }, + { + name: "Serve embedd fs", + fs: fstest.MapFS{ + "bin/GITKEEP": &fstest.MapFile{ + Data: []byte(""), + }, + "bin/coder-linux-amd64": &fstest.MapFile{ + Data: []byte("embedd"), + }, + }, + reqs: []req{ + {url: "/bin/coder-linux-amd64", wantStatus: http.StatusOK, wantBody: []byte("embedd")}, + {url: "/bin/GITKEEP", wantStatus: http.StatusOK, wantBody: []byte("")}, + }, + }, + } + //nolint // Parallel test detection issue. 
+ for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + dest := t.TempDir() + binFS, err := site.ExtractOrReadBinFS(dest, tt.fs) + if !tt.wantErr && err != nil { + require.NoError(t, err, "extract or read failed") + } else if tt.wantErr { + require.Error(t, err, "extraction or read did not fail") + } + + srv := httptest.NewServer(site.Handler(rootFS, binFS)) + defer srv.Close() + + // Create a context + ctx, cancelFunc := context.WithTimeout(context.Background(), 1*time.Second) + defer cancelFunc() + + for _, tr := range tt.reqs { + t.Run(strings.TrimPrefix(tr.url, "/"), func(t *testing.T) { + req, err := http.NewRequestWithContext(ctx, "GET", srv.URL+tr.url, nil) + require.NoError(t, err, "http request failed") + + resp, err := http.DefaultClient.Do(req) + require.NoError(t, err, "http do failed") + defer resp.Body.Close() + + gotStatus := resp.StatusCode + if tr.wantStatus > 0 { + assert.Equal(t, tr.wantStatus, gotStatus, "status did not match") + } + if tr.wantBody != nil { + gotBody, _ := io.ReadAll(resp.Body) + assert.Equal(t, string(tr.wantBody), string(gotBody), "body did not match") + } + }) + } + }) + } +} + func TestServeAPIResponse(t *testing.T) { t.Parallel() @@ -180,6 +354,7 @@ func TestServeAPIResponse(t *testing.T) { Data: []byte(`{"code":{{ .APIResponse.StatusCode }},"message":"{{ .APIResponse.Message }}"}`), }, } + binFS := http.FS(fstest.MapFS{}) apiResponse := site.APIResponse{ StatusCode: http.StatusBadGateway, @@ -187,7 +362,7 @@ func TestServeAPIResponse(t *testing.T) { } srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { r = r.WithContext(site.WithAPIResponse(r.Context(), apiResponse)) - site.Handler(rootFS).ServeHTTP(w, r) + site.Handler(rootFS, binFS).ServeHTTP(w, r) })) defer srv.Close() From 6c7e1f7325fda6a2c0b492ebadcb765c6a6cbce4 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 20:33:58 +0300 Subject: [PATCH 03/16] chore: Remove unused function --- site/site_test.go | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/site/site_test.go b/site/site_test.go index e5331664aa58d..07d46834df376 100644 --- a/site/site_test.go +++ b/site/site_test.go @@ -7,7 +7,6 @@ import ( "io" "net/http" "net/http/httptest" - "os" "strings" "testing" "testing/fstest" @@ -176,15 +175,6 @@ func TestShouldCacheFile(t *testing.T) { } } -func readFile(t *testing.T, name string) []byte { - t.Helper() - b, err := os.ReadFile(name) - if err != nil { - t.Fatal(err) - } - return b -} - func TestServingBin(t *testing.T) { t.Parallel() From b4271082fc46a7ff32c2186321d1f69dc06173af Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 20:46:55 +0300 Subject: [PATCH 04/16] Always read body in test --- site/site_test.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/site/site_test.go b/site/site_test.go index 07d46834df376..648d279bc2552 100644 --- a/site/site_test.go +++ b/site/site_test.go @@ -322,11 +322,12 @@ func TestServingBin(t *testing.T) { defer resp.Body.Close() gotStatus := resp.StatusCode + gotBody, _ := io.ReadAll(resp.Body) + if tr.wantStatus > 0 { assert.Equal(t, tr.wantStatus, gotStatus, "status did not match") } if tr.wantBody != nil { - gotBody, _ := io.ReadAll(resp.Body) assert.Equal(t, string(tr.wantBody), string(gotBody), "body did not match") } }) From 3c688233d041d6363e0dd8303dd67b8c9f9cdf43 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 20:59:31 +0300 Subject: [PATCH 05/16] chore: Log what we 
are doing --- scripts/build_go_slim.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index e7bdcf8db100b..3d99f067b4b50 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -106,6 +106,7 @@ for f in ./coder-slim_*; do done if [[ -n $compress ]]; then + log "--- Compressing coder-slim binaries using zstd level $compress ($dest_dir/coder.tar.zst)" ( cd "$dest_dir" tar cf coder.tar coder-* From 0c6e367b0924e7c43c22bffa51cb486cf46782aa Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:16:46 +0300 Subject: [PATCH 06/16] chore: Fix PR comments --- site/site.go | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/site/site.go b/site/site.go index 7c98d5fae128c..a4bf62fec47a2 100644 --- a/site/site.go +++ b/site/site.go @@ -461,14 +461,14 @@ func filterFiles(files []fs.DirEntry, names ...string) []fs.DirEntry { return filtered } -func extractBin(dest string, r io.Reader) (n int, err error) { +func extractBin(dest string, r io.Reader) (numExtraced int, err error) { opts := []zstd.DOption{ // Concurrency doesn't help us when decoding the tar and // can actually slow us down. zstd.WithDecoderConcurrency(1), - // Ignoring checksums give us a slight performance - // increase, we assume no corruption due to embedding. - zstd.IgnoreChecksum(true), + // Ignoring checksums can give a slight performance + // boost but it's probalby not worth the reduced safety. + zstd.IgnoreChecksum(false), // Allow the decoder to use more memory giving us a 2-3x // performance boost. zstd.WithDecoderLowmem(false), @@ -480,6 +480,7 @@ func extractBin(dest string, r io.Reader) (n int, err error) { defer zr.Close() tr := tar.NewReader(zr) + n := 0 for { h, err := tr.Next() if err != nil { From de8c01ffe44a78dd29c13b90356ceeb859aec214 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:25:22 +0300 Subject: [PATCH 07/16] chore: Fix PR comments --- scripts/build_go_slim.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 3d99f067b4b50..e27a76abbb5d2 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -23,7 +23,7 @@ source "$(dirname "${BASH_SOURCE[0]}")/lib.sh" version="" output_path="" -compress="" +compress=0 args="$(getopt -o "" -l version:,output:,compress: -- "$@")" eval set -- "$args" @@ -53,10 +53,10 @@ done # Check dependencies dependencies go -if [[ -n $compress ]]; then +if [[ $compress != 0 ]]; then dependencies tar zstd - if [[ ! 
$compress == [0-9]* ]] || ((compress > 22)) || ((compress < 1)); then + if [[ $compress != [0-9]* ]] || ((compress > 22)) || ((compress < 1)); then error "Invalid value for compress, must in in the range of [1, 22]" fi fi From 528c34c533338ef1a3fb494008f5c2921ed6daa1 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:26:09 +0300 Subject: [PATCH 08/16] chore: Fix PR comments --- scripts/build_go_slim.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index e27a76abbb5d2..761dc1dc97d79 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -105,7 +105,7 @@ for f in ./coder-slim_*; do cp "$f" "$dest" done -if [[ -n $compress ]]; then +if [[ $compress != 0 ]]; then log "--- Compressing coder-slim binaries using zstd level $compress ($dest_dir/coder.tar.zst)" ( cd "$dest_dir" From db959f393582fe592abef2619277904e31179203 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:27:07 +0300 Subject: [PATCH 09/16] Apply suggestions from code review Co-authored-by: Dean Sheather --- site/site.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/site/site.go b/site/site.go index a4bf62fec47a2..65d21591618fb 100644 --- a/site/site.go +++ b/site/site.go @@ -386,10 +386,10 @@ func htmlFiles(files fs.FS) (*htmlTemplates, error) { // ${CODER_CACHE_DIRECTORY}/site/bin). func ExtractOrReadBinFS(dest string, siteFS fs.FS) (http.FileSystem, error) { if dest == "" { - // No destionation on fs, embedd fs is the only option. + // No destination on fs, embedded fs is the only option. binFS, err := fs.Sub(siteFS, "bin") if err != nil { - return nil, xerrors.Errorf("cache path is empty and embedd fs does not have /bin: %w", err) + return nil, xerrors.Errorf("cache path is empty and embedded fs does not have /bin: %w", err) } return http.FS(binFS), nil } From bd322c7a35ca081984980860458e705a647835d6 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:32:58 +0300 Subject: [PATCH 10/16] chore: Use pushd/popd vs subshell --- scripts/build_go_slim.sh | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 761dc1dc97d79..3414f5b3dc2a5 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -107,10 +107,9 @@ done if [[ $compress != 0 ]]; then log "--- Compressing coder-slim binaries using zstd level $compress ($dest_dir/coder.tar.zst)" - ( - cd "$dest_dir" - tar cf coder.tar coder-* - rm coder-* - zstd --ultra --long -"${compress}" --rm --no-progress coder.tar -o coder.tar.zst - ) + pushd "$dest_dir" + tar cf coder.tar coder-* + rm coder-* + zstd --ultra --long -"${compress}" --rm --no-progress coder.tar -o coder.tar.zst + popd fi From 5c070d55fcd8c4b3aa0f561bddce19774ee4fafc Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:38:26 +0300 Subject: [PATCH 11/16] chore: Switch use of (( to [[ --- scripts/build_go_slim.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 3414f5b3dc2a5..5256fb352666e 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -56,7 +56,7 @@ dependencies go if [[ $compress != 0 ]]; then dependencies tar zstd - if [[ $compress != [0-9]* ]] || ((compress > 22)) || ((compress < 1)); then + if [[ $compress != [0-9]* ]] || [[ $compress -gt 22 ]] || [[ $compress -lt 1 ]]; then error "Invalid value for compress, must in 
in the range of [1, 22]" fi fi From c3e46139cedeff2287feca0ec9382fd7fb81e3b1 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:50:16 +0300 Subject: [PATCH 12/16] fix: Use compress in github actions --- .github/workflows/coder.yaml | 4 ++++ .github/workflows/release.yaml | 8 ++++++++ 2 files changed, 12 insertions(+) diff --git a/.github/workflows/coder.yaml b/.github/workflows/coder.yaml index b22961fad04b6..98eed66af4637 100644 --- a/.github/workflows/coder.yaml +++ b/.github/workflows/coder.yaml @@ -370,6 +370,9 @@ jobs: - name: Install nfpm run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@v2.16.0 + - name: Install zstd + run: sudo apt-get install -y zstd + - name: Build site run: make -B site/out/index.html @@ -382,6 +385,7 @@ jobs: # build slim binaries ./scripts/build_go_slim.sh \ --output ./dist/ \ + --compress 22 \ linux:amd64,armv7,arm64 \ windows:amd64,arm64 \ darwin:amd64,arm64 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 24838bc4ac5d1..5ae04d665b08c 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -68,6 +68,9 @@ jobs: - name: Install nfpm run: go install github.com/goreleaser/nfpm/v2/cmd/nfpm@v2.16.0 + - name: Install zstd + run: sudo apt-get install -y zstd + - name: Build Site run: make site/out/index.html @@ -80,6 +83,7 @@ jobs: # build slim binaries ./scripts/build_go_slim.sh \ --output ./dist/ \ + --compress 22 \ linux:amd64,armv7,arm64 \ windows:amd64,arm64 \ darwin:amd64,arm64 @@ -198,6 +202,9 @@ jobs: brew tap mitchellh/gon brew install mitchellh/gon/gon + # Used for compressing embedded slim binaries + brew install zstd + - name: Build Site run: make site/out/index.html @@ -210,6 +217,7 @@ jobs: # build slim binaries ./scripts/build_go_slim.sh \ --output ./dist/ \ + --compress 22 \ linux:amd64,armv7,arm64 \ windows:amd64,arm64 \ darwin:amd64,arm64 From d7159449bcc094db5b09a608519849f7508e6f73 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:50:44 +0300 Subject: [PATCH 13/16] chore: Update build_go_slim.sh usage --- scripts/build_go_slim.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 5256fb352666e..1832117180a42 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -3,7 +3,7 @@ # This script builds multiple "slim" Go binaries for Coder with the given OS and # architecture combinations. This wraps ./build_go_matrix.sh. # -# Usage: ./build_go_slim.sh [--version 1.2.3-devel+abcdef] [--output dist/] os1:arch1,arch2 os2:arch1 os1:arch3 +# Usage: ./build_go_slim.sh [--version 1.2.3-devel+abcdef] [--output dist/] [--compress 22] os1:arch1,arch2 os2:arch1 os1:arch3 # # If no OS:arch combinations are provided, nothing will happen and no error will # be returned. If no version is specified, defaults to the version from From 2db796116a9f3b18f30219e7753b90e03b4c8c39 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 21:51:34 +0300 Subject: [PATCH 14/16] fix: Use compression level 6 for Makefile builds --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5600bbb90ae21..56fc493d358a7 100644 --- a/Makefile +++ b/Makefile @@ -35,7 +35,7 @@ build: site/out/index.html $(shell find . 
-not -path './vendor/*' -type f -name # build slim artifacts and copy them to the site output directory ./scripts/build_go_slim.sh \ --version "$(VERSION)" \ - --compress 22 \ + --compress 6 \ --output ./dist/ \ linux:amd64,armv7,arm64 \ windows:amd64,arm64 \ From e65a52822ec20835574c16bf8bd52cbf42070554 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 20 Jun 2022 22:07:01 +0300 Subject: [PATCH 15/16] fix: Explain compress option --- scripts/build_go_slim.sh | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 1832117180a42..28f148be6a17f 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -15,6 +15,10 @@ # # The built binaries are additionally copied to the site output directory so # they can be packaged into non-slim binaries correctly. +# +# When the --compress parameter is provided, the binaries in site/bin +# will be compressed using zstd into site/bin/coder.tar.zst, this helps reduce +# final binary size significantly. set -euo pipefail shopt -s nullglob From 650558e439dff2fb93af5e76b3847265a050f323 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Tue, 21 Jun 2022 12:37:53 +0300 Subject: [PATCH 16/16] fix: Add zstd --force to prevent prompts --- scripts/build_go_slim.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/build_go_slim.sh b/scripts/build_go_slim.sh index 28f148be6a17f..c5b029ec44c1c 100755 --- a/scripts/build_go_slim.sh +++ b/scripts/build_go_slim.sh @@ -114,6 +114,6 @@ if [[ $compress != 0 ]]; then pushd "$dest_dir" tar cf coder.tar coder-* rm coder-* - zstd --ultra --long -"${compress}" --rm --no-progress coder.tar -o coder.tar.zst + zstd --force --ultra --long -"${compress}" --rm --no-progress coder.tar -o coder.tar.zst popd fi
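
Taken together, the series wires compression (scripts/build_go_slim.sh), extraction (site.ExtractOrReadBinFS), and serving (the /bin/ route added to site.Handler) into one flow. The sketch below shows how that flow could be exercised locally; the flag values are taken from the script's usage line and the Makefile change above, while the cache path default and server address are illustrative assumptions, not values defined in these patches.

    #!/usr/bin/env bash
    set -euo pipefail

    # Build slim binaries and pack them into coder.tar.zst in the site
    # output bin directory (level 6 matches the Makefile; CI uses 22).
    ./scripts/build_go_slim.sh \
        --version "1.2.3-devel+abcdef" \
        --compress 6 \
        --output ./dist/ \
        linux:amd64,arm64 \
        darwin:amd64,arm64

    # On startup the server extracts the archive into <cache>/site/bin and
    # serves the binaries under /bin/ (ExtractOrReadBinFS + site.Handler).
    # The cache default and listen address below are assumptions.
    ls "${CODER_CACHE_DIRECTORY:-.cache}/site/bin"
    curl -fsSL "http://localhost:3000/bin/coder-linux-amd64" -o coder-linux-amd64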