diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml
index 4d91a9b2acb07..6ee57ff57db6b 100644
--- a/.github/actions/setup-go/action.yaml
+++ b/.github/actions/setup-go/action.yaml
@@ -42,7 +42,7 @@ runs:
- name: Install gotestsum
shell: bash
- run: go install gotest.tools/gotestsum@3f7ff0ec4aeb6f95f5d67c998b71f272aa8a8b41 # v1.12.1
+ run: go install gotest.tools/gotestsum@0d9599e513d70e5792bb9334869f82f6e8b53d4d # main as of 2025-05-15
# It isn't necessary that we ever do this, but it helps
# separate the "setup" from the "run" times.
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index f27885314b8e7..ad8f5d1289715 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -336,7 +336,7 @@ jobs:
# a separate repository to allow its use before actions/checkout.
- name: Setup RAM Disks
if: runner.os == 'Windows'
- uses: coder/setup-ramdisk-action@79dacfe70c47ad6d6c0dd7f45412368802641439
+ uses: coder/setup-ramdisk-action@81c5c441bda00c6c3d6bcee2e5a33ed4aadbbcc1
- name: Checkout
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
@@ -613,7 +613,7 @@ jobs:
# c.f. discussion on https://github.com/coder/coder/pull/15106
- name: Run Tests
run: |
- gotestsum --junitfile="gotests.xml" -- -race -parallel 4 -p 4 ./...
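+          # Rerun failed tests up to twice; abort reruns if a failure is caused by a data race.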
+ gotestsum --junitfile="gotests.xml" --packages="./..." --rerun-fails=2 --rerun-fails-abort-on-data-race -- -race -parallel 4 -p 4
- name: Upload Test Cache
uses: ./.github/actions/test-cache/upload
@@ -665,7 +665,7 @@ jobs:
POSTGRES_VERSION: "16"
run: |
make test-postgres-docker
- DB=ci gotestsum --junitfile="gotests.xml" -- -race -parallel 4 -p 4 ./...
+ DB=ci gotestsum --junitfile="gotests.xml" --packages="./..." --rerun-fails=2 --rerun-fails-abort-on-data-race -- -race -parallel 4 -p 4
- name: Upload Test Cache
uses: ./.github/actions/test-cache/upload
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index ce1e803d3e41e..881cc4c437db6 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -924,55 +924,3 @@ jobs:
continue-on-error: true
run: |
make sqlc-push
-
- update-calendar:
- name: "Update release calendar in docs"
- runs-on: "ubuntu-latest"
- needs: [release, publish-homebrew, publish-winget, publish-sqlc]
- if: ${{ !inputs.dry_run }}
- permissions:
- contents: write
- pull-requests: write
- steps:
- - name: Harden Runner
- uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
- with:
- egress-policy: audit
-
- - name: Checkout repository
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- with:
- fetch-depth: 0 # Needed to get all tags for version calculation
-
- - name: Set up Git
- run: |
- git config user.name "Coder CI"
- git config user.email "cdrci@coder.com"
-
- - name: Run update script
- run: |
- ./scripts/update-release-calendar.sh
- make fmt/markdown
-
- - name: Check for changes
- id: check_changes
- run: |
- if git diff --quiet docs/install/releases/index.md; then
- echo "No changes detected in release calendar."
- echo "changes=false" >> $GITHUB_OUTPUT
- else
- echo "Changes detected in release calendar."
- echo "changes=true" >> $GITHUB_OUTPUT
- fi
-
- - name: Create Pull Request
- if: steps.check_changes.outputs.changes == 'true'
- uses: peter-evans/create-pull-request@ff45666b9427631e3450c54a1bcbee4d9ff4d7c0 # v3.0.0
- with:
- commit-message: "docs: update release calendar"
- title: "docs: update release calendar"
- body: |
- This PR automatically updates the release calendar in the docs.
- branch: bot/update-release-calendar
- delete-branch: true
- labels: docs
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 5b68e4b26c20d..f9902ede655cf 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -47,6 +47,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: "Upload to code-scanning"
- uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+ uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
sarif_file: results.sarif
diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml
index f9f461cfe9966..721584b89e202 100644
--- a/.github/workflows/security.yaml
+++ b/.github/workflows/security.yaml
@@ -38,7 +38,7 @@ jobs:
uses: ./.github/actions/setup-go
- name: Initialize CodeQL
- uses: github/codeql-action/init@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+ uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
languages: go, javascript
@@ -48,7 +48,7 @@ jobs:
rm Makefile
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+ uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
- name: Send Slack notification on failure
if: ${{ failure() }}
@@ -150,7 +150,7 @@ jobs:
severity: "CRITICAL,HIGH"
- name: Upload Trivy scan results to GitHub Security tab
- uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17
+ uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18
with:
sarif_file: trivy-results.sarif
category: "Trivy"
diff --git a/.gitignore b/.gitignore
index 24021e54ddde2..5aa08b2512527 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,6 +50,8 @@ site/stats/
*.tfplan
*.lock.hcl
.terraform/
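+# Allow the .terraform directories used as test fixtures to be committed.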
+!coderd/testdata/parameters/modules/.terraform/
+!provisioner/terraform/testdata/modules-source-caching/.terraform/
**/.coderv2/*
**/__debug_bin
diff --git a/agent/agent.go b/agent/agent.go
index d0e668af34d74..ffdacfb64ba75 100644
--- a/agent/agent.go
+++ b/agent/agent.go
@@ -363,9 +363,11 @@ func (a *agent) runLoop() {
if ctx.Err() != nil {
// Context canceled errors may come from websocket pings, so we
// don't want to use `errors.Is(err, context.Canceled)` here.
+ a.logger.Warn(ctx, "runLoop exited with error", slog.Error(ctx.Err()))
return
}
if a.isClosed() {
+ a.logger.Warn(ctx, "runLoop exited because agent is closed")
return
}
if errors.Is(err, io.EOF) {
@@ -1046,7 +1048,11 @@ func (a *agent) run() (retErr error) {
return a.statsReporter.reportLoop(ctx, aAPI)
})
- return connMan.wait()
+ err = connMan.wait()
+ if err != nil {
+ a.logger.Info(context.Background(), "connection manager errored", slog.Error(err))
+ }
+ return err
}
// handleManifest returns a function that fetches and processes the manifest
diff --git a/agent/agent_test.go b/agent/agent_test.go
index fe2c99059e9d8..029fbb0f8ea32 100644
--- a/agent/agent_test.go
+++ b/agent/agent_test.go
@@ -1262,10 +1262,6 @@ func TestAgent_SSHConnectionLoginVars(t *testing.T) {
key: "LOGNAME",
want: u.Username,
},
- {
- key: "HOME",
- want: u.HomeDir,
- },
{
key: "SHELL",
want: shell,
@@ -1502,7 +1498,7 @@ func TestAgent_Lifecycle(t *testing.T) {
_, client, _, _, _ := setupAgent(t, agentsdk.Manifest{
Scripts: []codersdk.WorkspaceAgentScript{{
- Script: "true",
+ Script: "echo foo",
Timeout: 30 * time.Second,
RunOnStart: true,
}},
@@ -1935,8 +1931,6 @@ func TestAgent_ReconnectingPTYContainer(t *testing.T) {
t.Skip("Set CODER_TEST_USE_DOCKER=1 to run this test")
}
- ctx := testutil.Context(t, testutil.WaitLong)
-
pool, err := dockertest.NewPool("")
require.NoError(t, err, "Could not connect to docker")
ct, err := pool.RunWithOptions(&dockertest.RunOptions{
@@ -1948,10 +1942,10 @@ func TestAgent_ReconnectingPTYContainer(t *testing.T) {
config.RestartPolicy = docker.RestartPolicy{Name: "no"}
})
require.NoError(t, err, "Could not start container")
- t.Cleanup(func() {
+ defer func() {
err := pool.Purge(ct)
require.NoError(t, err, "Could not stop container")
- })
+ }()
// Wait for container to start
require.Eventually(t, func() bool {
ct, ok := pool.ContainerByName(ct.Container.Name)
@@ -1962,6 +1956,7 @@ func TestAgent_ReconnectingPTYContainer(t *testing.T) {
conn, _, _, _, _ := setupAgent(t, agentsdk.Manifest{}, 0, func(_ *agenttest.Client, o *agent.Options) {
o.ExperimentalDevcontainersEnabled = true
})
+ ctx := testutil.Context(t, testutil.WaitLong)
ac, err := conn.ReconnectingPTY(ctx, uuid.New(), 80, 80, "/bin/sh", func(arp *workspacesdk.AgentReconnectingPTYInit) {
arp.Container = ct.Container.ID
})
@@ -2005,9 +2000,6 @@ func TestAgent_DevcontainerAutostart(t *testing.T) {
t.Skip("Set CODER_TEST_USE_DOCKER=1 to run this test")
}
- ctx := testutil.Context(t, testutil.WaitLong)
-
- // Connect to Docker
pool, err := dockertest.NewPool("")
require.NoError(t, err, "Could not connect to docker")
@@ -2051,7 +2043,7 @@ func TestAgent_DevcontainerAutostart(t *testing.T) {
},
},
}
- // nolint: dogsled
+ //nolint:dogsled
conn, _, _, _, _ := setupAgent(t, manifest, 0, func(_ *agenttest.Client, o *agent.Options) {
o.ExperimentalDevcontainersEnabled = true
})
@@ -2079,8 +2071,7 @@ func TestAgent_DevcontainerAutostart(t *testing.T) {
return false
}, testutil.WaitSuperLong, testutil.IntervalMedium, "no container with workspace folder label found")
-
- t.Cleanup(func() {
+ defer func() {
// We can't rely on pool here because the container is not
// managed by it (it is managed by @devcontainer/cli).
err := pool.Client.RemoveContainer(docker.RemoveContainerOptions{
@@ -2089,13 +2080,15 @@ func TestAgent_DevcontainerAutostart(t *testing.T) {
Force: true,
})
assert.NoError(t, err, "remove container")
- })
+ }()
containerInfo, err := pool.Client.InspectContainer(container.ID)
require.NoError(t, err, "inspect container")
t.Logf("Container state: status: %v", containerInfo.State.Status)
require.True(t, containerInfo.State.Running, "container should be running")
+ ctx := testutil.Context(t, testutil.WaitLong)
+
ac, err := conn.ReconnectingPTY(ctx, uuid.New(), 80, 80, "", func(opts *workspacesdk.AgentReconnectingPTYInit) {
opts.Container = container.ID
})
@@ -2124,6 +2117,173 @@ func TestAgent_DevcontainerAutostart(t *testing.T) {
require.NoError(t, err, "file should exist outside devcontainer")
}
+// TestAgent_DevcontainerRecreate tests that RecreateDevcontainer
+// recreates a devcontainer and emits logs.
+//
+// This tests end-to-end functionality of auto-starting and then
+// recreating a devcontainer.
+// It runs "devcontainer up" which creates a real Docker container. As
+// such, it does not run by default in CI.
+//
+// You can run it manually as follows:
+//
+// CODER_TEST_USE_DOCKER=1 go test -count=1 ./agent -run TestAgent_DevcontainerRecreate
+func TestAgent_DevcontainerRecreate(t *testing.T) {
+ if os.Getenv("CODER_TEST_USE_DOCKER") != "1" {
+ t.Skip("Set CODER_TEST_USE_DOCKER=1 to run this test")
+ }
+ t.Parallel()
+
+ pool, err := dockertest.NewPool("")
+ require.NoError(t, err, "Could not connect to docker")
+
+ // Prepare temporary devcontainer for test (mywork).
+ devcontainerID := uuid.New()
+ devcontainerLogSourceID := uuid.New()
+ workspaceFolder := filepath.Join(t.TempDir(), "mywork")
+ t.Logf("Workspace folder: %s", workspaceFolder)
+ devcontainerPath := filepath.Join(workspaceFolder, ".devcontainer")
+ err = os.MkdirAll(devcontainerPath, 0o755)
+ require.NoError(t, err, "create devcontainer directory")
+ devcontainerFile := filepath.Join(devcontainerPath, "devcontainer.json")
+ err = os.WriteFile(devcontainerFile, []byte(`{
+ "name": "mywork",
+ "image": "busybox:latest",
+ "cmd": ["sleep", "infinity"]
+ }`), 0o600)
+ require.NoError(t, err, "write devcontainer.json")
+
+ manifest := agentsdk.Manifest{
+		// Set up pre-conditions for auto-starting a devcontainer; the
+ // script is used to extract the log source ID.
+ Devcontainers: []codersdk.WorkspaceAgentDevcontainer{
+ {
+ ID: devcontainerID,
+ Name: "test",
+ WorkspaceFolder: workspaceFolder,
+ },
+ },
+ Scripts: []codersdk.WorkspaceAgentScript{
+ {
+ ID: devcontainerID,
+ LogSourceID: devcontainerLogSourceID,
+ },
+ },
+ }
+
+ //nolint:dogsled
+ conn, client, _, _, _ := setupAgent(t, manifest, 0, func(_ *agenttest.Client, o *agent.Options) {
+ o.ExperimentalDevcontainersEnabled = true
+ })
+
+ ctx := testutil.Context(t, testutil.WaitLong)
+
+ // We enabled autostart for the devcontainer, so ready is a good
+ // indication that the devcontainer is up and running. Importantly,
+ // this also means that the devcontainer startup is no longer
+ // producing logs that may interfere with the recreate logs.
+ testutil.Eventually(ctx, t, func(context.Context) bool {
+ states := client.GetLifecycleStates()
+ return slices.Contains(states, codersdk.WorkspaceAgentLifecycleReady)
+ }, testutil.IntervalMedium, "devcontainer not ready")
+
+ t.Logf("Looking for container with label: devcontainer.local_folder=%s", workspaceFolder)
+
+ var container docker.APIContainers
+ testutil.Eventually(ctx, t, func(context.Context) bool {
+ containers, err := pool.Client.ListContainers(docker.ListContainersOptions{All: true})
+ if err != nil {
+ t.Logf("Error listing containers: %v", err)
+ return false
+ }
+ for _, c := range containers {
+ t.Logf("Found container: %s with labels: %v", c.ID[:12], c.Labels)
+ if v, ok := c.Labels["devcontainer.local_folder"]; ok && v == workspaceFolder {
+ t.Logf("Found matching container: %s", c.ID[:12])
+ container = c
+ return true
+ }
+ }
+ return false
+ }, testutil.IntervalMedium, "no container with workspace folder label found")
+ defer func(container docker.APIContainers) {
+ // We can't rely on pool here because the container is not
+ // managed by it (it is managed by @devcontainer/cli).
+ err := pool.Client.RemoveContainer(docker.RemoveContainerOptions{
+ ID: container.ID,
+ RemoveVolumes: true,
+ Force: true,
+ })
+ assert.Error(t, err, "container should be removed by recreate")
+ }(container)
+
+ ctx = testutil.Context(t, testutil.WaitLong) // Reset context.
+
+ // Capture logs via ScriptLogger.
+ logsCh := make(chan *proto.BatchCreateLogsRequest, 1)
+ client.SetLogsChannel(logsCh)
+
+ // Invoke recreate to trigger the destruction and recreation of the
+	// devcontainer; we do it in a goroutine so we can process logs
+ // concurrently.
+ go func(container docker.APIContainers) {
+ err := conn.RecreateDevcontainer(ctx, container.ID)
+ assert.NoError(t, err, "recreate devcontainer should succeed")
+ }(container)
+
+ t.Logf("Checking recreate logs for outcome...")
+
+	// Wait for the logs to be emitted; the @devcontainer/cli up command
+	// emits a log with the outcome at the end, indicating that we
+	// received all the logs.
+waitForOutcomeLoop:
+ for {
+ batch := testutil.RequireReceive(ctx, t, logsCh)
+
+ if bytes.Equal(batch.LogSourceId, devcontainerLogSourceID[:]) {
+ for _, log := range batch.Logs {
+ t.Logf("Received log: %s", log.Output)
+ if strings.Contains(log.Output, "\"outcome\"") {
+ break waitForOutcomeLoop
+ }
+ }
+ }
+ }
+
+ t.Logf("Checking there's a new container with label: devcontainer.local_folder=%s", workspaceFolder)
+
+ // Make sure the container exists and isn't the same as the old one.
+ testutil.Eventually(ctx, t, func(context.Context) bool {
+ containers, err := pool.Client.ListContainers(docker.ListContainersOptions{All: true})
+ if err != nil {
+ t.Logf("Error listing containers: %v", err)
+ return false
+ }
+ for _, c := range containers {
+ t.Logf("Found container: %s with labels: %v", c.ID[:12], c.Labels)
+ if v, ok := c.Labels["devcontainer.local_folder"]; ok && v == workspaceFolder {
+ if c.ID == container.ID {
+ t.Logf("Found same container: %s", c.ID[:12])
+ return false
+ }
+ t.Logf("Found new container: %s", c.ID[:12])
+ container = c
+ return true
+ }
+ }
+ return false
+ }, testutil.IntervalMedium, "new devcontainer not found")
+ defer func(container docker.APIContainers) {
+ // We can't rely on pool here because the container is not
+ // managed by it (it is managed by @devcontainer/cli).
+ err := pool.Client.RemoveContainer(docker.RemoveContainerOptions{
+ ID: container.ID,
+ RemoveVolumes: true,
+ Force: true,
+ })
+ assert.NoError(t, err, "remove container")
+ }(container)
+}
+
func TestAgent_Dial(t *testing.T) {
t.Parallel()
diff --git a/agent/agentcontainers/acmock/acmock.go b/agent/agentcontainers/acmock/acmock.go
index 93c84e8c54fd3..869d2f7d0923b 100644
--- a/agent/agentcontainers/acmock/acmock.go
+++ b/agent/agentcontainers/acmock/acmock.go
@@ -1,9 +1,9 @@
// Code generated by MockGen. DO NOT EDIT.
-// Source: .. (interfaces: Lister)
+// Source: .. (interfaces: Lister,DevcontainerCLI)
//
// Generated by this command:
//
-// mockgen -destination ./acmock.go -package acmock .. Lister
+// mockgen -destination ./acmock.go -package acmock .. Lister,DevcontainerCLI
//
// Package acmock is a generated GoMock package.
@@ -13,6 +13,7 @@ import (
context "context"
reflect "reflect"
+ agentcontainers "github.com/coder/coder/v2/agent/agentcontainers"
codersdk "github.com/coder/coder/v2/codersdk"
gomock "go.uber.org/mock/gomock"
)
@@ -55,3 +56,47 @@ func (mr *MockListerMockRecorder) List(ctx any) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockLister)(nil).List), ctx)
}
+
+// MockDevcontainerCLI is a mock of DevcontainerCLI interface.
+type MockDevcontainerCLI struct {
+ ctrl *gomock.Controller
+ recorder *MockDevcontainerCLIMockRecorder
+ isgomock struct{}
+}
+
+// MockDevcontainerCLIMockRecorder is the mock recorder for MockDevcontainerCLI.
+type MockDevcontainerCLIMockRecorder struct {
+ mock *MockDevcontainerCLI
+}
+
+// NewMockDevcontainerCLI creates a new mock instance.
+func NewMockDevcontainerCLI(ctrl *gomock.Controller) *MockDevcontainerCLI {
+ mock := &MockDevcontainerCLI{ctrl: ctrl}
+ mock.recorder = &MockDevcontainerCLIMockRecorder{mock}
+ return mock
+}
+
+// EXPECT returns an object that allows the caller to indicate expected use.
+func (m *MockDevcontainerCLI) EXPECT() *MockDevcontainerCLIMockRecorder {
+ return m.recorder
+}
+
+// Up mocks base method.
+func (m *MockDevcontainerCLI) Up(ctx context.Context, workspaceFolder, configPath string, opts ...agentcontainers.DevcontainerCLIUpOptions) (string, error) {
+ m.ctrl.T.Helper()
+ varargs := []any{ctx, workspaceFolder, configPath}
+ for _, a := range opts {
+ varargs = append(varargs, a)
+ }
+ ret := m.ctrl.Call(m, "Up", varargs...)
+ ret0, _ := ret[0].(string)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// Up indicates an expected call of Up.
+func (mr *MockDevcontainerCLIMockRecorder) Up(ctx, workspaceFolder, configPath any, opts ...any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ varargs := append([]any{ctx, workspaceFolder, configPath}, opts...)
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Up", reflect.TypeOf((*MockDevcontainerCLI)(nil).Up), varargs...)
+}
diff --git a/agent/agentcontainers/acmock/doc.go b/agent/agentcontainers/acmock/doc.go
index 47679708b0fc8..b807efa253b75 100644
--- a/agent/agentcontainers/acmock/doc.go
+++ b/agent/agentcontainers/acmock/doc.go
@@ -1,4 +1,4 @@
// Package acmock contains a mock implementation of agentcontainers.Lister for use in tests.
package acmock
-//go:generate mockgen -destination ./acmock.go -package acmock .. Lister
+//go:generate mockgen -destination ./acmock.go -package acmock .. Lister,DevcontainerCLI
diff --git a/agent/agentcontainers/api.go b/agent/agentcontainers/api.go
index c3779af67633a..f2164c9a874ff 100644
--- a/agent/agentcontainers/api.go
+++ b/agent/agentcontainers/api.go
@@ -20,6 +20,7 @@ import (
"github.com/coder/coder/v2/agent/agentexec"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/codersdk"
+ "github.com/coder/coder/v2/codersdk/agentsdk"
"github.com/coder/quartz"
)
@@ -43,6 +44,7 @@ type API struct {
cl Lister
dccli DevcontainerCLI
clock quartz.Clock
+ scriptLogger func(logSourceID uuid.UUID) ScriptLogger
// lockCh protects the below fields. We use a channel instead of a
// mutex so we can handle cancellation properly.
@@ -52,6 +54,8 @@ type API struct {
devcontainerNames map[string]struct{} // Track devcontainer names to avoid duplicates.
knownDevcontainers []codersdk.WorkspaceAgentDevcontainer // Track predefined and runtime-detected devcontainers.
configFileModifiedTimes map[string]time.Time // Track when config files were last modified.
+
+ devcontainerLogSourceIDs map[string]uuid.UUID // Track devcontainer log source IDs.
}
// Option is a functional option for API.
@@ -65,6 +69,15 @@ func WithClock(clock quartz.Clock) Option {
}
}
+// WithCacheDuration sets the cache duration for the API.
+// This is used to control how often the API refreshes the list of
+// containers. The default is 10 seconds.
+func WithCacheDuration(d time.Duration) Option {
+ return func(api *API) {
+ api.cacheDuration = d
+ }
+}
+
// WithExecer sets the agentexec.Execer implementation to use.
func WithExecer(execer agentexec.Execer) Option {
return func(api *API) {
@@ -91,13 +104,30 @@ func WithDevcontainerCLI(dccli DevcontainerCLI) Option {
// WithDevcontainers sets the known devcontainers for the API. This
// allows the API to be aware of devcontainers defined in the workspace
// agent manifest.
-func WithDevcontainers(devcontainers []codersdk.WorkspaceAgentDevcontainer) Option {
+func WithDevcontainers(devcontainers []codersdk.WorkspaceAgentDevcontainer, scripts []codersdk.WorkspaceAgentScript) Option {
return func(api *API) {
- if len(devcontainers) > 0 {
- api.knownDevcontainers = slices.Clone(devcontainers)
- api.devcontainerNames = make(map[string]struct{}, len(devcontainers))
- for _, devcontainer := range devcontainers {
- api.devcontainerNames[devcontainer.Name] = struct{}{}
+ if len(devcontainers) == 0 {
+ return
+ }
+ api.knownDevcontainers = slices.Clone(devcontainers)
+ api.devcontainerNames = make(map[string]struct{}, len(devcontainers))
+ api.devcontainerLogSourceIDs = make(map[string]uuid.UUID)
+ for _, devcontainer := range devcontainers {
+ api.devcontainerNames[devcontainer.Name] = struct{}{}
+ for _, script := range scripts {
+				// A devcontainer's script is identified by sharing the same
+				// ID as the devcontainer itself.
+ if script.ID == devcontainer.ID {
+ api.devcontainerLogSourceIDs[devcontainer.WorkspaceFolder] = script.LogSourceID
+ break
+ }
+ }
+ if api.devcontainerLogSourceIDs[devcontainer.WorkspaceFolder] == uuid.Nil {
+ api.logger.Error(api.ctx, "devcontainer log source ID not found for devcontainer",
+ slog.F("devcontainer", devcontainer.Name),
+ slog.F("workspace_folder", devcontainer.WorkspaceFolder),
+ slog.F("config_path", devcontainer.ConfigPath),
+ )
}
}
}
@@ -112,6 +142,27 @@ func WithWatcher(w watcher.Watcher) Option {
}
}
+// ScriptLogger is an interface for sending devcontainer logs to the
+// control plane.
+type ScriptLogger interface {
+ Send(ctx context.Context, log ...agentsdk.Log) error
+ Flush(ctx context.Context) error
+}
+
+// noopScriptLogger is a no-op implementation of the ScriptLogger
+// interface.
+type noopScriptLogger struct{}
+
+func (noopScriptLogger) Send(context.Context, ...agentsdk.Log) error { return nil }
+func (noopScriptLogger) Flush(context.Context) error { return nil }
+
+// WithScriptLogger sets the script logger provider for devcontainer operations.
+func WithScriptLogger(scriptLogger func(logSourceID uuid.UUID) ScriptLogger) Option {
+ return func(api *API) {
+ api.scriptLogger = scriptLogger
+ }
+}
+
// NewAPI returns a new API with the given options applied.
func NewAPI(logger slog.Logger, options ...Option) *API {
ctx, cancel := context.WithCancel(context.Background())
@@ -127,7 +178,10 @@ func NewAPI(logger slog.Logger, options ...Option) *API {
devcontainerNames: make(map[string]struct{}),
knownDevcontainers: []codersdk.WorkspaceAgentDevcontainer{},
configFileModifiedTimes: make(map[string]time.Time),
+ scriptLogger: func(uuid.UUID) ScriptLogger { return noopScriptLogger{} },
}
+ // The ctx and logger must be set before applying options to avoid
+	// a nil pointer dereference.
for _, opt := range options {
opt(api)
}
@@ -214,8 +268,10 @@ func (api *API) Routes() http.Handler {
r := chi.NewRouter()
r.Get("/", api.handleList)
- r.Get("/devcontainers", api.handleListDevcontainers)
- r.Post("/{id}/recreate", api.handleRecreate)
+ r.Route("/devcontainers", func(r chi.Router) {
+ r.Get("/", api.handleDevcontainersList)
+ r.Post("/container/{container}/recreate", api.handleDevcontainerRecreate)
+ })
return r
}
@@ -289,7 +345,8 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC
}
// Check if the container is running and update the known devcontainers.
- for _, container := range updated.Containers {
+ for i := range updated.Containers {
+ container := &updated.Containers[i]
workspaceFolder := container.Labels[DevcontainerLocalFolderLabel]
configFile := container.Labels[DevcontainerConfigFileLabel]
@@ -297,6 +354,20 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC
continue
}
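+		// Propagate the dirty state to the container, clearing it when the
+		// container was created after the config file's last modification.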
+ container.DevcontainerDirty = dirtyStates[workspaceFolder]
+ if container.DevcontainerDirty {
+ lastModified, hasModTime := api.configFileModifiedTimes[configFile]
+ if hasModTime && container.CreatedAt.After(lastModified) {
+ api.logger.Info(ctx, "new container created after config modification, not marking as dirty",
+ slog.F("container", container.ID),
+ slog.F("created_at", container.CreatedAt),
+ slog.F("config_modified_at", lastModified),
+ slog.F("file", configFile),
+ )
+ container.DevcontainerDirty = false
+ }
+ }
+
// Check if this is already in our known list.
if knownIndex := slices.IndexFunc(api.knownDevcontainers, func(dc codersdk.WorkspaceAgentDevcontainer) bool {
return dc.WorkspaceFolder == workspaceFolder
@@ -309,7 +380,7 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC
}
}
api.knownDevcontainers[knownIndex].Running = container.Running
- api.knownDevcontainers[knownIndex].Container = &container
+ api.knownDevcontainers[knownIndex].Container = container
// Check if this container was created after the config
// file was modified.
@@ -348,40 +419,27 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC
}
}
- dirty := dirtyStates[workspaceFolder]
- if dirty {
- lastModified, hasModTime := api.configFileModifiedTimes[configFile]
- if hasModTime && container.CreatedAt.After(lastModified) {
- api.logger.Info(ctx, "new container created after config modification, not marking as dirty",
- slog.F("container", container.ID),
- slog.F("created_at", container.CreatedAt),
- slog.F("config_modified_at", lastModified),
- slog.F("file", configFile),
- )
- dirty = false
- }
- }
-
api.knownDevcontainers = append(api.knownDevcontainers, codersdk.WorkspaceAgentDevcontainer{
ID: uuid.New(),
Name: name,
WorkspaceFolder: workspaceFolder,
ConfigPath: configFile,
Running: container.Running,
- Dirty: dirty,
- Container: &container,
+ Dirty: container.DevcontainerDirty,
+ Container: container,
})
}
return copyListContainersResponse(api.containers), nil
}
-// handleRecreate handles the HTTP request to recreate a container.
-func (api *API) handleRecreate(w http.ResponseWriter, r *http.Request) {
+// handleDevcontainerRecreate handles the HTTP request to recreate a
+// devcontainer by referencing the container.
+func (api *API) handleDevcontainerRecreate(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
- id := chi.URLParam(r, "id")
+ containerID := chi.URLParam(r, "container")
- if id == "" {
+ if containerID == "" {
httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
Message: "Missing container ID or name",
Detail: "Container ID or name is required to recreate a devcontainer.",
@@ -399,7 +457,7 @@ func (api *API) handleRecreate(w http.ResponseWriter, r *http.Request) {
}
containerIdx := slices.IndexFunc(containers.Containers, func(c codersdk.WorkspaceAgentContainer) bool {
- return c.Match(id)
+ return c.Match(containerID)
})
if containerIdx == -1 {
httpapi.Write(ctx, w, http.StatusNotFound, codersdk.Response{
@@ -418,12 +476,31 @@ func (api *API) handleRecreate(w http.ResponseWriter, r *http.Request) {
if workspaceFolder == "" {
httpapi.Write(ctx, w, http.StatusBadRequest, codersdk.Response{
Message: "Missing workspace folder label",
- Detail: "The workspace folder label is required to recreate a devcontainer.",
+			Detail:  "The container is not a devcontainer; the container must have the workspace folder label to support recreation.",
})
return
}
- _, err = api.dccli.Up(ctx, workspaceFolder, configPath, WithRemoveExistingContainer())
+ // Send logs via agent logging facilities.
+ logSourceID := api.devcontainerLogSourceIDs[workspaceFolder]
+ if logSourceID == uuid.Nil {
+ // Fallback to the external log source ID if not found.
+ logSourceID = agentsdk.ExternalLogSourceID
+ }
+ scriptLogger := api.scriptLogger(logSourceID)
+ defer func() {
+ flushCtx, cancel := context.WithTimeout(api.ctx, 5*time.Second)
+ defer cancel()
+ if err := scriptLogger.Flush(flushCtx); err != nil {
+ api.logger.Error(flushCtx, "flush devcontainer logs failed", slog.Error(err))
+ }
+ }()
+ infoW := agentsdk.LogsWriter(ctx, scriptLogger.Send, logSourceID, codersdk.LogLevelInfo)
+ defer infoW.Close()
+ errW := agentsdk.LogsWriter(ctx, scriptLogger.Send, logSourceID, codersdk.LogLevelError)
+ defer errW.Close()
+
+ _, err = api.dccli.Up(ctx, workspaceFolder, configPath, WithOutput(infoW, errW), WithRemoveExistingContainer())
if err != nil {
httpapi.Write(ctx, w, http.StatusInternalServerError, codersdk.Response{
Message: "Could not recreate devcontainer",
@@ -434,32 +511,35 @@ func (api *API) handleRecreate(w http.ResponseWriter, r *http.Request) {
// TODO(mafredri): Temporarily handle clearing the dirty state after
// recreation, later on this should be handled by a "container watcher".
- select {
- case <-api.ctx.Done():
- return
- case <-ctx.Done():
- return
- case api.lockCh <- struct{}{}:
- defer func() { <-api.lockCh }()
- }
- for i := range api.knownDevcontainers {
- if api.knownDevcontainers[i].WorkspaceFolder == workspaceFolder {
- if api.knownDevcontainers[i].Dirty {
- api.logger.Info(ctx, "clearing dirty flag after recreation",
- slog.F("workspace_folder", workspaceFolder),
- slog.F("name", api.knownDevcontainers[i].Name),
- )
- api.knownDevcontainers[i].Dirty = false
+ if !api.doLockedHandler(w, r, func() {
+ for i := range api.knownDevcontainers {
+ if api.knownDevcontainers[i].WorkspaceFolder == workspaceFolder {
+ if api.knownDevcontainers[i].Dirty {
+ api.logger.Info(ctx, "clearing dirty flag after recreation",
+ slog.F("workspace_folder", workspaceFolder),
+ slog.F("name", api.knownDevcontainers[i].Name),
+ )
+ api.knownDevcontainers[i].Dirty = false
+ // TODO(mafredri): This should be handled by a service that
+ // updates the devcontainer state periodically and on-demand.
+ api.knownDevcontainers[i].Container = nil
+					// Set the modified time to the zero value to indicate
+					// that the containers list must be refreshed, ensuring
+					// the new container is re-assigned.
+ api.mtime = time.Time{}
+ }
+ return
}
- break
}
+ }) {
+ return
}
w.WriteHeader(http.StatusNoContent)
}
-// handleListDevcontainers handles the HTTP request to list known devcontainers.
-func (api *API) handleListDevcontainers(w http.ResponseWriter, r *http.Request) {
+// handleDevcontainersList handles the HTTP request to list known devcontainers.
+func (api *API) handleDevcontainersList(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
// Run getContainers to detect the latest devcontainers and their state.
@@ -472,15 +552,12 @@ func (api *API) handleListDevcontainers(w http.ResponseWriter, r *http.Request)
return
}
- select {
- case <-api.ctx.Done():
- return
- case <-ctx.Done():
+ var devcontainers []codersdk.WorkspaceAgentDevcontainer
+ if !api.doLockedHandler(w, r, func() {
+ devcontainers = slices.Clone(api.knownDevcontainers)
+ }) {
return
- case api.lockCh <- struct{}{}:
}
- devcontainers := slices.Clone(api.knownDevcontainers)
- <-api.lockCh
slices.SortFunc(devcontainers, func(a, b codersdk.WorkspaceAgentDevcontainer) int {
if cmp := strings.Compare(a.WorkspaceFolder, b.WorkspaceFolder); cmp != 0 {
@@ -499,34 +576,67 @@ func (api *API) handleListDevcontainers(w http.ResponseWriter, r *http.Request)
// markDevcontainerDirty finds the devcontainer with the given config file path
// and marks it as dirty. It acquires the lock before modifying the state.
func (api *API) markDevcontainerDirty(configPath string, modifiedAt time.Time) {
+ ok := api.doLocked(func() {
+ // Record the timestamp of when this configuration file was modified.
+ api.configFileModifiedTimes[configPath] = modifiedAt
+
+ for i := range api.knownDevcontainers {
+ if api.knownDevcontainers[i].ConfigPath != configPath {
+ continue
+ }
+
+ // TODO(mafredri): Simplistic mark for now, we should check if the
+ // container is running and if the config file was modified after
+ // the container was created.
+ if !api.knownDevcontainers[i].Dirty {
+ api.logger.Info(api.ctx, "marking devcontainer as dirty",
+ slog.F("file", configPath),
+ slog.F("name", api.knownDevcontainers[i].Name),
+ slog.F("workspace_folder", api.knownDevcontainers[i].WorkspaceFolder),
+ slog.F("modified_at", modifiedAt),
+ )
+ api.knownDevcontainers[i].Dirty = true
+ if api.knownDevcontainers[i].Container != nil {
+ api.knownDevcontainers[i].Container.DevcontainerDirty = true
+ }
+ }
+ }
+ })
+ if !ok {
+ api.logger.Debug(api.ctx, "mark devcontainer dirty failed", slog.F("file", configPath))
+ }
+}
+
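+// doLockedHandler runs f while holding the API lock. If the request or API
+// context is canceled before the lock can be acquired, an appropriate HTTP
+// error is written and false is returned.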
+func (api *API) doLockedHandler(w http.ResponseWriter, r *http.Request, f func()) bool {
select {
+ case <-r.Context().Done():
+ httpapi.Write(r.Context(), w, http.StatusRequestTimeout, codersdk.Response{
+ Message: "Request canceled",
+ Detail: "Request was canceled before we could process it.",
+ })
+ return false
case <-api.ctx.Done():
- return
+ httpapi.Write(r.Context(), w, http.StatusServiceUnavailable, codersdk.Response{
+ Message: "API closed",
+ Detail: "The API is closed and cannot process requests.",
+ })
+ return false
case api.lockCh <- struct{}{}:
defer func() { <-api.lockCh }()
}
+ f()
+ return true
+}
- // Record the timestamp of when this configuration file was modified.
- api.configFileModifiedTimes[configPath] = modifiedAt
-
- for i := range api.knownDevcontainers {
- if api.knownDevcontainers[i].ConfigPath != configPath {
- continue
- }
-
- // TODO(mafredri): Simplistic mark for now, we should check if the
- // container is running and if the config file was modified after
- // the container was created.
- if !api.knownDevcontainers[i].Dirty {
- api.logger.Info(api.ctx, "marking devcontainer as dirty",
- slog.F("file", configPath),
- slog.F("name", api.knownDevcontainers[i].Name),
- slog.F("workspace_folder", api.knownDevcontainers[i].WorkspaceFolder),
- slog.F("modified_at", modifiedAt),
- )
- api.knownDevcontainers[i].Dirty = true
- }
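+// doLocked runs f while holding the API lock. It returns false if the API
+// context is done before the lock can be acquired.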
+func (api *API) doLocked(f func()) bool {
+ select {
+ case <-api.ctx.Done():
+ return false
+ case api.lockCh <- struct{}{}:
+ defer func() { <-api.lockCh }()
}
+ f()
+ return true
}
func (api *API) Close() error {
diff --git a/agent/agentcontainers/api_internal_test.go b/agent/agentcontainers/api_internal_test.go
deleted file mode 100644
index 331c41e8df10b..0000000000000
--- a/agent/agentcontainers/api_internal_test.go
+++ /dev/null
@@ -1,163 +0,0 @@
-package agentcontainers
-
-import (
- "math/rand"
- "strings"
- "testing"
- "time"
-
- "github.com/google/uuid"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
- "go.uber.org/mock/gomock"
-
- "cdr.dev/slog"
- "cdr.dev/slog/sloggers/slogtest"
- "github.com/coder/coder/v2/agent/agentcontainers/acmock"
- "github.com/coder/coder/v2/codersdk"
- "github.com/coder/coder/v2/testutil"
- "github.com/coder/quartz"
-)
-
-func TestAPI(t *testing.T) {
- t.Parallel()
-
- // List tests the API.getContainers method using a mock
- // implementation. It specifically tests caching behavior.
- t.Run("List", func(t *testing.T) {
- t.Parallel()
-
- fakeCt := fakeContainer(t)
- fakeCt2 := fakeContainer(t)
- makeResponse := func(cts ...codersdk.WorkspaceAgentContainer) codersdk.WorkspaceAgentListContainersResponse {
- return codersdk.WorkspaceAgentListContainersResponse{Containers: cts}
- }
-
- // Each test case is called multiple times to ensure idempotency
- for _, tc := range []struct {
- name string
- // data to be stored in the handler
- cacheData codersdk.WorkspaceAgentListContainersResponse
- // duration of cache
- cacheDur time.Duration
- // relative age of the cached data
- cacheAge time.Duration
- // function to set up expectations for the mock
- setupMock func(*acmock.MockLister)
- // expected result
- expected codersdk.WorkspaceAgentListContainersResponse
- // expected error
- expectedErr string
- }{
- {
- name: "no cache",
- setupMock: func(mcl *acmock.MockLister) {
- mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).AnyTimes()
- },
- expected: makeResponse(fakeCt),
- },
- {
- name: "no data",
- cacheData: makeResponse(),
- cacheAge: 2 * time.Second,
- cacheDur: time.Second,
- setupMock: func(mcl *acmock.MockLister) {
- mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).AnyTimes()
- },
- expected: makeResponse(fakeCt),
- },
- {
- name: "cached data",
- cacheAge: time.Second,
- cacheData: makeResponse(fakeCt),
- cacheDur: 2 * time.Second,
- expected: makeResponse(fakeCt),
- },
- {
- name: "lister error",
- setupMock: func(mcl *acmock.MockLister) {
- mcl.EXPECT().List(gomock.Any()).Return(makeResponse(), assert.AnError).AnyTimes()
- },
- expectedErr: assert.AnError.Error(),
- },
- {
- name: "stale cache",
- cacheAge: 2 * time.Second,
- cacheData: makeResponse(fakeCt),
- cacheDur: time.Second,
- setupMock: func(mcl *acmock.MockLister) {
- mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt2), nil).AnyTimes()
- },
- expected: makeResponse(fakeCt2),
- },
- } {
- tc := tc
- t.Run(tc.name, func(t *testing.T) {
- t.Parallel()
- var (
- ctx = testutil.Context(t, testutil.WaitShort)
- clk = quartz.NewMock(t)
- ctrl = gomock.NewController(t)
- mockLister = acmock.NewMockLister(ctrl)
- now = time.Now().UTC()
- logger = slogtest.Make(t, nil).Leveled(slog.LevelDebug)
- api = NewAPI(logger, WithLister(mockLister))
- )
- defer api.Close()
-
- api.cacheDuration = tc.cacheDur
- api.clock = clk
- api.containers = tc.cacheData
- if tc.cacheAge != 0 {
- api.mtime = now.Add(-tc.cacheAge)
- }
- if tc.setupMock != nil {
- tc.setupMock(mockLister)
- }
-
- clk.Set(now).MustWait(ctx)
-
- // Repeat the test to ensure idempotency
- for i := 0; i < 2; i++ {
- actual, err := api.getContainers(ctx)
- if tc.expectedErr != "" {
- require.Empty(t, actual, "expected no data (attempt %d)", i)
- require.ErrorContains(t, err, tc.expectedErr, "expected error (attempt %d)", i)
- } else {
- require.NoError(t, err, "expected no error (attempt %d)", i)
- require.Equal(t, tc.expected, actual, "expected containers to be equal (attempt %d)", i)
- }
- }
- })
- }
- })
-}
-
-func fakeContainer(t *testing.T, mut ...func(*codersdk.WorkspaceAgentContainer)) codersdk.WorkspaceAgentContainer {
- t.Helper()
- ct := codersdk.WorkspaceAgentContainer{
- CreatedAt: time.Now().UTC(),
- ID: uuid.New().String(),
- FriendlyName: testutil.GetRandomName(t),
- Image: testutil.GetRandomName(t) + ":" + strings.Split(uuid.New().String(), "-")[0],
- Labels: map[string]string{
- testutil.GetRandomName(t): testutil.GetRandomName(t),
- },
- Running: true,
- Ports: []codersdk.WorkspaceAgentContainerPort{
- {
- Network: "tcp",
- Port: testutil.RandomPortNoListen(t),
- HostPort: testutil.RandomPortNoListen(t),
- //nolint:gosec // this is a test
- HostIP: []string{"127.0.0.1", "[::1]", "localhost", "0.0.0.0", "[::]", testutil.GetRandomName(t)}[rand.Intn(6)],
- },
- },
- Status: testutil.MustRandString(t, 10),
- Volumes: map[string]string{testutil.GetRandomName(t): testutil.GetRandomName(t)},
- }
- for _, m := range mut {
- m(&ct)
- }
- return ct
-}
diff --git a/agent/agentcontainers/api_test.go b/agent/agentcontainers/api_test.go
index 45044b4e43e2e..2e173b7d5a6b4 100644
--- a/agent/agentcontainers/api_test.go
+++ b/agent/agentcontainers/api_test.go
@@ -3,8 +3,10 @@ package agentcontainers_test
import (
"context"
"encoding/json"
+ "math/rand"
"net/http"
"net/http/httptest"
+ "strings"
"testing"
"time"
@@ -13,11 +15,13 @@ import (
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+ "go.uber.org/mock/gomock"
"golang.org/x/xerrors"
"cdr.dev/slog"
"cdr.dev/slog/sloggers/slogtest"
"github.com/coder/coder/v2/agent/agentcontainers"
+ "github.com/coder/coder/v2/agent/agentcontainers/acmock"
"github.com/coder/coder/v2/agent/agentcontainers/watcher"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
@@ -146,6 +150,136 @@ func (w *fakeWatcher) sendEventWaitNextCalled(ctx context.Context, event fsnotif
func TestAPI(t *testing.T) {
t.Parallel()
+	// List tests the API.getContainers method (exercised via the HTTP
+	// containers endpoint) using a mock implementation. It specifically
+	// tests caching behavior.
+ t.Run("List", func(t *testing.T) {
+ t.Parallel()
+
+ fakeCt := fakeContainer(t)
+ fakeCt2 := fakeContainer(t)
+ makeResponse := func(cts ...codersdk.WorkspaceAgentContainer) codersdk.WorkspaceAgentListContainersResponse {
+ return codersdk.WorkspaceAgentListContainersResponse{Containers: cts}
+ }
+
+ // Each test case is called multiple times to ensure idempotency
+ for _, tc := range []struct {
+ name string
+ // data to be stored in the handler
+ cacheData codersdk.WorkspaceAgentListContainersResponse
+ // duration of cache
+ cacheDur time.Duration
+ // relative age of the cached data
+ cacheAge time.Duration
+ // function to set up expectations for the mock
+ setupMock func(mcl *acmock.MockLister, preReq *gomock.Call)
+ // expected result
+ expected codersdk.WorkspaceAgentListContainersResponse
+ // expected error
+ expectedErr string
+ }{
+ {
+ name: "no cache",
+ setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) {
+ mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).After(preReq).AnyTimes()
+ },
+ expected: makeResponse(fakeCt),
+ },
+ {
+ name: "no data",
+ cacheData: makeResponse(),
+ cacheAge: 2 * time.Second,
+ cacheDur: time.Second,
+ setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) {
+ mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).After(preReq).AnyTimes()
+ },
+ expected: makeResponse(fakeCt),
+ },
+ {
+ name: "cached data",
+ cacheAge: time.Second,
+ cacheData: makeResponse(fakeCt),
+ cacheDur: 2 * time.Second,
+ expected: makeResponse(fakeCt),
+ },
+ {
+ name: "lister error",
+ setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) {
+ mcl.EXPECT().List(gomock.Any()).Return(makeResponse(), assert.AnError).After(preReq).AnyTimes()
+ },
+ expectedErr: assert.AnError.Error(),
+ },
+ {
+ name: "stale cache",
+ cacheAge: 2 * time.Second,
+ cacheData: makeResponse(fakeCt),
+ cacheDur: time.Second,
+ setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) {
+ mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt2), nil).After(preReq).AnyTimes()
+ },
+ expected: makeResponse(fakeCt2),
+ },
+ } {
+ tc := tc
+ t.Run(tc.name, func(t *testing.T) {
+ t.Parallel()
+ var (
+ ctx = testutil.Context(t, testutil.WaitShort)
+ clk = quartz.NewMock(t)
+ ctrl = gomock.NewController(t)
+ mockLister = acmock.NewMockLister(ctrl)
+ now = time.Now().UTC()
+ logger = slogtest.Make(t, nil).Leveled(slog.LevelDebug)
+ r = chi.NewRouter()
+ api = agentcontainers.NewAPI(logger,
+ agentcontainers.WithCacheDuration(tc.cacheDur),
+ agentcontainers.WithClock(clk),
+ agentcontainers.WithLister(mockLister),
+ )
+ )
+ defer api.Close()
+
+ r.Mount("/", api.Routes())
+
+ preReq := mockLister.EXPECT().List(gomock.Any()).Return(tc.cacheData, nil).Times(1)
+ if tc.setupMock != nil {
+ tc.setupMock(mockLister, preReq)
+ }
+
+ if tc.cacheAge != 0 {
+ clk.Set(now.Add(-tc.cacheAge)).MustWait(ctx)
+ } else {
+ clk.Set(now).MustWait(ctx)
+ }
+
+ // Prime the cache with the initial data.
+ req := httptest.NewRequest(http.MethodGet, "/", nil)
+ rec := httptest.NewRecorder()
+ r.ServeHTTP(rec, req)
+
+ clk.Set(now).MustWait(ctx)
+
+ // Repeat the test to ensure idempotency
+ for i := 0; i < 2; i++ {
+ req = httptest.NewRequest(http.MethodGet, "/", nil)
+ rec = httptest.NewRecorder()
+ r.ServeHTTP(rec, req)
+
+ if tc.expectedErr != "" {
+ got := &codersdk.Error{}
+ err := json.NewDecoder(rec.Body).Decode(got)
+ require.NoError(t, err, "unmarshal response failed")
+ require.ErrorContains(t, got, tc.expectedErr, "expected error (attempt %d)", i)
+ } else {
+ var got codersdk.WorkspaceAgentListContainersResponse
+ err := json.NewDecoder(rec.Body).Decode(&got)
+ require.NoError(t, err, "unmarshal response failed")
+ require.Equal(t, tc.expected, got, "expected containers to be equal (attempt %d)", i)
+ }
+ }
+ })
+ }
+ })
+
t.Run("Recreate", func(t *testing.T) {
t.Parallel()
@@ -173,7 +307,7 @@ func TestAPI(t *testing.T) {
wantBody string
}{
{
- name: "Missing ID",
+ name: "Missing container ID",
containerID: "",
lister: &fakeLister{},
devcontainerCLI: &fakeDevcontainerCLI{},
@@ -260,7 +394,7 @@ func TestAPI(t *testing.T) {
r.Mount("/", api.Routes())
// Simulate HTTP request to the recreate endpoint.
- req := httptest.NewRequest(http.MethodPost, "/"+tt.containerID+"/recreate", nil)
+ req := httptest.NewRequest(http.MethodPost, "/devcontainers/container/"+tt.containerID+"/recreate", nil)
rec := httptest.NewRecorder()
r.ServeHTTP(rec, req)
@@ -563,8 +697,17 @@ func TestAPI(t *testing.T) {
agentcontainers.WithWatcher(watcher.NewNoop()),
}
+ // Generate matching scripts for the known devcontainers
+ // (required to extract log source ID).
+ var scripts []codersdk.WorkspaceAgentScript
+ for i := range tt.knownDevcontainers {
+ scripts = append(scripts, codersdk.WorkspaceAgentScript{
+ ID: tt.knownDevcontainers[i].ID,
+ LogSourceID: uuid.New(),
+ })
+ }
if len(tt.knownDevcontainers) > 0 {
- apiOptions = append(apiOptions, agentcontainers.WithDevcontainers(tt.knownDevcontainers))
+ apiOptions = append(apiOptions, agentcontainers.WithDevcontainers(tt.knownDevcontainers, scripts))
}
api := agentcontainers.NewAPI(logger, apiOptions...)
@@ -651,6 +794,9 @@ func TestAPI(t *testing.T) {
require.NoError(t, err)
require.Len(t, response.Devcontainers, 1)
assert.False(t, response.Devcontainers[0].Dirty,
+ "devcontainer should not be marked as dirty initially")
+ require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil")
+ assert.False(t, response.Devcontainers[0].Container.DevcontainerDirty,
"container should not be marked as dirty initially")
// Verify the watcher is watching the config file.
@@ -680,6 +826,9 @@ func TestAPI(t *testing.T) {
require.Len(t, response.Devcontainers, 1)
assert.True(t, response.Devcontainers[0].Dirty,
"container should be marked as dirty after config file was modified")
+ require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil")
+ assert.True(t, response.Devcontainers[0].Container.DevcontainerDirty,
+ "container should be marked as dirty after config file was modified")
mClock.Advance(time.Minute).MustWait(ctx)
@@ -698,7 +847,10 @@ func TestAPI(t *testing.T) {
require.NoError(t, err)
require.Len(t, response.Devcontainers, 1)
assert.False(t, response.Devcontainers[0].Dirty,
- "dirty flag should be cleared after container recreation")
+ "dirty flag should be cleared on the devcontainer after container recreation")
+ require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil")
+ assert.False(t, response.Devcontainers[0].Container.DevcontainerDirty,
+ "dirty flag should be cleared on the container after container recreation")
})
}
@@ -716,3 +868,32 @@ func mustFindDevcontainerByPath(t *testing.T, devcontainers []codersdk.Workspace
require.Failf(t, "no devcontainer found with workspace folder %q", path)
return codersdk.WorkspaceAgentDevcontainer{} // Unreachable, but required for compilation
}
+
+func fakeContainer(t *testing.T, mut ...func(*codersdk.WorkspaceAgentContainer)) codersdk.WorkspaceAgentContainer {
+ t.Helper()
+ ct := codersdk.WorkspaceAgentContainer{
+ CreatedAt: time.Now().UTC(),
+ ID: uuid.New().String(),
+ FriendlyName: testutil.GetRandomName(t),
+ Image: testutil.GetRandomName(t) + ":" + strings.Split(uuid.New().String(), "-")[0],
+ Labels: map[string]string{
+ testutil.GetRandomName(t): testutil.GetRandomName(t),
+ },
+ Running: true,
+ Ports: []codersdk.WorkspaceAgentContainerPort{
+ {
+ Network: "tcp",
+ Port: testutil.RandomPortNoListen(t),
+ HostPort: testutil.RandomPortNoListen(t),
+ //nolint:gosec // this is a test
+ HostIP: []string{"127.0.0.1", "[::1]", "localhost", "0.0.0.0", "[::]", testutil.GetRandomName(t)}[rand.Intn(6)],
+ },
+ },
+ Status: testutil.MustRandString(t, 10),
+ Volumes: map[string]string{testutil.GetRandomName(t): testutil.GetRandomName(t)},
+ }
+ for _, m := range mut {
+ m(&ct)
+ }
+ return ct
+}
diff --git a/agent/agentcontainers/devcontainer.go b/agent/agentcontainers/devcontainer.go
index e04c308934a2c..09d4837d4b27a 100644
--- a/agent/agentcontainers/devcontainer.go
+++ b/agent/agentcontainers/devcontainer.go
@@ -22,7 +22,8 @@ const (
const devcontainerUpScriptTemplate = `
if ! which devcontainer > /dev/null 2>&1; then
- echo "ERROR: Unable to start devcontainer, @devcontainers/cli is not installed."
+ echo "ERROR: Unable to start devcontainer, @devcontainers/cli is not installed or not found in \$PATH." 1>&2
+ echo "Please install @devcontainers/cli by running \"npm install -g @devcontainers/cli\" or by using the \"devcontainers-cli\" Coder module." 1>&2
exit 1
fi
devcontainer up %s
@@ -65,7 +66,9 @@ func devcontainerStartupScript(dc codersdk.WorkspaceAgentDevcontainer, script co
args = append(args, fmt.Sprintf("--config %q", dc.ConfigPath))
}
cmd := fmt.Sprintf(devcontainerUpScriptTemplate, strings.Join(args, " "))
- script.Script = cmd
+ // Force the script to run in /bin/sh, since some shells (e.g. fish)
+	// don't support its syntax.
+ script.Script = fmt.Sprintf("/bin/sh -c '%s'", cmd)
// Disable RunOnStart, scripts have this set so that when devcontainers
// have not been enabled, a warning will be surfaced in the agent logs.
script.RunOnStart = false
diff --git a/agent/agentcontainers/devcontainercli.go b/agent/agentcontainers/devcontainercli.go
index d6060f862cb40..7e3122b182fdb 100644
--- a/agent/agentcontainers/devcontainercli.go
+++ b/agent/agentcontainers/devcontainercli.go
@@ -31,8 +31,18 @@ func WithRemoveExistingContainer() DevcontainerCLIUpOptions {
}
}
+// WithOutput sets stdout and stderr writers for Up command logs.
+func WithOutput(stdout, stderr io.Writer) DevcontainerCLIUpOptions {
+ return func(o *devcontainerCLIUpConfig) {
+ o.stdout = stdout
+ o.stderr = stderr
+ }
+}
+
type devcontainerCLIUpConfig struct {
removeExistingContainer bool
+ stdout io.Writer
+ stderr io.Writer
}
func applyDevcontainerCLIUpOptions(opts []DevcontainerCLIUpOptions) devcontainerCLIUpConfig {
@@ -78,18 +88,28 @@ func (d *devcontainerCLI) Up(ctx context.Context, workspaceFolder, configPath st
}
cmd := d.execer.CommandContext(ctx, "devcontainer", args...)
- var stdout bytes.Buffer
- cmd.Stdout = io.MultiWriter(&stdout, &devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stdout", true))})
- cmd.Stderr = &devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stderr", true))}
+	// Capture stdout for parsing and stream logs to both the default and any provided writers.
+ var stdoutBuf bytes.Buffer
+ stdoutWriters := []io.Writer{&stdoutBuf, &devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stdout", true))}}
+ if conf.stdout != nil {
+ stdoutWriters = append(stdoutWriters, conf.stdout)
+ }
+ cmd.Stdout = io.MultiWriter(stdoutWriters...)
+	// Stream stderr to the default log writer and to the provided writer, if any.
+ stderrWriters := []io.Writer{&devcontainerCLILogWriter{ctx: ctx, logger: logger.With(slog.F("stderr", true))}}
+ if conf.stderr != nil {
+ stderrWriters = append(stderrWriters, conf.stderr)
+ }
+ cmd.Stderr = io.MultiWriter(stderrWriters...)
if err := cmd.Run(); err != nil {
- if _, err2 := parseDevcontainerCLILastLine(ctx, logger, stdout.Bytes()); err2 != nil {
+ if _, err2 := parseDevcontainerCLILastLine(ctx, logger, stdoutBuf.Bytes()); err2 != nil {
err = errors.Join(err, err2)
}
return "", err
}
- result, err := parseDevcontainerCLILastLine(ctx, logger, stdout.Bytes())
+ result, err := parseDevcontainerCLILastLine(ctx, logger, stdoutBuf.Bytes())
if err != nil {
return "", err
}
diff --git a/agent/agentcontainers/devcontainercli_test.go b/agent/agentcontainers/devcontainercli_test.go
index d768b997cc1e1..cdba0211ab94e 100644
--- a/agent/agentcontainers/devcontainercli_test.go
+++ b/agent/agentcontainers/devcontainercli_test.go
@@ -128,6 +128,45 @@ func TestDevcontainerCLI_ArgsAndParsing(t *testing.T) {
})
}
+// TestDevcontainerCLI_WithOutput tests that WithOutput captures CLI
+// logs to provided writers.
+func TestDevcontainerCLI_WithOutput(t *testing.T) {
+ t.Parallel()
+
+ // Prepare test executable and logger.
+ testExePath, err := os.Executable()
+ require.NoError(t, err, "get test executable path")
+ logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug)
+ ctx := testutil.Context(t, testutil.WaitMedium)
+
+ // Buffers to capture stdout and stderr.
+ outBuf := &bytes.Buffer{}
+ errBuf := &bytes.Buffer{}
+
+ // Simulate CLI execution with a standard up.log file.
+ wantArgs := "up --log-format json --workspace-folder /test/workspace"
+ testExecer := &testDevcontainerExecer{
+ testExePath: testExePath,
+ wantArgs: wantArgs,
+ wantError: false,
+ logFile: filepath.Join("testdata", "devcontainercli", "parse", "up.log"),
+ }
+ dccli := agentcontainers.NewDevcontainerCLI(logger, testExecer)
+
+ // Call Up with WithOutput to capture CLI logs.
+ containerID, err := dccli.Up(ctx, "/test/workspace", "", agentcontainers.WithOutput(outBuf, errBuf))
+ require.NoError(t, err, "Up should succeed")
+ require.NotEmpty(t, containerID, "expected non-empty container ID")
+
+ // Read expected log content.
+ expLog, err := os.ReadFile(filepath.Join("testdata", "devcontainercli", "parse", "up.log"))
+ require.NoError(t, err, "reading expected log file")
+
+ // Verify stdout buffer contains the CLI logs and stderr is empty.
+ assert.Equal(t, string(expLog), outBuf.String(), "stdout buffer should match CLI logs")
+ assert.Empty(t, errBuf.String(), "stderr buffer should be empty on success")
+}
+
// testDevcontainerExecer implements the agentexec.Execer interface for testing.
type testDevcontainerExecer struct {
testExePath string
diff --git a/agent/agenttest/client.go b/agent/agenttest/client.go
index 73fb40e826519..24658c44d6e18 100644
--- a/agent/agenttest/client.go
+++ b/agent/agenttest/client.go
@@ -60,6 +60,7 @@ func NewClient(t testing.TB,
err = agentproto.DRPCRegisterAgent(mux, fakeAAPI)
require.NoError(t, err)
server := drpcserver.NewWithOptions(mux, drpcserver.Options{
+ Manager: drpcsdk.DefaultDRPCOptions(nil),
Log: func(err error) {
if xerrors.Is(err, io.EOF) {
return
diff --git a/agent/api.go b/agent/api.go
index f09d39b172bd5..2e15530adc608 100644
--- a/agent/api.go
+++ b/agent/api.go
@@ -7,6 +7,8 @@ import (
"github.com/go-chi/chi/v5"
+ "github.com/google/uuid"
+
"github.com/coder/coder/v2/agent/agentcontainers"
"github.com/coder/coder/v2/coderd/httpapi"
"github.com/coder/coder/v2/codersdk"
@@ -40,12 +42,15 @@ func (a *agent) apiHandler() (http.Handler, func() error) {
if a.experimentalDevcontainersEnabled {
containerAPIOpts := []agentcontainers.Option{
agentcontainers.WithExecer(a.execer),
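+			// Route devcontainer operation logs through the agent's log
+			// sender so they are streamed under the provided log source ID.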
+ agentcontainers.WithScriptLogger(func(logSourceID uuid.UUID) agentcontainers.ScriptLogger {
+ return a.logSender.GetScriptLogger(logSourceID)
+ }),
}
manifest := a.manifest.Load()
if manifest != nil && len(manifest.Devcontainers) > 0 {
containerAPIOpts = append(
containerAPIOpts,
- agentcontainers.WithDevcontainers(manifest.Devcontainers),
+ agentcontainers.WithDevcontainers(manifest.Devcontainers, manifest.Scripts),
)
}
diff --git a/cli/agent.go b/cli/agent.go
index 5d6cdbd66b4e0..deca447664337 100644
--- a/cli/agent.go
+++ b/cli/agent.go
@@ -25,6 +25,8 @@ import (
"cdr.dev/slog/sloggers/sloghuman"
"cdr.dev/slog/sloggers/slogjson"
"cdr.dev/slog/sloggers/slogstackdriver"
+ "github.com/coder/serpent"
+
"github.com/coder/coder/v2/agent"
"github.com/coder/coder/v2/agent/agentexec"
"github.com/coder/coder/v2/agent/agentssh"
@@ -33,7 +35,6 @@ import (
"github.com/coder/coder/v2/cli/clilog"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/agentsdk"
- "github.com/coder/serpent"
)
func (r *RootCmd) workspaceAgent() *serpent.Command {
@@ -62,8 +63,10 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
// This command isn't useful to manually execute.
Hidden: true,
Handler: func(inv *serpent.Invocation) error {
- ctx, cancel := context.WithCancel(inv.Context())
- defer cancel()
+ ctx, cancel := context.WithCancelCause(inv.Context())
+ defer func() {
+ cancel(xerrors.New("agent exited"))
+ }()
var (
ignorePorts = map[int]string{}
@@ -280,7 +283,6 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
return xerrors.Errorf("add executable to $PATH: %w", err)
}
- prometheusRegistry := prometheus.NewRegistry()
subsystemsRaw := inv.Environ.Get(agent.EnvAgentSubsystem)
subsystems := []codersdk.AgentSubsystem{}
for _, s := range strings.Split(subsystemsRaw, ",") {
@@ -324,45 +326,69 @@ func (r *RootCmd) workspaceAgent() *serpent.Command {
logger.Info(ctx, "agent devcontainer detection not enabled")
}
- agnt := agent.New(agent.Options{
- Client: client,
- Logger: logger,
- LogDir: logDir,
- ScriptDataDir: scriptDataDir,
- // #nosec G115 - Safe conversion as tailnet listen port is within uint16 range (0-65535)
- TailnetListenPort: uint16(tailnetListenPort),
- ExchangeToken: func(ctx context.Context) (string, error) {
- if exchangeToken == nil {
- return client.SDK.SessionToken(), nil
- }
- resp, err := exchangeToken(ctx)
- if err != nil {
- return "", err
- }
- client.SetSessionToken(resp.SessionToken)
- return resp.SessionToken, nil
- },
- EnvironmentVariables: environmentVariables,
- IgnorePorts: ignorePorts,
- SSHMaxTimeout: sshMaxTimeout,
- Subsystems: subsystems,
-
- PrometheusRegistry: prometheusRegistry,
- BlockFileTransfer: blockFileTransfer,
- Execer: execer,
-
- ExperimentalDevcontainersEnabled: experimentalDevcontainersEnabled,
- })
-
- promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger)
- prometheusSrvClose := ServeHandler(ctx, logger, promHandler, prometheusAddress, "prometheus")
- defer prometheusSrvClose()
-
- debugSrvClose := ServeHandler(ctx, logger, agnt.HTTPDebug(), debugAddress, "debug")
- defer debugSrvClose()
-
- <-ctx.Done()
- return agnt.Close()
+ reinitEvents := agentsdk.WaitForReinitLoop(ctx, logger, client)
+
+ var (
+ lastErr error
+ mustExit bool
+ )
+ for {
+ prometheusRegistry := prometheus.NewRegistry()
+
+ agnt := agent.New(agent.Options{
+ Client: client,
+ Logger: logger,
+ LogDir: logDir,
+ ScriptDataDir: scriptDataDir,
+ // #nosec G115 - Safe conversion as tailnet listen port is within uint16 range (0-65535)
+ TailnetListenPort: uint16(tailnetListenPort),
+ ExchangeToken: func(ctx context.Context) (string, error) {
+ if exchangeToken == nil {
+ return client.SDK.SessionToken(), nil
+ }
+ resp, err := exchangeToken(ctx)
+ if err != nil {
+ return "", err
+ }
+ client.SetSessionToken(resp.SessionToken)
+ return resp.SessionToken, nil
+ },
+ EnvironmentVariables: environmentVariables,
+ IgnorePorts: ignorePorts,
+ SSHMaxTimeout: sshMaxTimeout,
+ Subsystems: subsystems,
+
+ PrometheusRegistry: prometheusRegistry,
+ BlockFileTransfer: blockFileTransfer,
+ Execer: execer,
+ ExperimentalDevcontainersEnabled: experimentalDevcontainersEnabled,
+ })
+
+ promHandler := agent.PrometheusMetricsHandler(prometheusRegistry, logger)
+ prometheusSrvClose := ServeHandler(ctx, logger, promHandler, prometheusAddress, "prometheus")
+
+ debugSrvClose := ServeHandler(ctx, logger, agnt.HTTPDebug(), debugAddress, "debug")
+
+ select {
+ case <-ctx.Done():
+ logger.Info(ctx, "agent shutting down", slog.Error(context.Cause(ctx)))
+ mustExit = true
+ case event := <-reinitEvents:
+ logger.Info(ctx, "agent received instruction to reinitialize",
+ slog.F("workspace_id", event.WorkspaceID), slog.F("reason", event.Reason))
+ }
+
+ lastErr = agnt.Close()
+ debugSrvClose()
+ prometheusSrvClose()
+
+ if mustExit {
+ break
+ }
+
+ logger.Info(ctx, "agent reinitializing")
+ }
+ return lastErr
},
}
diff --git a/cli/logout_test.go b/cli/logout_test.go
index 62c93c2d6f81b..9e7e95c68f211 100644
--- a/cli/logout_test.go
+++ b/cli/logout_test.go
@@ -1,6 +1,7 @@
package cli_test
import (
+ "fmt"
"os"
"runtime"
"testing"
@@ -89,10 +90,14 @@ func TestLogout(t *testing.T) {
logout.Stdin = pty.Input()
logout.Stdout = pty.Output()
+ executable, err := os.Executable()
+ require.NoError(t, err)
+ require.NotEqual(t, "", executable)
+
go func() {
defer close(logoutChan)
- err := logout.Run()
- err := logout.Run()
- assert.ErrorContains(t, err, "You are not logged in. Try logging in using 'coder login <url>'.")
+ err = logout.Run()
+ assert.Contains(t, err.Error(), fmt.Sprintf("Try logging in using '%s login <url>'.", executable))
}()
<-logoutChan
diff --git a/cli/root.go b/cli/root.go
index 1dba212316c74..8fec1a945b0b3 100644
--- a/cli/root.go
+++ b/cli/root.go
@@ -72,7 +72,7 @@ const (
varDisableDirect = "disable-direct-connections"
varDisableNetworkTelemetry = "disable-network-telemetry"
- notLoggedInMessage = "You are not logged in. Try logging in using 'coder login <url>'."
+ notLoggedInMessage = "You are not logged in. Try logging in using '%s login <url>'."
envNoVersionCheck = "CODER_NO_VERSION_WARNING"
envNoFeatureWarning = "CODER_NO_FEATURE_WARNING"
@@ -534,7 +534,11 @@ func (r *RootCmd) InitClient(client *codersdk.Client) serpent.MiddlewareFunc {
rawURL, err := conf.URL().Read()
// If the configuration files are absent, the user is logged out
if os.IsNotExist(err) {
- return xerrors.New(notLoggedInMessage)
+ binPath, err := os.Executable()
+ if err != nil {
+ binPath = "coder"
+ }
+ return xerrors.Errorf(notLoggedInMessage, binPath)
}
if err != nil {
return err
diff --git a/cli/server.go b/cli/server.go
index d32ed51c06007..c5532e07e7a81 100644
--- a/cli/server.go
+++ b/cli/server.go
@@ -928,6 +928,37 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
options.StatsBatcher = batcher
defer closeBatcher()
+ // Manage notifications.
+ var (
+ notificationsCfg = options.DeploymentValues.Notifications
+ notificationsManager *notifications.Manager
+ )
+
+ metrics := notifications.NewMetrics(options.PrometheusRegistry)
+ helpers := templateHelpers(options)
+
+ // The enqueuer is responsible for enqueueing notifications to the given store.
+ enqueuer, err := notifications.NewStoreEnqueuer(notificationsCfg, options.Database, helpers, logger.Named("notifications.enqueuer"), quartz.NewReal())
+ if err != nil {
+ return xerrors.Errorf("failed to instantiate notification store enqueuer: %w", err)
+ }
+ options.NotificationsEnqueuer = enqueuer
+
+ // The notification manager is responsible for:
+ // - creating notifiers and managing their lifecycles (notifiers are responsible for dequeueing/sending notifications)
+ // - keeping the store updated with status updates
+ notificationsManager, err = notifications.NewManager(notificationsCfg, options.Database, options.Pubsub, helpers, metrics, logger.Named("notifications.manager"))
+ if err != nil {
+ return xerrors.Errorf("failed to instantiate notification manager: %w", err)
+ }
+
+ // nolint:gocritic // We need to run the manager in a notifier context.
+ notificationsManager.Run(dbauthz.AsNotifier(ctx))
+
+ // Run report generator to distribute periodic reports.
+ notificationReportGenerator := reports.NewReportGenerator(ctx, logger.Named("notifications.report_generator"), options.Database, options.NotificationsEnqueuer, quartz.NewReal())
+ defer notificationReportGenerator.Close()
+
// We use a separate coderAPICloser so the Enterprise API
// can have its own close functions. This is cleaner
// than abstracting the Coder API itself.
@@ -975,37 +1006,6 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd.
return xerrors.Errorf("write config url: %w", err)
}
- // Manage notifications.
- var (
- notificationsCfg = options.DeploymentValues.Notifications
- notificationsManager *notifications.Manager
- )
-
- metrics := notifications.NewMetrics(options.PrometheusRegistry)
- helpers := templateHelpers(options)
-
- // The enqueuer is responsible for enqueueing notifications to the given store.
- enqueuer, err := notifications.NewStoreEnqueuer(notificationsCfg, options.Database, helpers, logger.Named("notifications.enqueuer"), quartz.NewReal())
- if err != nil {
- return xerrors.Errorf("failed to instantiate notification store enqueuer: %w", err)
- }
- options.NotificationsEnqueuer = enqueuer
-
- // The notification manager is responsible for:
- // - creating notifiers and managing their lifecycles (notifiers are responsible for dequeueing/sending notifications)
- // - keeping the store updated with status updates
- notificationsManager, err = notifications.NewManager(notificationsCfg, options.Database, options.Pubsub, helpers, metrics, logger.Named("notifications.manager"))
- if err != nil {
- return xerrors.Errorf("failed to instantiate notification manager: %w", err)
- }
-
- // nolint:gocritic // We need to run the manager in a notifier context.
- notificationsManager.Run(dbauthz.AsNotifier(ctx))
-
- // Run report generator to distribute periodic reports.
- notificationReportGenerator := reports.NewReportGenerator(ctx, logger.Named("notifications.report_generator"), options.Database, options.NotificationsEnqueuer, quartz.NewReal())
- defer notificationReportGenerator.Close()
-
// Since errCh only has one buffered slot, all routines
// sending on it must be wrapped in a select/default to
// avoid leaving dangling goroutines waiting for the
diff --git a/cli/ssh.go b/cli/ssh.go
index 7c5bda073f973..5cc81284ca317 100644
--- a/cli/ssh.go
+++ b/cli/ssh.go
@@ -90,14 +90,33 @@ func (r *RootCmd) ssh() *serpent.Command {
wsClient := workspacesdk.New(client)
cmd := &serpent.Command{
Annotations: workspaceCommand,
- Use: "ssh <workspace>",
- Short: "Start a shell into a workspace",
+ Use: "ssh <workspace> [command]",
+ Short: "Start a shell into a workspace or run a command",
+ Long: "This command does not have full parity with the standard SSH command. For users who need the full functionality of SSH, create an ssh configuration with `coder config-ssh`.\n\n" +
+ FormatExamples(
+ Example{
+ Description: "Use `--` to separate and pass flags directly to the command executed via SSH.",
+ Command: "coder ssh -- ls -la",
+ },
+ ),
Middleware: serpent.Chain(
- serpent.RequireNArgs(1),
+ // Require at least one arg for the workspace name
+ func(next serpent.HandlerFunc) serpent.HandlerFunc {
+ return func(i *serpent.Invocation) error {
+ got := len(i.Args)
+ if got < 1 {
+ return xerrors.New("expected the name of a workspace")
+ }
+
+ return next(i)
+ }
+ },
r.InitClient(client),
initAppearance(client, &appearanceConfig),
),
Handler: func(inv *serpent.Invocation) (retErr error) {
+ command := strings.Join(inv.Args[1:], " ")
+
// Before dialing the SSH server over TCP, capture Interrupt signals
// so that if we are interrupted, we have a chance to tear down the
// TCP session cleanly before exiting. If we don't, then the TCP
@@ -547,40 +566,46 @@ func (r *RootCmd) ssh() *serpent.Command {
sshSession.Stdout = inv.Stdout
sshSession.Stderr = inv.Stderr
- err = sshSession.Shell()
- if err != nil {
- return xerrors.Errorf("start shell: %w", err)
- }
+ if command != "" {
+ err := sshSession.Run(command)
+ if err != nil {
+ return xerrors.Errorf("run command: %w", err)
+ }
+ } else {
+ err = sshSession.Shell()
+ if err != nil {
+ return xerrors.Errorf("start shell: %w", err)
+ }
- // Put cancel at the top of the defer stack to initiate
- // shutdown of services.
- defer cancel()
+ // Put cancel at the top of the defer stack to initiate
+ // shutdown of services.
+ defer cancel()
- if validOut {
- // Set initial window size.
- width, height, err := term.GetSize(int(stdoutFile.Fd()))
- if err == nil {
- _ = sshSession.WindowChange(height, width)
+ if validOut {
+ // Set initial window size.
+ width, height, err := term.GetSize(int(stdoutFile.Fd()))
+ if err == nil {
+ _ = sshSession.WindowChange(height, width)
+ }
}
- }
- err = sshSession.Wait()
- conn.SendDisconnectedTelemetry()
- if err != nil {
- if exitErr := (&gossh.ExitError{}); errors.As(err, &exitErr) {
- // Clear the error since it's not useful beyond
- // reporting status.
- return ExitError(exitErr.ExitStatus(), nil)
- }
- // If the connection drops unexpectedly, we get an
- // ExitMissingError but no other error details, so try to at
- // least give the user a better message
- if errors.Is(err, &gossh.ExitMissingError{}) {
- return ExitError(255, xerrors.New("SSH connection ended unexpectedly"))
+ err = sshSession.Wait()
+ conn.SendDisconnectedTelemetry()
+ if err != nil {
+ if exitErr := (&gossh.ExitError{}); errors.As(err, &exitErr) {
+ // Clear the error since it's not useful beyond
+ // reporting status.
+ return ExitError(exitErr.ExitStatus(), nil)
+ }
+ // If the connection drops unexpectedly, we get an
+ // ExitMissingError but no other error details, so try to at
+ // least give the user a better message
+ if errors.Is(err, &gossh.ExitMissingError{}) {
+ return ExitError(255, xerrors.New("SSH connection ended unexpectedly"))
+ }
+ return xerrors.Errorf("session ended: %w", err)
}
- return xerrors.Errorf("session ended: %w", err)
}
-
return nil
},
}
diff --git a/cli/ssh_test.go b/cli/ssh_test.go
index 5fcb6205d5e45..49f83daa0612a 100644
--- a/cli/ssh_test.go
+++ b/cli/ssh_test.go
@@ -2200,6 +2200,127 @@ func TestSSH_CoderConnect(t *testing.T) {
<-cmdDone
})
+
+ t.Run("OneShot", func(t *testing.T) {
+ t.Parallel()
+
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ inv, root := clitest.New(t, "ssh", workspace.Name, "echo 'hello world'")
+ clitest.SetupConfig(t, client, root)
+
+ // Capture command output
+ output := new(bytes.Buffer)
+ inv.Stdout = output
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ _ = agenttest.New(t, client.URL, agentToken)
+ coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
+
+ <-cmdDone
+
+ // Verify command output
+ assert.Contains(t, output.String(), "hello world")
+ })
+
+ t.Run("OneShotExitCode", func(t *testing.T) {
+ t.Parallel()
+
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+
+ // Setup agent first to avoid race conditions
+ _ = agenttest.New(t, client.URL, agentToken)
+ coderdtest.AwaitWorkspaceAgents(t, client, workspace.ID)
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ // Test successful exit code
+ t.Run("Success", func(t *testing.T) {
+ inv, root := clitest.New(t, "ssh", workspace.Name, "exit 0")
+ clitest.SetupConfig(t, client, root)
+
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ // Test error exit code
+ t.Run("Error", func(t *testing.T) {
+ inv, root := clitest.New(t, "ssh", workspace.Name, "exit 1")
+ clitest.SetupConfig(t, client, root)
+
+ err := inv.WithContext(ctx).Run()
+ assert.Error(t, err)
+ var exitErr *ssh.ExitError
+ assert.True(t, errors.As(err, &exitErr))
+ assert.Equal(t, 1, exitErr.ExitStatus())
+ })
+ })
+
+ t.Run("OneShotStdio", func(t *testing.T) {
+ t.Parallel()
+ client, workspace, agentToken := setupWorkspaceForAgent(t)
+ _, _ = tGoContext(t, func(ctx context.Context) {
+ // Run this async so the SSH command has to wait for
+ // the build and agent to connect!
+ _ = agenttest.New(t, client.URL, agentToken)
+ <-ctx.Done()
+ })
+
+ clientOutput, clientInput := io.Pipe()
+ serverOutput, serverInput := io.Pipe()
+ defer func() {
+ for _, c := range []io.Closer{clientOutput, clientInput, serverOutput, serverInput} {
+ _ = c.Close()
+ }
+ }()
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ inv, root := clitest.New(t, "ssh", "--stdio", workspace.Name, "echo 'hello stdio'")
+ clitest.SetupConfig(t, client, root)
+ inv.Stdin = clientOutput
+ inv.Stdout = serverInput
+ inv.Stderr = io.Discard
+
+ cmdDone := tGo(t, func() {
+ err := inv.WithContext(ctx).Run()
+ assert.NoError(t, err)
+ })
+
+ conn, channels, requests, err := ssh.NewClientConn(&testutil.ReaderWriterConn{
+ Reader: serverOutput,
+ Writer: clientInput,
+ }, "", &ssh.ClientConfig{
+ // #nosec
+ HostKeyCallback: ssh.InsecureIgnoreHostKey(),
+ })
+ require.NoError(t, err)
+ defer conn.Close()
+
+ sshClient := ssh.NewClient(conn, channels, requests)
+ session, err := sshClient.NewSession()
+ require.NoError(t, err)
+ defer session.Close()
+
+ // Capture and verify command output
+ output, err := session.Output("echo 'hello back'")
+ require.NoError(t, err)
+ assert.Contains(t, string(output), "hello back")
+
+ err = sshClient.Close()
+ require.NoError(t, err)
+ _ = clientOutput.Close()
+
+ <-cmdDone
+ })
}
type fakeCoderConnectDialer struct{}
diff --git a/cli/testdata/coder_--help.golden b/cli/testdata/coder_--help.golden
index 5a3ad462cdae8..f3c6f56a7a191 100644
--- a/cli/testdata/coder_--help.golden
+++ b/cli/testdata/coder_--help.golden
@@ -46,7 +46,7 @@ SUBCOMMANDS:
show Display details of a workspace's resources and agents
speedtest Run upload and download tests from your machine to a
workspace
- ssh Start a shell into a workspace
+ ssh Start a shell into a workspace or run a command
start Start a workspace
stat Show resource usage for the current workspace.
state Manually manage Terraform state to fix broken workspaces
diff --git a/cli/testdata/coder_provisioner_list_--output_json.golden b/cli/testdata/coder_provisioner_list_--output_json.golden
index 3daeb89febcb4..e8b3637bdffa6 100644
--- a/cli/testdata/coder_provisioner_list_--output_json.golden
+++ b/cli/testdata/coder_provisioner_list_--output_json.golden
@@ -7,7 +7,7 @@
"last_seen_at": "====[timestamp]=====",
"name": "test",
"version": "v0.0.0-devel",
- "api_version": "1.5",
+ "api_version": "1.6",
"provisioners": [
"echo"
],
diff --git a/cli/testdata/coder_ssh_--help.golden b/cli/testdata/coder_ssh_--help.golden
index 1f7122dd655a2..8019dbdc2a4a4 100644
--- a/cli/testdata/coder_ssh_--help.golden
+++ b/cli/testdata/coder_ssh_--help.golden
@@ -1,9 +1,18 @@
coder v0.0.0-devel
USAGE:
- coder ssh [flags] <workspace>
-
- Start a shell into a workspace
+ coder ssh [flags] <workspace> [command]
+
+ Start a shell into a workspace or run a command
+
+ This command does not have full parity with the standard SSH command. For
+ users who need the full functionality of SSH, create an ssh configuration with
+ `coder config-ssh`.
+
+ - Use `--` to separate and pass flags directly to the command executed via
+ SSH.:
+
+ $ coder ssh -- ls -la
OPTIONS:
--disable-autostart bool, $CODER_SSH_DISABLE_AUTOSTART (default: false)
diff --git a/cli/testdata/coder_users_--help.golden b/cli/testdata/coder_users_--help.golden
index 585588cbc6e18..949dc97c3b8d2 100644
--- a/cli/testdata/coder_users_--help.golden
+++ b/cli/testdata/coder_users_--help.golden
@@ -10,10 +10,10 @@ USAGE:
SUBCOMMANDS:
activate Update a user's status to 'active'. Active users can fully
interact with the platform
- create
+ create Create a new user.
delete Delete a user by username or user_id.
edit-roles Edit a user's roles by username or id
- list
+ list Prints the list of users.
show Show a single user. Use 'me' to indicate the currently
authenticated user.
suspend Update a user's status to 'suspended'. A suspended user cannot
diff --git a/cli/testdata/coder_users_create_--help.golden b/cli/testdata/coder_users_create_--help.golden
index 5f57485b52f3c..04f976ab6843c 100644
--- a/cli/testdata/coder_users_create_--help.golden
+++ b/cli/testdata/coder_users_create_--help.golden
@@ -3,6 +3,8 @@ coder v0.0.0-devel
USAGE:
coder users create [flags]
+ Create a new user.
+
OPTIONS:
-O, --org string, $CODER_ORGANIZATION
Select which organization (uuid or name) to use.
diff --git a/cli/testdata/coder_users_list_--help.golden b/cli/testdata/coder_users_list_--help.golden
index 563ad76e1dc72..22c1fe172faf5 100644
--- a/cli/testdata/coder_users_list_--help.golden
+++ b/cli/testdata/coder_users_list_--help.golden
@@ -3,6 +3,8 @@ coder v0.0.0-devel
USAGE:
coder users list [flags]
+ Prints the list of users.
+
Aliases: ls
OPTIONS:
diff --git a/cli/usercreate.go b/cli/usercreate.go
index f73a3165ee908..643e3554650e5 100644
--- a/cli/usercreate.go
+++ b/cli/usercreate.go
@@ -28,7 +28,8 @@ func (r *RootCmd) userCreate() *serpent.Command {
)
client := new(codersdk.Client)
cmd := &serpent.Command{
- Use: "create",
+ Use: "create",
+ Short: "Create a new user.",
Middleware: serpent.Chain(
serpent.RequireNArgs(0),
r.InitClient(client),
diff --git a/cli/userlist.go b/cli/userlist.go
index 48f27f83119a4..e24281ad76d68 100644
--- a/cli/userlist.go
+++ b/cli/userlist.go
@@ -23,6 +23,7 @@ func (r *RootCmd) userList() *serpent.Command {
cmd := &serpent.Command{
Use: "list",
+ Short: "Prints the list of users.",
Aliases: []string{"ls"},
Middleware: serpent.Chain(
serpent.RequireNArgs(0),
diff --git a/cli/userlist_test.go b/cli/userlist_test.go
index 1a4409bb898ac..2681f0d2a462e 100644
--- a/cli/userlist_test.go
+++ b/cli/userlist_test.go
@@ -4,6 +4,8 @@ import (
"bytes"
"context"
"encoding/json"
+ "fmt"
+ "os"
"testing"
"github.com/stretchr/testify/assert"
@@ -69,9 +71,12 @@ func TestUserList(t *testing.T) {
t.Run("NoURLFileErrorHasHelperText", func(t *testing.T) {
t.Parallel()
+ executable, err := os.Executable()
+ require.NoError(t, err)
+
inv, _ := clitest.New(t, "users", "list")
- err := inv.Run()
- err := inv.Run()
- require.Contains(t, err.Error(), "Try logging in using 'coder login <url>'.")
+ err = inv.Run()
+ require.Contains(t, err.Error(), fmt.Sprintf("Try logging in using '%s login <url>'.", executable))
})
t.Run("SessionAuthErrorHasHelperText", func(t *testing.T) {
t.Parallel()
diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go
index 1b2b8d92a10ef..8a0871bc083d4 100644
--- a/coderd/agentapi/api.go
+++ b/coderd/agentapi/api.go
@@ -30,6 +30,7 @@ import (
"github.com/coder/coder/v2/coderd/wspubsub"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/agentsdk"
+ "github.com/coder/coder/v2/codersdk/drpcsdk"
"github.com/coder/coder/v2/tailnet"
tailnetproto "github.com/coder/coder/v2/tailnet/proto"
"github.com/coder/quartz"
@@ -209,6 +210,7 @@ func (a *API) Server(ctx context.Context) (*drpcserver.Server, error) {
return drpcserver.NewWithOptions(&tracing.DRPCHandler{Handler: mux},
drpcserver.Options{
+ Manager: drpcsdk.DefaultDRPCOptions(nil),
Log: func(err error) {
if xerrors.Is(err, io.EOF) {
return
diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go
index fb5ae20e448c8..d55582afbbe8b 100644
--- a/coderd/apidoc/docs.go
+++ b/coderd/apidoc/docs.go
@@ -4109,6 +4109,7 @@ const docTemplate = `{
"CoderSessionToken": []
}
],
+ "description": "Returns a list of templates for the specified organization.\nBy default, only non-deprecated templates are returned.\nTo include deprecated templates, specify ` + "`" + `deprecated:true` + "`" + ` in the search query.",
"produces": [
"application/json"
],
@@ -4936,6 +4937,7 @@ const docTemplate = `{
"CoderSessionToken": []
}
],
+ "description": "Returns a list of templates.\nBy default, only non-deprecated templates are returned.\nTo include deprecated templates, specify ` + "`" + `deprecated:true` + "`" + ` in the search query.",
"produces": [
"application/json"
],
@@ -8444,6 +8446,31 @@ const docTemplate = `{
}
}
},
+ "/workspaceagents/me/reinit": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": [
+ "application/json"
+ ],
+ "tags": [
+ "Agents"
+ ],
+ "summary": "Get workspace agent reinitialization",
+ "operationId": "get-workspace-agent-reinitialization",
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/agentsdk.ReinitializationEvent"
+ }
+ }
+ }
+ }
+ },
"/workspaceagents/me/rpc": {
"get": {
"security": [
@@ -8579,6 +8606,42 @@ const docTemplate = `{
}
}
},
+ "/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": {
+ "post": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "tags": [
+ "Agents"
+ ],
+ "summary": "Recreate devcontainer for workspace agent",
+ "operationId": "recreate-devcontainer-for-workspace-agent",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Workspace agent ID",
+ "name": "workspaceagent",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Container ID or name",
+ "name": "container",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content"
+ }
+ }
+ }
+ },
"/workspaceagents/{workspaceagent}/coordinate": {
"get": {
"security": [
@@ -10489,6 +10552,26 @@ const docTemplate = `{
}
}
},
+ "agentsdk.ReinitializationEvent": {
+ "type": "object",
+ "properties": {
+ "reason": {
+ "$ref": "#/definitions/agentsdk.ReinitializationReason"
+ },
+ "workspaceID": {
+ "type": "string"
+ }
+ }
+ },
+ "agentsdk.ReinitializationReason": {
+ "type": "string",
+ "enum": [
+ "prebuild_claimed"
+ ],
+ "x-enum-varnames": [
+ "ReinitializeReasonPrebuildClaimed"
+ ]
+ },
"aisdk.Attachment": {
"type": "object",
"properties": {
@@ -15526,6 +15609,9 @@ const docTemplate = `{
"updated_at": {
"type": "string",
"format": "date-time"
+ },
+ "use_classic_parameter_flow": {
+ "type": "boolean"
}
}
},
@@ -17021,6 +17107,14 @@ const docTemplate = `{
"operating_system": {
"type": "string"
},
+ "parent_id": {
+ "format": "uuid",
+ "allOf": [
+ {
+ "$ref": "#/definitions/uuid.NullUUID"
+ }
+ ]
+ },
"ready_at": {
"type": "string",
"format": "date-time"
@@ -17076,6 +17170,10 @@ const docTemplate = `{
"type": "string",
"format": "date-time"
},
+ "devcontainer_dirty": {
+ "description": "DevcontainerDirty is true if the devcontainer configuration has changed\nsince the container was created. This is used to determine if the\ncontainer needs to be rebuilt.",
+ "type": "boolean"
+ },
"id": {
"description": "ID is the unique identifier of the container.",
"type": "string"
@@ -19033,6 +19131,18 @@ const docTemplate = `{
"url.Userinfo": {
"type": "object"
},
+ "uuid.NullUUID": {
+ "type": "object",
+ "properties": {
+ "uuid": {
+ "type": "string"
+ },
+ "valid": {
+ "description": "Valid is true if UUID is not NULL",
+ "type": "boolean"
+ }
+ }
+ },
"workspaceapps.AccessMethod": {
"type": "string",
"enum": [
diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json
index 8420c9ea0f812..00f940737a1d6 100644
--- a/coderd/apidoc/swagger.json
+++ b/coderd/apidoc/swagger.json
@@ -3628,6 +3628,7 @@
"CoderSessionToken": []
}
],
+ "description": "Returns a list of templates for the specified organization.\nBy default, only non-deprecated templates are returned.\nTo include deprecated templates, specify `deprecated:true` in the search query.",
"produces": ["application/json"],
"tags": ["Templates"],
"summary": "Get templates by organization",
@@ -4355,6 +4356,7 @@
"CoderSessionToken": []
}
],
+ "description": "Returns a list of templates.\nBy default, only non-deprecated templates are returned.\nTo include deprecated templates, specify `deprecated:true` in the search query.",
"produces": ["application/json"],
"tags": ["Templates"],
"summary": "Get all templates",
@@ -7461,6 +7463,27 @@
}
}
},
+ "/workspaceagents/me/reinit": {
+ "get": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "produces": ["application/json"],
+ "tags": ["Agents"],
+ "summary": "Get workspace agent reinitialization",
+ "operationId": "get-workspace-agent-reinitialization",
+ "responses": {
+ "200": {
+ "description": "OK",
+ "schema": {
+ "$ref": "#/definitions/agentsdk.ReinitializationEvent"
+ }
+ }
+ }
+ }
+ },
"/workspaceagents/me/rpc": {
"get": {
"security": [
@@ -7582,6 +7605,40 @@
}
}
},
+ "/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": {
+ "post": {
+ "security": [
+ {
+ "CoderSessionToken": []
+ }
+ ],
+ "tags": ["Agents"],
+ "summary": "Recreate devcontainer for workspace agent",
+ "operationId": "recreate-devcontainer-for-workspace-agent",
+ "parameters": [
+ {
+ "type": "string",
+ "format": "uuid",
+ "description": "Workspace agent ID",
+ "name": "workspaceagent",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "description": "Container ID or name",
+ "name": "container",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "No Content"
+ }
+ }
+ }
+ },
"/workspaceagents/{workspaceagent}/coordinate": {
"get": {
"security": [
@@ -9300,6 +9357,22 @@
}
}
},
+ "agentsdk.ReinitializationEvent": {
+ "type": "object",
+ "properties": {
+ "reason": {
+ "$ref": "#/definitions/agentsdk.ReinitializationReason"
+ },
+ "workspaceID": {
+ "type": "string"
+ }
+ }
+ },
+ "agentsdk.ReinitializationReason": {
+ "type": "string",
+ "enum": ["prebuild_claimed"],
+ "x-enum-varnames": ["ReinitializeReasonPrebuildClaimed"]
+ },
"aisdk.Attachment": {
"type": "object",
"properties": {
@@ -14128,6 +14201,9 @@
"updated_at": {
"type": "string",
"format": "date-time"
+ },
+ "use_classic_parameter_flow": {
+ "type": "boolean"
}
}
},
@@ -15538,6 +15614,14 @@
"operating_system": {
"type": "string"
},
+ "parent_id": {
+ "format": "uuid",
+ "allOf": [
+ {
+ "$ref": "#/definitions/uuid.NullUUID"
+ }
+ ]
+ },
"ready_at": {
"type": "string",
"format": "date-time"
@@ -15593,6 +15677,10 @@
"type": "string",
"format": "date-time"
},
+ "devcontainer_dirty": {
+ "description": "DevcontainerDirty is true if the devcontainer configuration has changed\nsince the container was created. This is used to determine if the\ncontainer needs to be rebuilt.",
+ "type": "boolean"
+ },
"id": {
"description": "ID is the unique identifier of the container.",
"type": "string"
@@ -17442,6 +17530,18 @@
"url.Userinfo": {
"type": "object"
},
+ "uuid.NullUUID": {
+ "type": "object",
+ "properties": {
+ "uuid": {
+ "type": "string"
+ },
+ "valid": {
+ "description": "Valid is true if UUID is not NULL",
+ "type": "boolean"
+ }
+ }
+ },
"workspaceapps.AccessMethod": {
"type": "string",
"enum": ["path", "subdomain", "terminal"],
diff --git a/coderd/coderd.go b/coderd/coderd.go
index 1b4b5746b7f7e..3989f8a87ea1b 100644
--- a/coderd/coderd.go
+++ b/coderd/coderd.go
@@ -19,6 +19,8 @@ import (
"sync/atomic"
"time"
+ "github.com/coder/coder/v2/coderd/prebuilds"
+
"github.com/andybalholm/brotli"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
@@ -41,12 +43,13 @@ import (
"github.com/coder/quartz"
"github.com/coder/serpent"
+ "github.com/coder/coder/v2/codersdk/drpcsdk"
+
"github.com/coder/coder/v2/coderd/ai"
"github.com/coder/coder/v2/coderd/cryptokeys"
"github.com/coder/coder/v2/coderd/entitlements"
"github.com/coder/coder/v2/coderd/files"
"github.com/coder/coder/v2/coderd/idpsync"
- "github.com/coder/coder/v2/coderd/prebuilds"
"github.com/coder/coder/v2/coderd/runtimeconfig"
"github.com/coder/coder/v2/coderd/webpush"
@@ -84,7 +87,6 @@ import (
"github.com/coder/coder/v2/coderd/workspaceapps"
"github.com/coder/coder/v2/coderd/workspacestats"
"github.com/coder/coder/v2/codersdk"
- "github.com/coder/coder/v2/codersdk/drpcsdk"
"github.com/coder/coder/v2/codersdk/healthsdk"
"github.com/coder/coder/v2/provisionerd/proto"
"github.com/coder/coder/v2/provisionersdk"
@@ -800,6 +802,11 @@ func New(options *Options) *API {
PostAuthAdditionalHeadersFunc: options.PostAuthAdditionalHeadersFunc,
})
+ workspaceAgentInfo := httpmw.ExtractWorkspaceAgentAndLatestBuild(httpmw.ExtractWorkspaceAgentAndLatestBuildConfig{
+ DB: options.Database,
+ Optional: false,
+ })
+
// API rate limit middleware. The counter is local and not shared between
// replicas or instances of this middleware.
apiRateLimiter := httpmw.RateLimit(options.APIRateLimit, time.Minute)
@@ -1287,10 +1294,7 @@ func New(options *Options) *API {
httpmw.RequireAPIKeyOrWorkspaceProxyAuth(),
).Get("/connection", api.workspaceAgentConnectionGeneric)
r.Route("/me", func(r chi.Router) {
- r.Use(httpmw.ExtractWorkspaceAgentAndLatestBuild(httpmw.ExtractWorkspaceAgentAndLatestBuildConfig{
- DB: options.Database,
- Optional: false,
- }))
+ r.Use(workspaceAgentInfo)
r.Get("/rpc", api.workspaceAgentRPC)
r.Patch("/logs", api.patchWorkspaceAgentLogs)
r.Patch("/app-status", api.patchWorkspaceAgentAppStatus)
@@ -1299,6 +1303,7 @@ func New(options *Options) *API {
r.Get("/external-auth", api.workspaceAgentsExternalAuth)
r.Get("/gitsshkey", api.agentGitSSHKey)
r.Post("/log-source", api.workspaceAgentPostLogSource)
+ r.Get("/reinit", api.workspaceAgentReinit)
})
r.Route("/{workspaceagent}", func(r chi.Router) {
r.Use(
@@ -1321,6 +1326,7 @@ func New(options *Options) *API {
r.Get("/listening-ports", api.workspaceAgentListeningPorts)
r.Get("/connection", api.workspaceAgentConnection)
r.Get("/containers", api.workspaceAgentListContainers)
+ r.Post("/containers/devcontainers/container/{container}/recreate", api.workspaceAgentRecreateDevcontainer)
r.Get("/coordinate", api.workspaceAgentClientCoordinate)
// PTY is part of workspaceAppServer.
@@ -1592,7 +1598,7 @@ type API struct {
// passed to dbauthz.
AccessControlStore *atomic.Pointer[dbauthz.AccessControlStore]
PortSharer atomic.Pointer[portsharing.PortSharer]
- FileCache files.Cache
+ FileCache *files.Cache
PrebuildsClaimer atomic.Pointer[prebuilds.Claimer]
PrebuildsReconciler atomic.Pointer[prebuilds.ReconciliationOrchestrator]
@@ -1717,13 +1723,30 @@ func compressHandler(h http.Handler) http.Handler {
return cmp.Handler(h)
}
+type MemoryProvisionerDaemonOption func(*memoryProvisionerDaemonOptions)
+
+func MemoryProvisionerWithVersionOverride(version string) MemoryProvisionerDaemonOption {
+ return func(opts *memoryProvisionerDaemonOptions) {
+ opts.versionOverride = version
+ }
+}
+
+type memoryProvisionerDaemonOptions struct {
+ versionOverride string
+}
+
// CreateInMemoryProvisionerDaemon is an in-memory connection to a provisionerd.
// Useful when starting coderd and provisionerd in the same process.
func (api *API) CreateInMemoryProvisionerDaemon(dialCtx context.Context, name string, provisionerTypes []codersdk.ProvisionerType) (client proto.DRPCProvisionerDaemonClient, err error) {
return api.CreateInMemoryTaggedProvisionerDaemon(dialCtx, name, provisionerTypes, nil)
}
-func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, name string, provisionerTypes []codersdk.ProvisionerType, provisionerTags map[string]string) (client proto.DRPCProvisionerDaemonClient, err error) {
+func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, name string, provisionerTypes []codersdk.ProvisionerType, provisionerTags map[string]string, opts ...MemoryProvisionerDaemonOption) (client proto.DRPCProvisionerDaemonClient, err error) {
+ options := &memoryProvisionerDaemonOptions{}
+ for _, opt := range opts {
+ opt(options)
+ }
+
tracer := api.TracerProvider.Tracer(tracing.TracerName)
clientSession, serverSession := drpcsdk.MemTransportPipe()
defer func() {
@@ -1750,6 +1773,12 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n
return nil, xerrors.Errorf("failed to parse built-in provisioner key ID: %w", err)
}
+ apiVersion := proto.CurrentVersion.String()
+ if options.versionOverride != "" && flag.Lookup("test.v") != nil {
+ // This should only be usable for unit testing, to fake a different provisioner version.
+ apiVersion = options.versionOverride
+ }
+
//nolint:gocritic // in-memory provisioners are owned by system
daemon, err := api.Database.UpsertProvisionerDaemon(dbauthz.AsSystemRestricted(dialCtx), database.UpsertProvisionerDaemonParams{
Name: name,
@@ -1759,7 +1788,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n
Tags: provisionersdk.MutateTags(uuid.Nil, provisionerTags),
LastSeenAt: sql.NullTime{Time: dbtime.Now(), Valid: true},
Version: buildinfo.Version(),
- APIVersion: proto.CurrentVersion.String(),
+ APIVersion: apiVersion,
KeyID: keyID,
})
if err != nil {
@@ -1771,6 +1800,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n
logger := api.Logger.Named(fmt.Sprintf("inmem-provisionerd-%s", name))
srv, err := provisionerdserver.NewServer(
api.ctx, // use the same ctx as the API
+ daemon.APIVersion,
api.AccessURL,
daemon.ID,
defaultOrg.ID,
@@ -1793,6 +1823,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n
Clock: api.Clock,
},
api.NotificationsEnqueuer,
+ &api.PrebuildsReconciler,
)
if err != nil {
return nil, err
@@ -1803,6 +1834,7 @@ func (api *API) CreateInMemoryTaggedProvisionerDaemon(dialCtx context.Context, n
}
server := drpcserver.NewWithOptions(&tracing.DRPCHandler{Handler: mux},
drpcserver.Options{
+ Manager: drpcsdk.DefaultDRPCOptions(nil),
Log: func(err error) {
if xerrors.Is(err, io.EOF) {
return
diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go
index e843d0d748578..a25f0576e76be 100644
--- a/coderd/coderdtest/coderdtest.go
+++ b/coderd/coderdtest/coderdtest.go
@@ -135,6 +135,7 @@ type Options struct {
// IncludeProvisionerDaemon when true means to start an in-memory provisionerD
IncludeProvisionerDaemon bool
+ ProvisionerDaemonVersion string
ProvisionerDaemonTags map[string]string
MetricsCacheRefreshInterval time.Duration
AgentStatsRefreshInterval time.Duration
@@ -601,7 +602,7 @@ func NewWithAPI(t testing.TB, options *Options) (*codersdk.Client, io.Closer, *c
setHandler(rootHandler)
var provisionerCloser io.Closer = nopcloser{}
if options.IncludeProvisionerDaemon {
- provisionerCloser = NewTaggedProvisionerDaemon(t, coderAPI, "test", options.ProvisionerDaemonTags)
+ provisionerCloser = NewTaggedProvisionerDaemon(t, coderAPI, "test", options.ProvisionerDaemonTags, coderd.MemoryProvisionerWithVersionOverride(options.ProvisionerDaemonVersion))
}
client := codersdk.New(serverURL)
t.Cleanup(func() {
@@ -648,7 +649,7 @@ func NewProvisionerDaemon(t testing.TB, coderAPI *coderd.API) io.Closer {
return NewTaggedProvisionerDaemon(t, coderAPI, "test", nil)
}
-func NewTaggedProvisionerDaemon(t testing.TB, coderAPI *coderd.API, name string, provisionerTags map[string]string) io.Closer {
+func NewTaggedProvisionerDaemon(t testing.TB, coderAPI *coderd.API, name string, provisionerTags map[string]string, opts ...coderd.MemoryProvisionerDaemonOption) io.Closer {
t.Helper()
// t.Cleanup runs in last added, first called order. t.TempDir() will delete
@@ -676,7 +677,7 @@ func NewTaggedProvisionerDaemon(t testing.TB, coderAPI *coderd.API, name string,
connectedCh := make(chan struct{})
daemon := provisionerd.New(func(dialCtx context.Context) (provisionerdproto.DRPCProvisionerDaemonClient, error) {
- return coderAPI.CreateInMemoryTaggedProvisionerDaemon(dialCtx, name, []codersdk.ProvisionerType{codersdk.ProvisionerTypeEcho}, provisionerTags)
+ return coderAPI.CreateInMemoryTaggedProvisionerDaemon(dialCtx, name, []codersdk.ProvisionerType{codersdk.ProvisionerTypeEcho}, provisionerTags, opts...)
}, &provisionerd.Options{
Logger: coderAPI.Logger.Named("provisionerd").Leveled(slog.LevelDebug),
UpdateInterval: 250 * time.Millisecond,
@@ -1105,6 +1106,69 @@ func (w WorkspaceAgentWaiter) MatchResources(m func([]codersdk.WorkspaceResource
return w
}
+// WaitForAgentFn represents a boolean assertion to be made against each agent
+// that a given WorkspaceAgentWaiter knows about. Each WaitForAgentFn should apply
+// the check to a single agent, but it should be named in the plural, because `func (w WorkspaceAgentWaiter) WaitFor`
+// applies the check to all agents that it is aware of. This ensures that the public API of the waiter
+// reads correctly. For example:
+//
+// waiter := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID)
+// waiter.WaitFor(coderdtest.AgentsReady)
+type WaitForAgentFn func(agent codersdk.WorkspaceAgent) bool
+
+// AgentsReady checks that the latest lifecycle state of an agent is "Ready".
+func AgentsReady(agent codersdk.WorkspaceAgent) bool {
+ return agent.LifecycleState == codersdk.WorkspaceAgentLifecycleReady
+}
+
+// AgentsNotReady checks that the latest lifecycle state of an agent is anything except "Ready".
+func AgentsNotReady(agent codersdk.WorkspaceAgent) bool {
+ return !AgentsReady(agent)
+}
+
+func (w WorkspaceAgentWaiter) WaitFor(criteria ...WaitForAgentFn) {
+ w.t.Helper()
+
+ agentNamesMap := make(map[string]struct{}, len(w.agentNames))
+ for _, name := range w.agentNames {
+ agentNamesMap[name] = struct{}{}
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ w.t.Logf("waiting for workspace agents (workspace %s)", w.workspaceID)
+ require.Eventually(w.t, func() bool {
+ var err error
+ workspace, err := w.client.Workspace(ctx, w.workspaceID)
+ if err != nil {
+ return false
+ }
+ if workspace.LatestBuild.Job.CompletedAt == nil {
+ return false
+ }
+ if workspace.LatestBuild.Job.CompletedAt.IsZero() {
+ return false
+ }
+
+ for _, resource := range workspace.LatestBuild.Resources {
+ for _, agent := range resource.Agents {
+ if len(w.agentNames) > 0 {
+ if _, ok := agentNamesMap[agent.Name]; !ok {
+ continue
+ }
+ }
+ for _, criterium := range criteria {
+ if !criterium(agent) {
+ return false
+ }
+ }
+ }
+ }
+ return true
+ }, testutil.WaitLong, testutil.IntervalMedium)
+}
+
// Wait waits for the agent(s) to connect and fails the test if they do not within testutil.WaitLong
func (w WorkspaceAgentWaiter) Wait() []codersdk.WorkspaceResource {
w.t.Helper()
diff --git a/coderd/coderdtest/oidctest/idp.go b/coderd/coderdtest/oidctest/idp.go
index b82f8a00dedb4..c7f7d35937198 100644
--- a/coderd/coderdtest/oidctest/idp.go
+++ b/coderd/coderdtest/oidctest/idp.go
@@ -307,7 +307,7 @@ func WithCustomClientAuth(hook func(t testing.TB, req *http.Request) (url.Values
// WithLogging is optional, but will log some HTTP calls made to the IDP.
func WithLogging(t testing.TB, options *slogtest.Options) func(*FakeIDP) {
return func(f *FakeIDP) {
- f.logger = slogtest.Make(t, options)
+ f.logger = slogtest.Make(t, options).Named("fakeidp")
}
}
@@ -794,6 +794,7 @@ func (f *FakeIDP) newToken(t testing.TB, email string, expires time.Time) string
func (f *FakeIDP) newRefreshTokens(email string) string {
refreshToken := uuid.NewString()
f.refreshTokens.Store(refreshToken, email)
+ f.logger.Info(context.Background(), "new refresh token", slog.F("email", email), slog.F("token", refreshToken))
return refreshToken
}
@@ -1003,6 +1004,7 @@ func (f *FakeIDP) httpHandler(t testing.TB) http.Handler {
return
}
+ f.logger.Info(r.Context(), "http idp call refresh_token", slog.F("token", refreshToken))
_, ok := f.refreshTokens.Load(refreshToken)
if !assert.True(t, ok, "invalid refresh_token") {
http.Error(rw, "invalid refresh_token", http.StatusBadRequest)
@@ -1026,6 +1028,7 @@ func (f *FakeIDP) httpHandler(t testing.TB) http.Handler {
f.refreshTokensUsed.Store(refreshToken, true)
// Always invalidate the refresh token after it is used.
f.refreshTokens.Delete(refreshToken)
+ f.logger.Info(r.Context(), "refresh token invalidated", slog.F("token", refreshToken))
case "urn:ietf:params:oauth:grant-type:device_code":
// Device flow
var resp externalauth.ExchangeDeviceCodeResponse
diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go
index 2ed230dd7a8f3..928dee0e30ea3 100644
--- a/coderd/database/dbauthz/dbauthz.go
+++ b/coderd/database/dbauthz/dbauthz.go
@@ -12,21 +12,19 @@ import (
"time"
"github.com/google/uuid"
- "golang.org/x/xerrors"
-
"github.com/open-policy-agent/opa/topdown"
+ "golang.org/x/xerrors"
"cdr.dev/slog"
- "github.com/coder/coder/v2/coderd/prebuilds"
- "github.com/coder/coder/v2/coderd/rbac/policy"
- "github.com/coder/coder/v2/coderd/rbac/rolestore"
-
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbtime"
"github.com/coder/coder/v2/coderd/httpapi/httpapiconstraints"
"github.com/coder/coder/v2/coderd/httpmw/loggermw"
+ "github.com/coder/coder/v2/coderd/prebuilds"
"github.com/coder/coder/v2/coderd/rbac"
+ "github.com/coder/coder/v2/coderd/rbac/policy"
+ "github.com/coder/coder/v2/coderd/rbac/rolestore"
"github.com/coder/coder/v2/coderd/util/slice"
"github.com/coder/coder/v2/provisionersdk"
)
@@ -347,6 +345,7 @@ var (
rbac.ResourceNotificationPreference.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceNotificationTemplate.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
rbac.ResourceCryptoKey.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete},
+ rbac.ResourceFile.Type: {policy.ActionCreate, policy.ActionRead},
}),
Org: map[string][]rbac.Permission{},
User: []rbac.Permission{},
@@ -3021,6 +3020,15 @@ func (q *querier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uui
return q.db.GetWorkspaceAgentsByResourceIDs(ctx, ids)
}
+func (q *querier) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]database.WorkspaceAgent, error) {
+ _, err := q.GetWorkspaceByID(ctx, arg.WorkspaceID)
+ if err != nil {
+ return nil, err
+ }
+
+ return q.db.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, arg)
+}
+
func (q *querier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) {
if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil {
return nil, err
diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go
index 6dc9a32f03943..a0289f222392b 100644
--- a/coderd/database/dbauthz/dbauthz_test.go
+++ b/coderd/database/dbauthz/dbauthz_test.go
@@ -1214,8 +1214,8 @@ func (s *MethodTestSuite) TestTemplate() {
JobID: job.ID,
TemplateID: uuid.NullUUID{UUID: t.ID, Valid: true},
})
- dbgen.TemplateVersionTerraformValues(s.T(), db, database.InsertTemplateVersionTerraformValuesByJobIDParams{
- JobID: job.ID,
+ dbgen.TemplateVersionTerraformValues(s.T(), db, database.TemplateVersionTerraformValue{
+ TemplateVersionID: tv.ID,
})
check.Args(tv.ID).Asserts(t, policy.ActionRead)
}))
@@ -2009,6 +2009,38 @@ func (s *MethodTestSuite) TestWorkspace() {
agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
check.Args(agt.ID).Asserts(w, policy.ActionRead).Returns(agt)
}))
+ s.Run("GetWorkspaceAgentsByWorkspaceAndBuildNumber", s.Subtest(func(db database.Store, check *expects) {
+ u := dbgen.User(s.T(), db, database.User{})
+ o := dbgen.Organization(s.T(), db, database.Organization{})
+ tpl := dbgen.Template(s.T(), db, database.Template{
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{
+ TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true},
+ OrganizationID: o.ID,
+ CreatedBy: u.ID,
+ })
+ w := dbgen.Workspace(s.T(), db, database.WorkspaceTable{
+ TemplateID: tpl.ID,
+ OrganizationID: o.ID,
+ OwnerID: u.ID,
+ })
+ j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{
+ Type: database.ProvisionerJobTypeWorkspaceBuild,
+ })
+ b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{
+ JobID: j.ID,
+ WorkspaceID: w.ID,
+ TemplateVersionID: tv.ID,
+ })
+ res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: b.JobID})
+ agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID})
+ check.Args(database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{
+ WorkspaceID: w.ID,
+ BuildNumber: 1,
+ }).Asserts(w, policy.ActionRead).Returns([]database.WorkspaceAgent{agt})
+ }))
s.Run("GetWorkspaceAgentLifecycleStateByID", s.Subtest(func(db database.Store, check *expects) {
u := dbgen.User(s.T(), db, database.User{})
o := dbgen.Organization(s.T(), db, database.Organization{})
@@ -3986,8 +4018,9 @@ func (s *MethodTestSuite) TestSystemFunctions() {
s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) {
dbtestutil.DisableForeignKeysAndTriggers(s.T(), db)
check.Args(database.InsertWorkspaceAgentParams{
- ID: uuid.New(),
- Name: "dev",
+ ID: uuid.New(),
+ Name: "dev",
+ APIKeyScope: database.AgentKeyScopeEnumAll,
}).Asserts(rbac.ResourceSystem, policy.ActionCreate)
}))
s.Run("InsertWorkspaceApp", s.Subtest(func(db database.Store, check *expects) {
diff --git a/coderd/database/dbfake/dbfake.go b/coderd/database/dbfake/dbfake.go
index abadd78f07b36..fb2ea4bfd56b1 100644
--- a/coderd/database/dbfake/dbfake.go
+++ b/coderd/database/dbfake/dbfake.go
@@ -294,6 +294,8 @@ type TemplateVersionBuilder struct {
ps pubsub.Pubsub
resources []*sdkproto.Resource
params []database.TemplateVersionParameter
+ presets []database.TemplateVersionPreset
+ presetParams []database.TemplateVersionPresetParameter
promote bool
autoCreateTemplate bool
}
@@ -339,6 +341,13 @@ func (t TemplateVersionBuilder) Params(ps ...database.TemplateVersionParameter)
return t
}
+func (t TemplateVersionBuilder) Preset(preset database.TemplateVersionPreset, params ...database.TemplateVersionPresetParameter) TemplateVersionBuilder {
+ // nolint: revive // returns modified struct
+ t.presets = append(t.presets, preset)
+ t.presetParams = append(t.presetParams, params...)
+ return t
+}
+
func (t TemplateVersionBuilder) SkipCreateTemplate() TemplateVersionBuilder {
// nolint: revive // returns modified struct
t.autoCreateTemplate = false
@@ -378,6 +387,25 @@ func (t TemplateVersionBuilder) Do() TemplateVersionResponse {
require.NoError(t.t, err)
}
+ for _, preset := range t.presets {
+ dbgen.Preset(t.t, t.db, database.InsertPresetParams{
+ ID: preset.ID,
+ TemplateVersionID: version.ID,
+ Name: preset.Name,
+ CreatedAt: version.CreatedAt,
+ DesiredInstances: preset.DesiredInstances,
+ InvalidateAfterSecs: preset.InvalidateAfterSecs,
+ })
+ }
+
+ for _, presetParam := range t.presetParams {
+ dbgen.PresetParameter(t.t, t.db, database.InsertPresetParametersParams{
+ TemplateVersionPresetID: presetParam.TemplateVersionPresetID,
+ Names: []string{presetParam.Name},
+ Values: []string{presetParam.Value},
+ })
+ }
+
payload, err := json.Marshal(provisionerdserver.TemplateVersionImportJob{
TemplateVersionID: t.seed.ID,
})
diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go
index 55c2fe4cf6965..286c80f1c2143 100644
--- a/coderd/database/dbgen/dbgen.go
+++ b/coderd/database/dbgen/dbgen.go
@@ -29,6 +29,7 @@ import (
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/cryptorand"
+ "github.com/coder/coder/v2/provisionerd/proto"
"github.com/coder/coder/v2/testutil"
)
@@ -181,6 +182,7 @@ func WorkspaceAgentPortShare(t testing.TB, db database.Store, orig database.Work
func WorkspaceAgent(t testing.TB, db database.Store, orig database.WorkspaceAgent) database.WorkspaceAgent {
agt, err := db.InsertWorkspaceAgent(genCtx, database.InsertWorkspaceAgentParams{
ID: takeFirst(orig.ID, uuid.New()),
+ ParentID: takeFirst(orig.ParentID, uuid.NullUUID{}),
CreatedAt: takeFirst(orig.CreatedAt, dbtime.Now()),
UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()),
Name: takeFirst(orig.Name, testutil.GetRandomName(t)),
@@ -210,6 +212,7 @@ func WorkspaceAgent(t testing.TB, db database.Store, orig database.WorkspaceAgen
MOTDFile: takeFirst(orig.TroubleshootingURL, ""),
DisplayApps: append([]database.DisplayApp{}, orig.DisplayApps...),
DisplayOrder: takeFirst(orig.DisplayOrder, 1),
+ APIKeyScope: takeFirst(orig.APIKeyScope, database.AgentKeyScopeEnumAll),
})
require.NoError(t, err, "insert workspace agent")
return agt
@@ -995,17 +998,32 @@ func TemplateVersionParameter(t testing.TB, db database.Store, orig database.Tem
return version
}
-func TemplateVersionTerraformValues(t testing.TB, db database.Store, orig database.InsertTemplateVersionTerraformValuesByJobIDParams) {
+func TemplateVersionTerraformValues(t testing.TB, db database.Store, orig database.TemplateVersionTerraformValue) database.TemplateVersionTerraformValue {
t.Helper()
+ jobID := uuid.New()
+ if orig.TemplateVersionID != uuid.Nil {
+ v, err := db.GetTemplateVersionByID(genCtx, orig.TemplateVersionID)
+ if err == nil {
+ jobID = v.JobID
+ }
+ }
+
params := database.InsertTemplateVersionTerraformValuesByJobIDParams{
- JobID: takeFirst(orig.JobID, uuid.New()),
- CachedPlan: takeFirstSlice(orig.CachedPlan, []byte("{}")),
- UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()),
+ JobID: jobID,
+ CachedPlan: takeFirstSlice(orig.CachedPlan, []byte("{}")),
+ CachedModuleFiles: orig.CachedModuleFiles,
+ UpdatedAt: takeFirst(orig.UpdatedAt, dbtime.Now()),
+ ProvisionerdVersion: takeFirst(orig.ProvisionerdVersion, proto.CurrentVersion.String()),
}
err := db.InsertTemplateVersionTerraformValuesByJobID(genCtx, params)
require.NoError(t, err, "insert template version parameter")
+
+ v, err := db.GetTemplateVersionTerraformValues(genCtx, orig.TemplateVersionID)
+ require.NoError(t, err, "get template version values")
+
+ return v
}
func WorkspaceAgentStat(t testing.TB, db database.Store, orig database.WorkspaceAgentStat) database.WorkspaceAgentStat {
@@ -1222,6 +1240,7 @@ func TelemetryItem(t testing.TB, db database.Store, seed database.TelemetryItem)
func Preset(t testing.TB, db database.Store, seed database.InsertPresetParams) database.TemplateVersionPreset {
preset, err := db.InsertPreset(genCtx, database.InsertPresetParams{
+ ID: takeFirst(seed.ID, uuid.New()),
TemplateVersionID: takeFirst(seed.TemplateVersionID, uuid.New()),
Name: takeFirst(seed.Name, testutil.GetRandomName(t)),
CreatedAt: takeFirst(seed.CreatedAt, dbtime.Now()),
diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go
index 6bae4455a89ef..fc5a10cafc481 100644
--- a/coderd/database/dbmem/dbmem.go
+++ b/coderd/database/dbmem/dbmem.go
@@ -1380,6 +1380,12 @@ func (q *FakeQuerier) getProvisionerJobsByIDsWithQueuePositionLockedGlobalQueue(
return jobs, nil
}
+// isDeprecated returns true if the template is deprecated.
+// A template is considered deprecated when it has a deprecation message.
+func isDeprecated(template database.Template) bool {
+ return template.Deprecated != ""
+}
+
func (*FakeQuerier) AcquireLock(_ context.Context, _ int64) error {
return xerrors.New("AcquireLock must only be called within a transaction")
}
@@ -7648,6 +7654,30 @@ func (q *FakeQuerier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, resou
return q.getWorkspaceAgentsByResourceIDsNoLock(ctx, resourceIDs)
}
+func (q *FakeQuerier) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]database.WorkspaceAgent, error) {
+ err := validateDatabaseType(arg)
+ if err != nil {
+ return nil, err
+ }
+
+ build, err := q.GetWorkspaceBuildByWorkspaceIDAndBuildNumber(ctx, database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams(arg))
+ if err != nil {
+ return nil, err
+ }
+
+ resources, err := q.getWorkspaceResourcesByJobIDNoLock(ctx, build.JobID)
+ if err != nil {
+ return nil, err
+ }
+
+ var resourceIDs []uuid.UUID
+ for _, resource := range resources {
+ resourceIDs = append(resourceIDs, resource.ID)
+ }
+
+ return q.GetWorkspaceAgentsByResourceIDs(ctx, resourceIDs)
+}
+
func (q *FakeQuerier) GetWorkspaceAgentsCreatedAfter(_ context.Context, after time.Time) ([]database.WorkspaceAgent, error) {
q.mutex.RLock()
defer q.mutex.RUnlock()
@@ -9313,9 +9343,11 @@ func (q *FakeQuerier) InsertTemplateVersionTerraformValuesByJobID(_ context.Cont
// Insert the new row
row := database.TemplateVersionTerraformValue{
- TemplateVersionID: templateVersion.ID,
- CachedPlan: arg.CachedPlan,
- UpdatedAt: arg.UpdatedAt,
+ TemplateVersionID: templateVersion.ID,
+ UpdatedAt: arg.UpdatedAt,
+ CachedPlan: arg.CachedPlan,
+ CachedModuleFiles: arg.CachedModuleFiles,
+ ProvisionerdVersion: arg.ProvisionerdVersion,
}
q.templateVersionTerraformValues = append(q.templateVersionTerraformValues, row)
return nil
@@ -9569,6 +9601,7 @@ func (q *FakeQuerier) InsertWorkspaceAgent(_ context.Context, arg database.Inser
agent := database.WorkspaceAgent{
ID: arg.ID,
+ ParentID: arg.ParentID,
CreatedAt: arg.CreatedAt,
UpdatedAt: arg.UpdatedAt,
ResourceID: arg.ResourceID,
@@ -9587,6 +9620,7 @@ func (q *FakeQuerier) InsertWorkspaceAgent(_ context.Context, arg database.Inser
LifecycleState: database.WorkspaceAgentLifecycleStateCreated,
DisplayApps: arg.DisplayApps,
DisplayOrder: arg.DisplayOrder,
+ APIKeyScope: arg.APIKeyScope,
}
q.workspaceAgents = append(q.workspaceAgents, agent)
@@ -11050,6 +11084,7 @@ func (q *FakeQuerier) UpdateTemplateMetaByID(_ context.Context, arg database.Upd
tpl.GroupACL = arg.GroupACL
tpl.AllowUserCancelWorkspaceJobs = arg.AllowUserCancelWorkspaceJobs
tpl.MaxPortSharingLevel = arg.MaxPortSharingLevel
+ tpl.UseClassicParameterFlow = arg.UseClassicParameterFlow
q.templates[idx] = tpl
return nil
}
@@ -13021,7 +13056,17 @@ func (q *FakeQuerier) GetAuthorizedTemplates(ctx context.Context, arg database.G
if arg.ExactName != "" && !strings.EqualFold(template.Name, arg.ExactName) {
continue
}
- if arg.Deprecated.Valid && arg.Deprecated.Bool == (template.Deprecated != "") {
+ // Filters templates based on the search query filter 'Deprecated' status
+ // Matching SQL logic:
+ // -- Filter by deprecated
+ // AND CASE
+ // WHEN :deprecated IS NOT NULL THEN
+ // CASE
+ // WHEN :deprecated THEN deprecated != ''
+ // ELSE deprecated = ''
+ // END
+ // ELSE true
+ if arg.Deprecated.Valid && arg.Deprecated.Bool != isDeprecated(template) {
continue
}
if arg.FuzzyName != "" {
diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go
index 128e741da1d76..a5a22aad1a0bf 100644
--- a/coderd/database/dbmetrics/querymetrics.go
+++ b/coderd/database/dbmetrics/querymetrics.go
@@ -1754,6 +1754,13 @@ func (m queryMetricsStore) GetWorkspaceAgentsByResourceIDs(ctx context.Context,
return agents, err
}
+func (m queryMetricsStore) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]database.WorkspaceAgent, error) {
+ start := time.Now()
+ r0, r1 := m.s.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, arg)
+ m.queryLatencies.WithLabelValues("GetWorkspaceAgentsByWorkspaceAndBuildNumber").Observe(time.Since(start).Seconds())
+ return r0, r1
+}
+
func (m queryMetricsStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) {
start := time.Now()
agents, err := m.s.GetWorkspaceAgentsCreatedAfter(ctx, createdAt)
diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go
index 17b263dfb2e07..0d66dcec11848 100644
--- a/coderd/database/dbmock/dbmock.go
+++ b/coderd/database/dbmock/dbmock.go
@@ -3678,6 +3678,21 @@ func (mr *MockStoreMockRecorder) GetWorkspaceAgentsByResourceIDs(ctx, ids any) *
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceAgentsByResourceIDs", reflect.TypeOf((*MockStore)(nil).GetWorkspaceAgentsByResourceIDs), ctx, ids)
}
+// GetWorkspaceAgentsByWorkspaceAndBuildNumber mocks base method.
+func (m *MockStore) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]database.WorkspaceAgent, error) {
+ m.ctrl.T.Helper()
+ ret := m.ctrl.Call(m, "GetWorkspaceAgentsByWorkspaceAndBuildNumber", ctx, arg)
+ ret0, _ := ret[0].([]database.WorkspaceAgent)
+ ret1, _ := ret[1].(error)
+ return ret0, ret1
+}
+
+// GetWorkspaceAgentsByWorkspaceAndBuildNumber indicates an expected call of GetWorkspaceAgentsByWorkspaceAndBuildNumber.
+func (mr *MockStoreMockRecorder) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, arg any) *gomock.Call {
+ mr.mock.ctrl.T.Helper()
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceAgentsByWorkspaceAndBuildNumber", reflect.TypeOf((*MockStore)(nil).GetWorkspaceAgentsByWorkspaceAndBuildNumber), ctx, arg)
+}
+
// GetWorkspaceAgentsCreatedAfter mocks base method.
func (m *MockStore) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) {
m.ctrl.T.Helper()
diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql
index 9ce3b0171d2d4..2f23b3ad4ce78 100644
--- a/coderd/database/dump.sql
+++ b/coderd/database/dump.sql
@@ -5,6 +5,11 @@ CREATE TYPE agent_id_name_pair AS (
name text
);
+CREATE TYPE agent_key_scope_enum AS ENUM (
+ 'all',
+ 'no_user_data'
+);
+
CREATE TYPE api_key_scope AS ENUM (
'all',
'application_connect'
@@ -1440,9 +1445,13 @@ CREATE TABLE template_version_presets (
CREATE TABLE template_version_terraform_values (
template_version_id uuid NOT NULL,
updated_at timestamp with time zone DEFAULT now() NOT NULL,
- cached_plan jsonb NOT NULL
+ cached_plan jsonb NOT NULL,
+ cached_module_files uuid,
+ provisionerd_version text DEFAULT ''::text NOT NULL
);
+COMMENT ON COLUMN template_version_terraform_values.provisionerd_version IS 'What version of the provisioning engine was used to generate the cached plan and module files.';
+
CREATE TABLE template_version_variables (
template_version_id uuid NOT NULL,
name text NOT NULL,
@@ -1551,7 +1560,8 @@ CREATE TABLE templates (
require_active_version boolean DEFAULT false NOT NULL,
deprecated text DEFAULT ''::text NOT NULL,
activity_bump bigint DEFAULT '3600000000000'::bigint NOT NULL,
- max_port_sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL
+ max_port_sharing_level app_sharing_level DEFAULT 'owner'::app_sharing_level NOT NULL,
+ use_classic_parameter_flow boolean DEFAULT false NOT NULL
);
COMMENT ON COLUMN templates.default_ttl IS 'The default duration for autostop for workspaces created from this template.';
@@ -1572,6 +1582,8 @@ COMMENT ON COLUMN templates.autostart_block_days_of_week IS 'A bitmap of days of
COMMENT ON COLUMN templates.deprecated IS 'If set to a non empty string, the template will no longer be able to be used. The message will be displayed to the user.';
+COMMENT ON COLUMN templates.use_classic_parameter_flow IS 'Determines whether to default to the dynamic parameter creation flow for this template or continue using the legacy classic parameter creation flow. This is a template-wide setting; the template admin can revert to the classic flow if there are any issues. An escape hatch is required, as workspace creation is a core workflow and cannot break. This column will be removed when the dynamic parameter creation flow is stable.';
+
CREATE VIEW template_with_names AS
SELECT templates.id,
templates.created_at,
@@ -1601,6 +1613,7 @@ CREATE VIEW template_with_names AS
templates.deprecated,
templates.activity_bump,
templates.max_port_sharing_level,
+ templates.use_classic_parameter_flow,
COALESCE(visible_users.avatar_url, ''::text) AS created_by_avatar_url,
COALESCE(visible_users.username, ''::text) AS created_by_username,
COALESCE(organizations.name, ''::text) AS organization_name,
@@ -1832,6 +1845,8 @@ CREATE TABLE workspace_agents (
display_apps display_app[] DEFAULT '{vscode,vscode_insiders,web_terminal,ssh_helper,port_forwarding_helper}'::display_app[],
api_version text DEFAULT ''::text NOT NULL,
display_order integer DEFAULT 0 NOT NULL,
+ parent_id uuid,
+ api_key_scope agent_key_scope_enum DEFAULT 'all'::agent_key_scope_enum NOT NULL,
CONSTRAINT max_logs_length CHECK ((logs_length <= 1048576)),
CONSTRAINT subsystems_not_none CHECK ((NOT ('none'::workspace_agent_subsystem = ANY (subsystems))))
);
@@ -1858,6 +1873,8 @@ COMMENT ON COLUMN workspace_agents.ready_at IS 'The time the agent entered the r
COMMENT ON COLUMN workspace_agents.display_order IS 'Specifies the order in which to display agents in user interfaces.';
+COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.';
+
CREATE UNLOGGED TABLE workspace_app_audit_sessions (
agent_id uuid NOT NULL,
app_id uuid NOT NULL,
@@ -2022,18 +2039,52 @@ CREATE VIEW workspace_build_with_user AS
COMMENT ON VIEW workspace_build_with_user IS 'Joins in the username + avatar url of the initiated by user.';
+CREATE TABLE workspaces (
+ id uuid NOT NULL,
+ created_at timestamp with time zone NOT NULL,
+ updated_at timestamp with time zone NOT NULL,
+ owner_id uuid NOT NULL,
+ organization_id uuid NOT NULL,
+ template_id uuid NOT NULL,
+ deleted boolean DEFAULT false NOT NULL,
+ name character varying(64) NOT NULL,
+ autostart_schedule text,
+ ttl bigint,
+ last_used_at timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
+ dormant_at timestamp with time zone,
+ deleting_at timestamp with time zone,
+ automatic_updates automatic_updates DEFAULT 'never'::automatic_updates NOT NULL,
+ favorite boolean DEFAULT false NOT NULL,
+ next_start_at timestamp with time zone
+);
+
+COMMENT ON COLUMN workspaces.favorite IS 'Favorite is true if the workspace owner has favorited the workspace.';
+
CREATE VIEW workspace_latest_builds AS
- SELECT DISTINCT ON (wb.workspace_id) wb.id,
- wb.workspace_id,
- wb.template_version_id,
- wb.job_id,
- wb.template_version_preset_id,
- wb.transition,
- wb.created_at,
- pj.job_status
- FROM (workspace_builds wb
- JOIN provisioner_jobs pj ON ((wb.job_id = pj.id)))
- ORDER BY wb.workspace_id, wb.build_number DESC;
+ SELECT latest_build.id,
+ latest_build.workspace_id,
+ latest_build.template_version_id,
+ latest_build.job_id,
+ latest_build.template_version_preset_id,
+ latest_build.transition,
+ latest_build.created_at,
+ latest_build.job_status
+ FROM (workspaces
+ LEFT JOIN LATERAL ( SELECT workspace_builds.id,
+ workspace_builds.workspace_id,
+ workspace_builds.template_version_id,
+ workspace_builds.job_id,
+ workspace_builds.template_version_preset_id,
+ workspace_builds.transition,
+ workspace_builds.created_at,
+ provisioner_jobs.job_status
+ FROM (workspace_builds
+ JOIN provisioner_jobs ON ((provisioner_jobs.id = workspace_builds.job_id)))
+ WHERE (workspace_builds.workspace_id = workspaces.id)
+ ORDER BY workspace_builds.build_number DESC
+ LIMIT 1) latest_build ON (true))
+ WHERE (workspaces.deleted = false)
+ ORDER BY workspaces.id;
CREATE TABLE workspace_modules (
id uuid NOT NULL,
@@ -2070,27 +2121,6 @@ CREATE TABLE workspace_resources (
module_path text
);
-CREATE TABLE workspaces (
- id uuid NOT NULL,
- created_at timestamp with time zone NOT NULL,
- updated_at timestamp with time zone NOT NULL,
- owner_id uuid NOT NULL,
- organization_id uuid NOT NULL,
- template_id uuid NOT NULL,
- deleted boolean DEFAULT false NOT NULL,
- name character varying(64) NOT NULL,
- autostart_schedule text,
- ttl bigint,
- last_used_at timestamp with time zone DEFAULT '0001-01-01 00:00:00+00'::timestamp with time zone NOT NULL,
- dormant_at timestamp with time zone,
- deleting_at timestamp with time zone,
- automatic_updates automatic_updates DEFAULT 'never'::automatic_updates NOT NULL,
- favorite boolean DEFAULT false NOT NULL,
- next_start_at timestamp with time zone
-);
-
-COMMENT ON COLUMN workspaces.favorite IS 'Favorite is true if the workspace owner has favorited the workspace.';
-
CREATE VIEW workspace_prebuilds AS
WITH all_prebuilds AS (
SELECT w.id,
@@ -2850,6 +2880,9 @@ ALTER TABLE ONLY template_version_preset_parameters
ALTER TABLE ONLY template_version_presets
ADD CONSTRAINT template_version_presets_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
+ALTER TABLE ONLY template_version_terraform_values
+ ADD CONSTRAINT template_version_terraform_values_cached_module_files_fkey FOREIGN KEY (cached_module_files) REFERENCES files(id);
+
ALTER TABLE ONLY template_version_terraform_values
ADD CONSTRAINT template_version_terraform_values_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
@@ -2922,6 +2955,9 @@ ALTER TABLE ONLY workspace_agent_logs
ALTER TABLE ONLY workspace_agent_volume_resource_monitors
ADD CONSTRAINT workspace_agent_volume_resource_monitors_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
+ALTER TABLE ONLY workspace_agents
+ ADD CONSTRAINT workspace_agents_parent_id_fkey FOREIGN KEY (parent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY workspace_agents
ADD CONSTRAINT workspace_agents_resource_id_fkey FOREIGN KEY (resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE;
diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go
index 0db3e9522547e..d6b87ddff5376 100644
--- a/coderd/database/foreign_key_constraint.go
+++ b/coderd/database/foreign_key_constraint.go
@@ -46,6 +46,7 @@ const (
ForeignKeyTemplateVersionParametersTemplateVersionID ForeignKeyConstraint = "template_version_parameters_template_version_id_fkey" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT template_version_parameters_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
ForeignKeyTemplateVersionPresetParametTemplateVersionPresetID ForeignKeyConstraint = "template_version_preset_paramet_template_version_preset_id_fkey" // ALTER TABLE ONLY template_version_preset_parameters ADD CONSTRAINT template_version_preset_paramet_template_version_preset_id_fkey FOREIGN KEY (template_version_preset_id) REFERENCES template_version_presets(id) ON DELETE CASCADE;
ForeignKeyTemplateVersionPresetsTemplateVersionID ForeignKeyConstraint = "template_version_presets_template_version_id_fkey" // ALTER TABLE ONLY template_version_presets ADD CONSTRAINT template_version_presets_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
+ ForeignKeyTemplateVersionTerraformValuesCachedModuleFiles ForeignKeyConstraint = "template_version_terraform_values_cached_module_files_fkey" // ALTER TABLE ONLY template_version_terraform_values ADD CONSTRAINT template_version_terraform_values_cached_module_files_fkey FOREIGN KEY (cached_module_files) REFERENCES files(id);
ForeignKeyTemplateVersionTerraformValuesTemplateVersionID ForeignKeyConstraint = "template_version_terraform_values_template_version_id_fkey" // ALTER TABLE ONLY template_version_terraform_values ADD CONSTRAINT template_version_terraform_values_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
ForeignKeyTemplateVersionVariablesTemplateVersionID ForeignKeyConstraint = "template_version_variables_template_version_id_fkey" // ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
ForeignKeyTemplateVersionWorkspaceTagsTemplateVersionID ForeignKeyConstraint = "template_version_workspace_tags_template_version_id_fkey" // ALTER TABLE ONLY template_version_workspace_tags ADD CONSTRAINT template_version_workspace_tags_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE;
@@ -70,6 +71,7 @@ const (
ForeignKeyWorkspaceAgentScriptsWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_scripts_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_scripts ADD CONSTRAINT workspace_agent_scripts_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyWorkspaceAgentStartupLogsAgentID ForeignKeyConstraint = "workspace_agent_startup_logs_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_logs ADD CONSTRAINT workspace_agent_startup_logs_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyWorkspaceAgentVolumeResourceMonitorsAgentID ForeignKeyConstraint = "workspace_agent_volume_resource_monitors_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_volume_resource_monitors ADD CONSTRAINT workspace_agent_volume_resource_monitors_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
+ ForeignKeyWorkspaceAgentsParentID ForeignKeyConstraint = "workspace_agents_parent_id_fkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_parent_id_fkey FOREIGN KEY (parent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyWorkspaceAgentsResourceID ForeignKeyConstraint = "workspace_agents_resource_id_fkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_resource_id_fkey FOREIGN KEY (resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE;
ForeignKeyWorkspaceAppAuditSessionsAgentID ForeignKeyConstraint = "workspace_app_audit_sessions_agent_id_fkey" // ALTER TABLE ONLY workspace_app_audit_sessions ADD CONSTRAINT workspace_app_audit_sessions_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE;
ForeignKeyWorkspaceAppStatsAgentID ForeignKeyConstraint = "workspace_app_stats_agent_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id);
diff --git a/coderd/database/migrations/000320_terraform_cached_modules.down.sql b/coderd/database/migrations/000320_terraform_cached_modules.down.sql
new file mode 100644
index 0000000000000..6894e43ca9a98
--- /dev/null
+++ b/coderd/database/migrations/000320_terraform_cached_modules.down.sql
@@ -0,0 +1 @@
+ALTER TABLE template_version_terraform_values DROP COLUMN cached_module_files;
diff --git a/coderd/database/migrations/000320_terraform_cached_modules.up.sql b/coderd/database/migrations/000320_terraform_cached_modules.up.sql
new file mode 100644
index 0000000000000..17028040de7d1
--- /dev/null
+++ b/coderd/database/migrations/000320_terraform_cached_modules.up.sql
@@ -0,0 +1 @@
+ALTER TABLE template_version_terraform_values ADD COLUMN cached_module_files uuid references files(id);
diff --git a/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.down.sql b/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.down.sql
new file mode 100644
index 0000000000000..ab810126ad60e
--- /dev/null
+++ b/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.down.sql
@@ -0,0 +1,2 @@
+ALTER TABLE workspace_agents
+DROP COLUMN IF EXISTS parent_id;
diff --git a/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.up.sql b/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.up.sql
new file mode 100644
index 0000000000000..f2fd7a8c1cd10
--- /dev/null
+++ b/coderd/database/migrations/000321_add_parent_id_to_workspace_agents.up.sql
@@ -0,0 +1,2 @@
+ALTER TABLE workspace_agents
+ADD COLUMN parent_id UUID REFERENCES workspace_agents (id) ON DELETE CASCADE;
diff --git a/coderd/database/migrations/000322_rename_test_notification.down.sql b/coderd/database/migrations/000322_rename_test_notification.down.sql
new file mode 100644
index 0000000000000..06bfab4370d1d
--- /dev/null
+++ b/coderd/database/migrations/000322_rename_test_notification.down.sql
@@ -0,0 +1,3 @@
+UPDATE notification_templates
+SET name = 'Test Notification'
+WHERE id = 'c425f63e-716a-4bf4-ae24-78348f706c3f';
diff --git a/coderd/database/migrations/000322_rename_test_notification.up.sql b/coderd/database/migrations/000322_rename_test_notification.up.sql
new file mode 100644
index 0000000000000..52b2db5a9353b
--- /dev/null
+++ b/coderd/database/migrations/000322_rename_test_notification.up.sql
@@ -0,0 +1,3 @@
+UPDATE notification_templates
+SET name = 'Troubleshooting Notification'
+WHERE id = 'c425f63e-716a-4bf4-ae24-78348f706c3f';
diff --git a/coderd/database/migrations/000323_workspace_latest_builds_optimization.down.sql b/coderd/database/migrations/000323_workspace_latest_builds_optimization.down.sql
new file mode 100644
index 0000000000000..9d9ae7aff4bd9
--- /dev/null
+++ b/coderd/database/migrations/000323_workspace_latest_builds_optimization.down.sql
@@ -0,0 +1,58 @@
+DROP VIEW workspace_prebuilds;
+DROP VIEW workspace_latest_builds;
+
+-- Revert to previous version from 000314_prebuilds.up.sql
+CREATE VIEW workspace_latest_builds AS
+SELECT DISTINCT ON (workspace_id)
+ wb.id,
+ wb.workspace_id,
+ wb.template_version_id,
+ wb.job_id,
+ wb.template_version_preset_id,
+ wb.transition,
+ wb.created_at,
+ pj.job_status
+FROM workspace_builds wb
+ INNER JOIN provisioner_jobs pj ON wb.job_id = pj.id
+ORDER BY wb.workspace_id, wb.build_number DESC;
+
+-- Recreate the dependent views
+CREATE VIEW workspace_prebuilds AS
+ WITH all_prebuilds AS (
+ SELECT w.id,
+ w.name,
+ w.template_id,
+ w.created_at
+ FROM workspaces w
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ ), workspaces_with_latest_presets AS (
+ SELECT DISTINCT ON (workspace_builds.workspace_id) workspace_builds.workspace_id,
+ workspace_builds.template_version_preset_id
+ FROM workspace_builds
+ WHERE (workspace_builds.template_version_preset_id IS NOT NULL)
+ ORDER BY workspace_builds.workspace_id, workspace_builds.build_number DESC
+ ), workspaces_with_agents_status AS (
+ SELECT w.id AS workspace_id,
+ bool_and((wa.lifecycle_state = 'ready'::workspace_agent_lifecycle_state)) AS ready
+ FROM (((workspaces w
+ JOIN workspace_latest_builds wlb ON ((wlb.workspace_id = w.id)))
+ JOIN workspace_resources wr ON ((wr.job_id = wlb.job_id)))
+ JOIN workspace_agents wa ON ((wa.resource_id = wr.id)))
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ GROUP BY w.id
+ ), current_presets AS (
+ SELECT w.id AS prebuild_id,
+ wlp.template_version_preset_id
+ FROM (workspaces w
+ JOIN workspaces_with_latest_presets wlp ON ((wlp.workspace_id = w.id)))
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ )
+ SELECT p.id,
+ p.name,
+ p.template_id,
+ p.created_at,
+ COALESCE(a.ready, false) AS ready,
+ cp.template_version_preset_id AS current_preset_id
+ FROM ((all_prebuilds p
+ LEFT JOIN workspaces_with_agents_status a ON ((a.workspace_id = p.id)))
+ JOIN current_presets cp ON ((cp.prebuild_id = p.id)));
diff --git a/coderd/database/migrations/000323_workspace_latest_builds_optimization.up.sql b/coderd/database/migrations/000323_workspace_latest_builds_optimization.up.sql
new file mode 100644
index 0000000000000..d65e09ef47339
--- /dev/null
+++ b/coderd/database/migrations/000323_workspace_latest_builds_optimization.up.sql
@@ -0,0 +1,85 @@
+-- Drop the dependent views
+DROP VIEW workspace_prebuilds;
+-- Previously created in 000314_prebuilds.up.sql
+DROP VIEW workspace_latest_builds;
+
+-- The previous version of this view had two sequential scans on two very large
+-- tables. This version optimized it by using index scans (via a lateral join)
+-- AND avoiding selecting builds from deleted workspaces.
+CREATE VIEW workspace_latest_builds AS
+SELECT
+ latest_build.id,
+ latest_build.workspace_id,
+ latest_build.template_version_id,
+ latest_build.job_id,
+ latest_build.template_version_preset_id,
+ latest_build.transition,
+ latest_build.created_at,
+ latest_build.job_status
+FROM workspaces
+LEFT JOIN LATERAL (
+ SELECT
+ workspace_builds.id AS id,
+ workspace_builds.workspace_id AS workspace_id,
+ workspace_builds.template_version_id AS template_version_id,
+ workspace_builds.job_id AS job_id,
+ workspace_builds.template_version_preset_id AS template_version_preset_id,
+ workspace_builds.transition AS transition,
+ workspace_builds.created_at AS created_at,
+ provisioner_jobs.job_status AS job_status
+ FROM
+ workspace_builds
+ JOIN
+ provisioner_jobs
+ ON
+ provisioner_jobs.id = workspace_builds.job_id
+ WHERE
+ workspace_builds.workspace_id = workspaces.id
+ ORDER BY
+ build_number DESC
+ LIMIT
+ 1
+) latest_build ON TRUE
+WHERE workspaces.deleted = false
+ORDER BY workspaces.id ASC;
+
+-- Recreate the dependent views
+CREATE VIEW workspace_prebuilds AS
+ WITH all_prebuilds AS (
+ SELECT w.id,
+ w.name,
+ w.template_id,
+ w.created_at
+ FROM workspaces w
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ ), workspaces_with_latest_presets AS (
+ SELECT DISTINCT ON (workspace_builds.workspace_id) workspace_builds.workspace_id,
+ workspace_builds.template_version_preset_id
+ FROM workspace_builds
+ WHERE (workspace_builds.template_version_preset_id IS NOT NULL)
+ ORDER BY workspace_builds.workspace_id, workspace_builds.build_number DESC
+ ), workspaces_with_agents_status AS (
+ SELECT w.id AS workspace_id,
+ bool_and((wa.lifecycle_state = 'ready'::workspace_agent_lifecycle_state)) AS ready
+ FROM (((workspaces w
+ JOIN workspace_latest_builds wlb ON ((wlb.workspace_id = w.id)))
+ JOIN workspace_resources wr ON ((wr.job_id = wlb.job_id)))
+ JOIN workspace_agents wa ON ((wa.resource_id = wr.id)))
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ GROUP BY w.id
+ ), current_presets AS (
+ SELECT w.id AS prebuild_id,
+ wlp.template_version_preset_id
+ FROM (workspaces w
+ JOIN workspaces_with_latest_presets wlp ON ((wlp.workspace_id = w.id)))
+ WHERE (w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'::uuid)
+ )
+ SELECT p.id,
+ p.name,
+ p.template_id,
+ p.created_at,
+ COALESCE(a.ready, false) AS ready,
+ cp.template_version_preset_id AS current_preset_id
+ FROM ((all_prebuilds p
+ LEFT JOIN workspaces_with_agents_status a ON ((a.workspace_id = p.id)))
+ JOIN current_presets cp ON ((cp.prebuild_id = p.id)));
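The optimization described in the comment above can be sanity-checked against a live plan. This sketch is not part of the migration and assumes a populated Coder database with the existing unique index on workspace_builds (workspace_id, build_number); the exact plan shape will vary with table sizes and statistics:

    -- Illustrative only: with the lateral join, each workspace should resolve its
    -- latest build via an index scan with LIMIT 1 instead of a sequential scan
    -- over workspace_builds followed by a sort.
    EXPLAIN (ANALYZE, BUFFERS)
    SELECT * FROM workspace_latest_builds;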
diff --git a/coderd/database/migrations/000324_resource_replacements_notification.down.sql b/coderd/database/migrations/000324_resource_replacements_notification.down.sql
new file mode 100644
index 0000000000000..8da13f718b635
--- /dev/null
+++ b/coderd/database/migrations/000324_resource_replacements_notification.down.sql
@@ -0,0 +1 @@
+DELETE FROM notification_templates WHERE id = '89d9745a-816e-4695-a17f-3d0a229e2b8d';
diff --git a/coderd/database/migrations/000324_resource_replacements_notification.up.sql b/coderd/database/migrations/000324_resource_replacements_notification.up.sql
new file mode 100644
index 0000000000000..395332adaee20
--- /dev/null
+++ b/coderd/database/migrations/000324_resource_replacements_notification.up.sql
@@ -0,0 +1,34 @@
+INSERT INTO notification_templates
+ (id, name, title_template, body_template, "group", actions)
+VALUES ('89d9745a-816e-4695-a17f-3d0a229e2b8d',
+ 'Prebuilt Workspace Resource Replaced',
+ E'There might be a problem with a recently claimed prebuilt workspace',
+ $$
+Workspace **{{.Labels.workspace}}** was claimed from a prebuilt workspace by **{{.Labels.claimant}}**.
+
+During the claim, Terraform destroyed and recreated the following resources
+because one or more immutable attributes changed:
+
+{{range $resource, $paths := .Data.replacements -}}
+- _{{ $resource }}_ was replaced due to changes to _{{ $paths }}_
+{{end}}
+
+When Terraform must change an immutable attribute, it replaces the entire resource.
+If you’re using prebuilds to speed up provisioning, unexpected replacements will slow down
+workspace startup—even when claiming a prebuilt environment.
+
+For tips on preventing replacements and improving claim performance, see [this guide](https://coder.com/docs/admin/templates/extending-templates/prebuilt-workspaces#preventing-resource-replacement).
+
+NOTE: this prebuilt workspace used the **{{.Labels.preset}}** preset.
+$$,
+ 'Template Events',
+ '[
+ {
+ "label": "View workspace build",
+ "url": "{{base_url}}/@{{.Labels.claimant}}/{{.Labels.workspace}}/builds/{{.Labels.workspace_build_num}}"
+ },
+ {
+ "label": "View template version",
+ "url": "{{base_url}}/templates/{{.Labels.org}}/{{.Labels.template}}/versions/{{.Labels.template_version}}"
+ }
+ ]'::jsonb);
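The body above is a Go text/template: it ranges over .Data.replacements (assumed to be a map of resource name to the changed attribute paths) and reads the workspace, claimant, and preset labels. A quick post-migration check, not part of this change and assuming actions is stored as jsonb as the cast above suggests:

    -- Illustrative only: the new notification template should exist with two action links.
    SELECT name, jsonb_array_length(actions) AS action_count
    FROM notification_templates
    WHERE id = '89d9745a-816e-4695-a17f-3d0a229e2b8d';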
diff --git a/coderd/database/migrations/000325_dynamic_parameters_metadata.down.sql b/coderd/database/migrations/000325_dynamic_parameters_metadata.down.sql
new file mode 100644
index 0000000000000..991871b5700ab
--- /dev/null
+++ b/coderd/database/migrations/000325_dynamic_parameters_metadata.down.sql
@@ -0,0 +1 @@
+ALTER TABLE template_version_terraform_values DROP COLUMN provisionerd_version;
diff --git a/coderd/database/migrations/000325_dynamic_parameters_metadata.up.sql b/coderd/database/migrations/000325_dynamic_parameters_metadata.up.sql
new file mode 100644
index 0000000000000..211693b7f3e79
--- /dev/null
+++ b/coderd/database/migrations/000325_dynamic_parameters_metadata.up.sql
@@ -0,0 +1,4 @@
+ALTER TABLE template_version_terraform_values ADD COLUMN IF NOT EXISTS provisionerd_version TEXT NOT NULL DEFAULT '';
+
+COMMENT ON COLUMN template_version_terraform_values.provisionerd_version IS
+ 'What version of the provisioning engine was used to generate the cached plan and module files.';
diff --git a/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.down.sql b/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.down.sql
new file mode 100644
index 0000000000000..48477606d80b1
--- /dev/null
+++ b/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.down.sql
@@ -0,0 +1,6 @@
+-- Remove the api_key_scope column from the workspace_agents table
+ALTER TABLE workspace_agents
+DROP COLUMN IF EXISTS api_key_scope;
+
+-- Drop the enum type for API key scope
+DROP TYPE IF EXISTS agent_key_scope_enum;
diff --git a/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.up.sql b/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.up.sql
new file mode 100644
index 0000000000000..ee0581fcdb145
--- /dev/null
+++ b/coderd/database/migrations/000326_add_api_key_scope_to_workspace_agents.up.sql
@@ -0,0 +1,10 @@
+-- Create the enum type for API key scope
+CREATE TYPE agent_key_scope_enum AS ENUM ('all', 'no_user_data');
+
+-- Add the api_key_scope column to the workspace_agents table
+-- It defaults to 'all' to maintain existing behavior for current agents.
+ALTER TABLE workspace_agents
+ADD COLUMN api_key_scope agent_key_scope_enum NOT NULL DEFAULT 'all';
+
+-- Add a comment explaining the purpose of the column
+COMMENT ON COLUMN workspace_agents.api_key_scope IS 'Defines the scope of the API key associated with the agent. ''all'' allows access to everything, ''no_user_data'' restricts it to exclude user data.';
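A quick post-migration check, not part of this change, to confirm the enum values and that existing agents picked up the 'all' default:

    -- Illustrative only: list the enum values, then the scope distribution across agents.
    SELECT unnest(enum_range(NULL::agent_key_scope_enum)) AS scope;
    SELECT api_key_scope, count(*) FROM workspace_agents GROUP BY api_key_scope;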
diff --git a/coderd/database/migrations/000327_version_dynamic_parameter_flow.down.sql b/coderd/database/migrations/000327_version_dynamic_parameter_flow.down.sql
new file mode 100644
index 0000000000000..6839abb73d9c9
--- /dev/null
+++ b/coderd/database/migrations/000327_version_dynamic_parameter_flow.down.sql
@@ -0,0 +1,28 @@
+DROP VIEW template_with_names;
+
+-- Drop the column
+ALTER TABLE templates DROP COLUMN use_classic_parameter_flow;
+
+
+CREATE VIEW
+ template_with_names
+AS
+SELECT
+ templates.*,
+ coalesce(visible_users.avatar_url, '') AS created_by_avatar_url,
+ coalesce(visible_users.username, '') AS created_by_username,
+ coalesce(organizations.name, '') AS organization_name,
+ coalesce(organizations.display_name, '') AS organization_display_name,
+ coalesce(organizations.icon, '') AS organization_icon
+FROM
+ templates
+ LEFT JOIN
+ visible_users
+ ON
+ templates.created_by = visible_users.id
+ LEFT JOIN
+ organizations
+ ON templates.organization_id = organizations.id
+;
+
+COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.';
diff --git a/coderd/database/migrations/000327_version_dynamic_parameter_flow.up.sql b/coderd/database/migrations/000327_version_dynamic_parameter_flow.up.sql
new file mode 100644
index 0000000000000..ba724b3fb8da2
--- /dev/null
+++ b/coderd/database/migrations/000327_version_dynamic_parameter_flow.up.sql
@@ -0,0 +1,36 @@
+-- Default to `false`. Users will have to manually opt back into the classic parameter flow.
+-- We want the new experience to be tried first.
+ALTER TABLE templates ADD COLUMN use_classic_parameter_flow BOOL NOT NULL DEFAULT false;
+
+COMMENT ON COLUMN templates.use_classic_parameter_flow IS
+ 'Determines whether to default to the dynamic parameter creation flow for this template '
+ 'or continue using the legacy classic parameter creation flow. '
+ 'This is a template-wide setting; the template admin can revert to the classic flow if there are any issues. '
+ 'An escape hatch is required, as workspace creation is a core workflow and cannot break. '
+ 'This column will be removed when the dynamic parameter creation flow is stable.';
+
+
+-- Update the template_with_names view by recreating it.
+DROP VIEW template_with_names;
+CREATE VIEW
+ template_with_names
+AS
+SELECT
+ templates.*,
+ coalesce(visible_users.avatar_url, '') AS created_by_avatar_url,
+ coalesce(visible_users.username, '') AS created_by_username,
+ coalesce(organizations.name, '') AS organization_name,
+ coalesce(organizations.display_name, '') AS organization_display_name,
+ coalesce(organizations.icon, '') AS organization_icon
+FROM
+ templates
+ LEFT JOIN
+ visible_users
+ ON
+ templates.created_by = visible_users.id
+ LEFT JOIN
+ organizations
+ ON templates.organization_id = organizations.id
+;
+
+COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.';
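Note that the COMMENT ON COLUMN statement at the top of this migration relies on PostgreSQL concatenating adjacent string literals separated by a newline, so each fragment must end with its own trailing space or the joined comment runs words together. A tiny illustration, not part of this migration:

    -- 'classic ' followed on the next line by 'flow.' joins to 'classic flow.';
    -- dropping the trailing space in the first fragment would yield 'classicflow.'.
    SELECT 'classic '
           'flow.' AS joined;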
diff --git a/coderd/database/modelqueries.go b/coderd/database/modelqueries.go
index 1bf37ce0c09e6..4144c183de380 100644
--- a/coderd/database/modelqueries.go
+++ b/coderd/database/modelqueries.go
@@ -117,6 +117,7 @@ func (q *sqlQuerier) GetAuthorizedTemplates(ctx context.Context, arg GetTemplate
&i.Deprecated,
&i.ActivityBump,
&i.MaxPortSharingLevel,
+ &i.UseClassicParameterFlow,
&i.CreatedByAvatarURL,
&i.CreatedByUsername,
&i.OrganizationName,
diff --git a/coderd/database/models.go b/coderd/database/models.go
index c8ac71e8b9398..ff49b8f471be0 100644
--- a/coderd/database/models.go
+++ b/coderd/database/models.go
@@ -74,6 +74,64 @@ func AllAPIKeyScopeValues() []APIKeyScope {
}
}
+type AgentKeyScopeEnum string
+
+const (
+ AgentKeyScopeEnumAll AgentKeyScopeEnum = "all"
+ AgentKeyScopeEnumNoUserData AgentKeyScopeEnum = "no_user_data"
+)
+
+func (e *AgentKeyScopeEnum) Scan(src interface{}) error {
+ switch s := src.(type) {
+ case []byte:
+ *e = AgentKeyScopeEnum(s)
+ case string:
+ *e = AgentKeyScopeEnum(s)
+ default:
+ return fmt.Errorf("unsupported scan type for AgentKeyScopeEnum: %T", src)
+ }
+ return nil
+}
+
+type NullAgentKeyScopeEnum struct {
+ AgentKeyScopeEnum AgentKeyScopeEnum `json:"agent_key_scope_enum"`
+ Valid bool `json:"valid"` // Valid is true if AgentKeyScopeEnum is not NULL
+}
+
+// Scan implements the Scanner interface.
+func (ns *NullAgentKeyScopeEnum) Scan(value interface{}) error {
+ if value == nil {
+ ns.AgentKeyScopeEnum, ns.Valid = "", false
+ return nil
+ }
+ ns.Valid = true
+ return ns.AgentKeyScopeEnum.Scan(value)
+}
+
+// Value implements the driver Valuer interface.
+func (ns NullAgentKeyScopeEnum) Value() (driver.Value, error) {
+ if !ns.Valid {
+ return nil, nil
+ }
+ return string(ns.AgentKeyScopeEnum), nil
+}
+
+func (e AgentKeyScopeEnum) Valid() bool {
+ switch e {
+ case AgentKeyScopeEnumAll,
+ AgentKeyScopeEnumNoUserData:
+ return true
+ }
+ return false
+}
+
+func AllAgentKeyScopeEnumValues() []AgentKeyScopeEnum {
+ return []AgentKeyScopeEnum{
+ AgentKeyScopeEnumAll,
+ AgentKeyScopeEnumNoUserData,
+ }
+}
+
type AppSharingLevel string
const (
@@ -3056,6 +3114,7 @@ type Template struct {
Deprecated string `db:"deprecated" json:"deprecated"`
ActivityBump int64 `db:"activity_bump" json:"activity_bump"`
MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"`
+ UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"`
CreatedByAvatarURL string `db:"created_by_avatar_url" json:"created_by_avatar_url"`
CreatedByUsername string `db:"created_by_username" json:"created_by_username"`
OrganizationName string `db:"organization_name" json:"organization_name"`
@@ -3101,6 +3160,8 @@ type TemplateTable struct {
Deprecated string `db:"deprecated" json:"deprecated"`
ActivityBump int64 `db:"activity_bump" json:"activity_bump"`
MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"`
+ // Determines whether to default to the dynamic parameter creation flow for this template or continue using the legacy classic parameter creation flow. This is a template-wide setting; the template admin can revert to the classic flow if there are any issues. An escape hatch is required, as workspace creation is a core workflow and cannot break. This column will be removed when the dynamic parameter creation flow is stable.
+ UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"`
}
// Records aggregated usage statistics for templates/users. All usage is rounded up to the nearest minute.
@@ -3224,6 +3285,9 @@ type TemplateVersionTerraformValue struct {
TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
CachedPlan json.RawMessage `db:"cached_plan" json:"cached_plan"`
+ CachedModuleFiles uuid.NullUUID `db:"cached_module_files" json:"cached_module_files"`
+ // What version of the provisioning engine was used to generate the cached plan and module files.
+ ProvisionerdVersion string `db:"provisionerd_version" json:"provisionerd_version"`
}
type TemplateVersionVariable struct {
@@ -3401,7 +3465,10 @@ type WorkspaceAgent struct {
DisplayApps []DisplayApp `db:"display_apps" json:"display_apps"`
APIVersion string `db:"api_version" json:"api_version"`
// Specifies the order in which to display agents in user interfaces.
- DisplayOrder int32 `db:"display_order" json:"display_order"`
+ DisplayOrder int32 `db:"display_order" json:"display_order"`
+ ParentID uuid.NullUUID `db:"parent_id" json:"parent_id"`
+ // Defines the scope of the API key associated with the agent. 'all' allows access to everything, 'no_user_data' restricts it to exclude user data.
+ APIKeyScope AgentKeyScopeEnum `db:"api_key_scope" json:"api_key_scope"`
}
// Workspace agent devcontainer configuration
diff --git a/coderd/database/querier.go b/coderd/database/querier.go
index d0f74ee609724..81b8d58758ada 100644
--- a/coderd/database/querier.go
+++ b/coderd/database/querier.go
@@ -400,6 +400,7 @@ type sqlcQuerier interface {
GetWorkspaceAgentUsageStats(ctx context.Context, createdAt time.Time) ([]GetWorkspaceAgentUsageStatsRow, error)
GetWorkspaceAgentUsageStatsAndLabels(ctx context.Context, createdAt time.Time) ([]GetWorkspaceAgentUsageStatsAndLabelsRow, error)
GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAgent, error)
+ GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]WorkspaceAgent, error)
GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceAgent, error)
GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceAgent, error)
GetWorkspaceAppByAgentIDAndSlug(ctx context.Context, arg GetWorkspaceAppByAgentIDAndSlugParams) (WorkspaceApp, error)
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go
index cd5b297c85e07..ac08d72d0e493 100644
--- a/coderd/database/queries.sql.go
+++ b/coderd/database/queries.sql.go
@@ -6149,6 +6149,7 @@ WHERE w.id IN (
AND b.template_version_id = t.active_version_id
AND p.current_preset_id = $3::uuid
AND p.ready
+ AND NOT t.deleted
LIMIT 1 FOR UPDATE OF p SKIP LOCKED -- Ensure that a concurrent request will not select the same prebuild.
)
RETURNING w.id, w.name
@@ -6184,6 +6185,7 @@ FROM workspace_latest_builds wlb
-- prebuilds that are still building.
INNER JOIN templates t ON t.active_version_id = wlb.template_version_id
WHERE wlb.job_status IN ('pending'::provisioner_job_status, 'running'::provisioner_job_status)
+ -- AND NOT t.deleted -- We don't exclude deleted templates because there's no constraint in the DB preventing a soft deletion on a template while workspaces are running.
GROUP BY t.id, wpb.template_version_id, wpb.transition, wlb.template_version_preset_id
`
@@ -6298,6 +6300,7 @@ WITH filtered_builds AS (
WHERE tvp.desired_instances IS NOT NULL -- Consider only presets that have a prebuild configuration.
AND wlb.transition = 'start'::workspace_transition
AND w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'
+ AND NOT t.deleted
),
time_sorted_builds AS (
-- Group builds by preset, then sort each group by created_at.
@@ -6449,6 +6452,7 @@ FROM templates t
INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id
INNER JOIN organizations o ON o.id = t.organization_id
WHERE tvp.desired_instances IS NOT NULL -- Consider only presets that have a prebuild configuration.
+ -- AND NOT t.deleted -- We don't exclude deleted templates because there's no constraint in the DB preventing a soft deletion on a template while workspaces are running.
AND (t.id = $1::uuid OR $1 IS NULL)
`
@@ -6678,6 +6682,7 @@ func (q *sqlQuerier) GetPresetsByTemplateVersionID(ctx context.Context, template
const insertPreset = `-- name: InsertPreset :one
INSERT INTO template_version_presets (
+ id,
template_version_id,
name,
created_at,
@@ -6689,11 +6694,13 @@ VALUES (
$2,
$3,
$4,
- $5
+ $5,
+ $6
) RETURNING id, template_version_id, name, created_at, desired_instances, invalidate_after_secs
`
type InsertPresetParams struct {
+ ID uuid.UUID `db:"id" json:"id"`
TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"`
Name string `db:"name" json:"name"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
@@ -6703,6 +6710,7 @@ type InsertPresetParams struct {
func (q *sqlQuerier) InsertPreset(ctx context.Context, arg InsertPresetParams) (TemplateVersionPreset, error) {
row := q.db.QueryRowContext(ctx, insertPreset,
+ arg.ID,
arg.TemplateVersionID,
arg.Name,
arg.CreatedAt,
@@ -10419,7 +10427,7 @@ func (q *sqlQuerier) GetTemplateAverageBuildTime(ctx context.Context, arg GetTem
const getTemplateByID = `-- name: GetTemplateByID :one
SELECT
- id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
+ id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
FROM
template_with_names
WHERE
@@ -10460,6 +10468,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat
&i.Deprecated,
&i.ActivityBump,
&i.MaxPortSharingLevel,
+ &i.UseClassicParameterFlow,
&i.CreatedByAvatarURL,
&i.CreatedByUsername,
&i.OrganizationName,
@@ -10471,7 +10480,7 @@ func (q *sqlQuerier) GetTemplateByID(ctx context.Context, id uuid.UUID) (Templat
const getTemplateByOrganizationAndName = `-- name: GetTemplateByOrganizationAndName :one
SELECT
- id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
+ id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
FROM
template_with_names AS templates
WHERE
@@ -10520,6 +10529,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G
&i.Deprecated,
&i.ActivityBump,
&i.MaxPortSharingLevel,
+ &i.UseClassicParameterFlow,
&i.CreatedByAvatarURL,
&i.CreatedByUsername,
&i.OrganizationName,
@@ -10530,7 +10540,7 @@ func (q *sqlQuerier) GetTemplateByOrganizationAndName(ctx context.Context, arg G
}
const getTemplates = `-- name: GetTemplates :many
-SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates
+SELECT id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon FROM template_with_names AS templates
ORDER BY (name, id) ASC
`
@@ -10572,6 +10582,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) {
&i.Deprecated,
&i.ActivityBump,
&i.MaxPortSharingLevel,
+ &i.UseClassicParameterFlow,
&i.CreatedByAvatarURL,
&i.CreatedByUsername,
&i.OrganizationName,
@@ -10593,7 +10604,7 @@ func (q *sqlQuerier) GetTemplates(ctx context.Context) ([]Template, error) {
const getTemplatesWithFilter = `-- name: GetTemplatesWithFilter :many
SELECT
- id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
+ id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow, created_by_avatar_url, created_by_username, organization_name, organization_display_name, organization_icon
FROM
template_with_names AS templates
WHERE
@@ -10693,6 +10704,7 @@ func (q *sqlQuerier) GetTemplatesWithFilter(ctx context.Context, arg GetTemplate
&i.Deprecated,
&i.ActivityBump,
&i.MaxPortSharingLevel,
+ &i.UseClassicParameterFlow,
&i.CreatedByAvatarURL,
&i.CreatedByUsername,
&i.OrganizationName,
@@ -10869,7 +10881,8 @@ SET
display_name = $6,
allow_user_cancel_workspace_jobs = $7,
group_acl = $8,
- max_port_sharing_level = $9
+ max_port_sharing_level = $9,
+ use_classic_parameter_flow = $10
WHERE
id = $1
`
@@ -10884,6 +10897,7 @@ type UpdateTemplateMetaByIDParams struct {
AllowUserCancelWorkspaceJobs bool `db:"allow_user_cancel_workspace_jobs" json:"allow_user_cancel_workspace_jobs"`
GroupACL TemplateACL `db:"group_acl" json:"group_acl"`
MaxPortSharingLevel AppSharingLevel `db:"max_port_sharing_level" json:"max_port_sharing_level"`
+ UseClassicParameterFlow bool `db:"use_classic_parameter_flow" json:"use_classic_parameter_flow"`
}
func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTemplateMetaByIDParams) error {
@@ -10897,6 +10911,7 @@ func (q *sqlQuerier) UpdateTemplateMetaByID(ctx context.Context, arg UpdateTempl
arg.AllowUserCancelWorkspaceJobs,
arg.GroupACL,
arg.MaxPortSharingLevel,
+ arg.UseClassicParameterFlow,
)
return err
}
@@ -11698,7 +11713,7 @@ func (q *sqlQuerier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx conte
const getTemplateVersionTerraformValues = `-- name: GetTemplateVersionTerraformValues :one
SELECT
- template_version_terraform_values.template_version_id, template_version_terraform_values.updated_at, template_version_terraform_values.cached_plan
+ template_version_terraform_values.template_version_id, template_version_terraform_values.updated_at, template_version_terraform_values.cached_plan, template_version_terraform_values.cached_module_files, template_version_terraform_values.provisionerd_version
FROM
template_version_terraform_values
WHERE
@@ -11708,7 +11723,13 @@ WHERE
func (q *sqlQuerier) GetTemplateVersionTerraformValues(ctx context.Context, templateVersionID uuid.UUID) (TemplateVersionTerraformValue, error) {
row := q.db.QueryRowContext(ctx, getTemplateVersionTerraformValues, templateVersionID)
var i TemplateVersionTerraformValue
- err := row.Scan(&i.TemplateVersionID, &i.UpdatedAt, &i.CachedPlan)
+ err := row.Scan(
+ &i.TemplateVersionID,
+ &i.UpdatedAt,
+ &i.CachedPlan,
+ &i.CachedModuleFiles,
+ &i.ProvisionerdVersion,
+ )
return i, err
}
@@ -11717,24 +11738,36 @@ INSERT INTO
template_version_terraform_values (
template_version_id,
cached_plan,
- updated_at
+ cached_module_files,
+ updated_at,
+ provisionerd_version
)
VALUES
(
(select id from template_versions where job_id = $1),
$2,
- $3
+ $3,
+ $4,
+ $5
)
`
type InsertTemplateVersionTerraformValuesByJobIDParams struct {
- JobID uuid.UUID `db:"job_id" json:"job_id"`
- CachedPlan json.RawMessage `db:"cached_plan" json:"cached_plan"`
- UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
+ JobID uuid.UUID `db:"job_id" json:"job_id"`
+ CachedPlan json.RawMessage `db:"cached_plan" json:"cached_plan"`
+ CachedModuleFiles uuid.NullUUID `db:"cached_module_files" json:"cached_module_files"`
+ UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
+ ProvisionerdVersion string `db:"provisionerd_version" json:"provisionerd_version"`
}
func (q *sqlQuerier) InsertTemplateVersionTerraformValuesByJobID(ctx context.Context, arg InsertTemplateVersionTerraformValuesByJobIDParams) error {
- _, err := q.db.ExecContext(ctx, insertTemplateVersionTerraformValuesByJobID, arg.JobID, arg.CachedPlan, arg.UpdatedAt)
+ _, err := q.db.ExecContext(ctx, insertTemplateVersionTerraformValuesByJobID,
+ arg.JobID,
+ arg.CachedPlan,
+ arg.CachedModuleFiles,
+ arg.UpdatedAt,
+ arg.ProvisionerdVersion,
+ )
return err
}
@@ -13913,7 +13946,7 @@ func (q *sqlQuerier) DeleteOldWorkspaceAgentLogs(ctx context.Context, threshold
const getWorkspaceAgentAndLatestBuildByAuthToken = `-- name: GetWorkspaceAgentAndLatestBuildByAuthToken :one
SELECT
workspaces.id, workspaces.created_at, workspaces.updated_at, workspaces.owner_id, workspaces.organization_id, workspaces.template_id, workspaces.deleted, workspaces.name, workspaces.autostart_schedule, workspaces.ttl, workspaces.last_used_at, workspaces.dormant_at, workspaces.deleting_at, workspaces.automatic_updates, workspaces.favorite, workspaces.next_start_at,
- workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order,
+ workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope,
workspace_build_with_user.id, workspace_build_with_user.created_at, workspace_build_with_user.updated_at, workspace_build_with_user.workspace_id, workspace_build_with_user.template_version_id, workspace_build_with_user.build_number, workspace_build_with_user.transition, workspace_build_with_user.initiator_id, workspace_build_with_user.provisioner_state, workspace_build_with_user.job_id, workspace_build_with_user.deadline, workspace_build_with_user.reason, workspace_build_with_user.daily_cost, workspace_build_with_user.max_deadline, workspace_build_with_user.template_version_preset_id, workspace_build_with_user.initiator_by_avatar_url, workspace_build_with_user.initiator_by_username
FROM
workspace_agents
@@ -14003,6 +14036,8 @@ func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Cont
pq.Array(&i.WorkspaceAgent.DisplayApps),
&i.WorkspaceAgent.APIVersion,
&i.WorkspaceAgent.DisplayOrder,
+ &i.WorkspaceAgent.ParentID,
+ &i.WorkspaceAgent.APIKeyScope,
&i.WorkspaceBuild.ID,
&i.WorkspaceBuild.CreatedAt,
&i.WorkspaceBuild.UpdatedAt,
@@ -14026,7 +14061,7 @@ func (q *sqlQuerier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Cont
const getWorkspaceAgentByID = `-- name: GetWorkspaceAgentByID :one
SELECT
- id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order
+ id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order, parent_id, api_key_scope
FROM
workspace_agents
WHERE
@@ -14068,13 +14103,15 @@ func (q *sqlQuerier) GetWorkspaceAgentByID(ctx context.Context, id uuid.UUID) (W
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
)
return i, err
}
const getWorkspaceAgentByInstanceID = `-- name: GetWorkspaceAgentByInstanceID :one
SELECT
- id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order
+ id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order, parent_id, api_key_scope
FROM
workspace_agents
WHERE
@@ -14118,6 +14155,8 @@ func (q *sqlQuerier) GetWorkspaceAgentByInstanceID(ctx context.Context, authInst
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
)
return i, err
}
@@ -14337,7 +14376,7 @@ func (q *sqlQuerier) GetWorkspaceAgentScriptTimingsByBuildID(ctx context.Context
const getWorkspaceAgentsByResourceIDs = `-- name: GetWorkspaceAgentsByResourceIDs :many
SELECT
- id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order
+ id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order, parent_id, api_key_scope
FROM
workspace_agents
WHERE
@@ -14385,6 +14424,84 @@ func (q *sqlQuerier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
+ ); err != nil {
+ return nil, err
+ }
+ items = append(items, i)
+ }
+ if err := rows.Close(); err != nil {
+ return nil, err
+ }
+ if err := rows.Err(); err != nil {
+ return nil, err
+ }
+ return items, nil
+}
+
+const getWorkspaceAgentsByWorkspaceAndBuildNumber = `-- name: GetWorkspaceAgentsByWorkspaceAndBuildNumber :many
+SELECT
+ workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope
+FROM
+ workspace_agents
+JOIN
+ workspace_resources ON workspace_agents.resource_id = workspace_resources.id
+JOIN
+ workspace_builds ON workspace_resources.job_id = workspace_builds.job_id
+WHERE
+ workspace_builds.workspace_id = $1 :: uuid AND
+ workspace_builds.build_number = $2 :: int
+`
+
+type GetWorkspaceAgentsByWorkspaceAndBuildNumberParams struct {
+ WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"`
+ BuildNumber int32 `db:"build_number" json:"build_number"`
+}
+
+func (q *sqlQuerier) GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx context.Context, arg GetWorkspaceAgentsByWorkspaceAndBuildNumberParams) ([]WorkspaceAgent, error) {
+ rows, err := q.db.QueryContext(ctx, getWorkspaceAgentsByWorkspaceAndBuildNumber, arg.WorkspaceID, arg.BuildNumber)
+ if err != nil {
+ return nil, err
+ }
+ defer rows.Close()
+ var items []WorkspaceAgent
+ for rows.Next() {
+ var i WorkspaceAgent
+ if err := rows.Scan(
+ &i.ID,
+ &i.CreatedAt,
+ &i.UpdatedAt,
+ &i.Name,
+ &i.FirstConnectedAt,
+ &i.LastConnectedAt,
+ &i.DisconnectedAt,
+ &i.ResourceID,
+ &i.AuthToken,
+ &i.AuthInstanceID,
+ &i.Architecture,
+ &i.EnvironmentVariables,
+ &i.OperatingSystem,
+ &i.InstanceMetadata,
+ &i.ResourceMetadata,
+ &i.Directory,
+ &i.Version,
+ &i.LastConnectedReplicaID,
+ &i.ConnectionTimeoutSeconds,
+ &i.TroubleshootingURL,
+ &i.MOTDFile,
+ &i.LifecycleState,
+ &i.ExpandedDirectory,
+ &i.LogsLength,
+ &i.LogsOverflowed,
+ &i.StartedAt,
+ &i.ReadyAt,
+ pq.Array(&i.Subsystems),
+ pq.Array(&i.DisplayApps),
+ &i.APIVersion,
+ &i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
); err != nil {
return nil, err
}
@@ -14400,7 +14517,7 @@ func (q *sqlQuerier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []
}
const getWorkspaceAgentsCreatedAfter = `-- name: GetWorkspaceAgentsCreatedAfter :many
-SELECT id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order FROM workspace_agents WHERE created_at > $1
+SELECT id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order, parent_id, api_key_scope FROM workspace_agents WHERE created_at > $1
`
func (q *sqlQuerier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceAgent, error) {
@@ -14444,6 +14561,8 @@ func (q *sqlQuerier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, created
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
); err != nil {
return nil, err
}
@@ -14460,7 +14579,7 @@ func (q *sqlQuerier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, created
const getWorkspaceAgentsInLatestBuildByWorkspaceID = `-- name: GetWorkspaceAgentsInLatestBuildByWorkspaceID :many
SELECT
- workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order
+ workspace_agents.id, workspace_agents.created_at, workspace_agents.updated_at, workspace_agents.name, workspace_agents.first_connected_at, workspace_agents.last_connected_at, workspace_agents.disconnected_at, workspace_agents.resource_id, workspace_agents.auth_token, workspace_agents.auth_instance_id, workspace_agents.architecture, workspace_agents.environment_variables, workspace_agents.operating_system, workspace_agents.instance_metadata, workspace_agents.resource_metadata, workspace_agents.directory, workspace_agents.version, workspace_agents.last_connected_replica_id, workspace_agents.connection_timeout_seconds, workspace_agents.troubleshooting_url, workspace_agents.motd_file, workspace_agents.lifecycle_state, workspace_agents.expanded_directory, workspace_agents.logs_length, workspace_agents.logs_overflowed, workspace_agents.started_at, workspace_agents.ready_at, workspace_agents.subsystems, workspace_agents.display_apps, workspace_agents.api_version, workspace_agents.display_order, workspace_agents.parent_id, workspace_agents.api_key_scope
FROM
workspace_agents
JOIN
@@ -14520,6 +14639,8 @@ func (q *sqlQuerier) GetWorkspaceAgentsInLatestBuildByWorkspaceID(ctx context.Co
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
); err != nil {
return nil, err
}
@@ -14538,6 +14659,7 @@ const insertWorkspaceAgent = `-- name: InsertWorkspaceAgent :one
INSERT INTO
workspace_agents (
id,
+ parent_id,
created_at,
updated_at,
name,
@@ -14554,14 +14676,16 @@ INSERT INTO
troubleshooting_url,
motd_file,
display_apps,
- display_order
+ display_order,
+ api_key_scope
)
VALUES
- ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) RETURNING id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20) RETURNING id, created_at, updated_at, name, first_connected_at, last_connected_at, disconnected_at, resource_id, auth_token, auth_instance_id, architecture, environment_variables, operating_system, instance_metadata, resource_metadata, directory, version, last_connected_replica_id, connection_timeout_seconds, troubleshooting_url, motd_file, lifecycle_state, expanded_directory, logs_length, logs_overflowed, started_at, ready_at, subsystems, display_apps, api_version, display_order, parent_id, api_key_scope
`
type InsertWorkspaceAgentParams struct {
ID uuid.UUID `db:"id" json:"id"`
+ ParentID uuid.NullUUID `db:"parent_id" json:"parent_id"`
CreatedAt time.Time `db:"created_at" json:"created_at"`
UpdatedAt time.Time `db:"updated_at" json:"updated_at"`
Name string `db:"name" json:"name"`
@@ -14579,11 +14703,13 @@ type InsertWorkspaceAgentParams struct {
MOTDFile string `db:"motd_file" json:"motd_file"`
DisplayApps []DisplayApp `db:"display_apps" json:"display_apps"`
DisplayOrder int32 `db:"display_order" json:"display_order"`
+ APIKeyScope AgentKeyScopeEnum `db:"api_key_scope" json:"api_key_scope"`
}
func (q *sqlQuerier) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspaceAgentParams) (WorkspaceAgent, error) {
row := q.db.QueryRowContext(ctx, insertWorkspaceAgent,
arg.ID,
+ arg.ParentID,
arg.CreatedAt,
arg.UpdatedAt,
arg.Name,
@@ -14601,6 +14727,7 @@ func (q *sqlQuerier) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspa
arg.MOTDFile,
pq.Array(arg.DisplayApps),
arg.DisplayOrder,
+ arg.APIKeyScope,
)
var i WorkspaceAgent
err := row.Scan(
@@ -14635,6 +14762,8 @@ func (q *sqlQuerier) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspa
pq.Array(&i.DisplayApps),
&i.APIVersion,
&i.DisplayOrder,
+ &i.ParentID,
+ &i.APIKeyScope,
)
return i, err
}
@@ -18075,7 +18204,7 @@ LEFT JOIN LATERAL (
) latest_build ON TRUE
LEFT JOIN LATERAL (
SELECT
- id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level
+ id, created_at, updated_at, organization_id, deleted, name, provisioner, active_version_id, description, default_ttl, created_by, icon, user_acl, group_acl, display_name, allow_user_cancel_workspace_jobs, allow_user_autostart, allow_user_autostop, failure_ttl, time_til_dormant, time_til_dormant_autodelete, autostop_requirement_days_of_week, autostop_requirement_weeks, autostart_block_days_of_week, require_active_version, deprecated, activity_bump, max_port_sharing_level, use_classic_parameter_flow
FROM
templates
WHERE
diff --git a/coderd/database/queries/prebuilds.sql b/coderd/database/queries/prebuilds.sql
index 1d3a827c98586..8c27ddf62b7c3 100644
--- a/coderd/database/queries/prebuilds.sql
+++ b/coderd/database/queries/prebuilds.sql
@@ -15,6 +15,7 @@ WHERE w.id IN (
AND b.template_version_id = t.active_version_id
AND p.current_preset_id = @preset_id::uuid
AND p.ready
+ AND NOT t.deleted
LIMIT 1 FOR UPDATE OF p SKIP LOCKED -- Ensure that a concurrent request will not select the same prebuild.
)
RETURNING w.id, w.name;
@@ -40,6 +41,7 @@ FROM templates t
INNER JOIN template_version_presets tvp ON tvp.template_version_id = tv.id
INNER JOIN organizations o ON o.id = t.organization_id
WHERE tvp.desired_instances IS NOT NULL -- Consider only presets that have a prebuild configuration.
+ -- AND NOT t.deleted -- We don't exclude deleted templates because there's no constraint in the DB preventing a soft deletion on a template while workspaces are running.
AND (t.id = sqlc.narg('template_id')::uuid OR sqlc.narg('template_id') IS NULL);
-- name: GetRunningPrebuiltWorkspaces :many
@@ -70,6 +72,7 @@ FROM workspace_latest_builds wlb
-- prebuilds that are still building.
INNER JOIN templates t ON t.active_version_id = wlb.template_version_id
WHERE wlb.job_status IN ('pending'::provisioner_job_status, 'running'::provisioner_job_status)
+ -- AND NOT t.deleted -- We don't exclude deleted templates because there's no constraint in the DB preventing a soft deletion on a template while workspaces are running.
GROUP BY t.id, wpb.template_version_id, wpb.transition, wlb.template_version_preset_id;
-- GetPresetsBackoff groups workspace builds by preset ID.
@@ -98,6 +101,7 @@ WITH filtered_builds AS (
WHERE tvp.desired_instances IS NOT NULL -- Consider only presets that have a prebuild configuration.
AND wlb.transition = 'start'::workspace_transition
AND w.owner_id = 'c42fdf75-3097-471c-8c33-fb52454d81c0'
+ AND NOT t.deleted
),
time_sorted_builds AS (
-- Group builds by preset, then sort each group by created_at.
diff --git a/coderd/database/queries/presets.sql b/coderd/database/queries/presets.sql
index 15bcea0c28fb5..6d5646a285b4a 100644
--- a/coderd/database/queries/presets.sql
+++ b/coderd/database/queries/presets.sql
@@ -1,5 +1,6 @@
-- name: InsertPreset :one
INSERT INTO template_version_presets (
+ id,
template_version_id,
name,
created_at,
@@ -7,6 +8,7 @@ INSERT INTO template_version_presets (
invalidate_after_secs
)
VALUES (
+ @id,
@template_version_id,
@name,
@created_at,
diff --git a/coderd/database/queries/templates.sql b/coderd/database/queries/templates.sql
index 84df9633a1a53..3a0d34885f3d9 100644
--- a/coderd/database/queries/templates.sql
+++ b/coderd/database/queries/templates.sql
@@ -124,7 +124,8 @@ SET
display_name = $6,
allow_user_cancel_workspace_jobs = $7,
group_acl = $8,
- max_port_sharing_level = $9
+ max_port_sharing_level = $9,
+ use_classic_parameter_flow = $10
WHERE
id = $1
;
diff --git a/coderd/database/queries/templateversionterraformvalues.sql b/coderd/database/queries/templateversionterraformvalues.sql
index 61d5e23cf5c5c..2ded4a2675375 100644
--- a/coderd/database/queries/templateversionterraformvalues.sql
+++ b/coderd/database/queries/templateversionterraformvalues.sql
@@ -11,11 +11,15 @@ INSERT INTO
template_version_terraform_values (
template_version_id,
cached_plan,
- updated_at
+ cached_module_files,
+ updated_at,
+ provisionerd_version
)
VALUES
(
(select id from template_versions where job_id = @job_id),
@cached_plan,
- @updated_at
+ @cached_module_files,
+ @updated_at,
+ @provisionerd_version
);
diff --git a/coderd/database/queries/workspaceagents.sql b/coderd/database/queries/workspaceagents.sql
index 52d8b5275fc97..5965f0cb16fbf 100644
--- a/coderd/database/queries/workspaceagents.sql
+++ b/coderd/database/queries/workspaceagents.sql
@@ -31,6 +31,7 @@ SELECT * FROM workspace_agents WHERE created_at > $1;
INSERT INTO
workspace_agents (
id,
+ parent_id,
created_at,
updated_at,
name,
@@ -47,10 +48,11 @@ INSERT INTO
troubleshooting_url,
motd_file,
display_apps,
- display_order
+ display_order,
+ api_key_scope
)
VALUES
- ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) RETURNING *;
+ ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20) RETURNING *;
-- name: UpdateWorkspaceAgentConnectionByID :exec
UPDATE
@@ -252,6 +254,19 @@ WHERE
wb.workspace_id = @workspace_id :: uuid
);
+-- name: GetWorkspaceAgentsByWorkspaceAndBuildNumber :many
+SELECT
+ workspace_agents.*
+FROM
+ workspace_agents
+JOIN
+ workspace_resources ON workspace_agents.resource_id = workspace_resources.id
+JOIN
+ workspace_builds ON workspace_resources.job_id = workspace_builds.job_id
+WHERE
+ workspace_builds.workspace_id = @workspace_id :: uuid AND
+ workspace_builds.build_number = @build_number :: int;
+
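The generated Go wrapper for this query appears earlier in this diff as `GetWorkspaceAgentsByWorkspaceAndBuildNumber`. As a hedged caller sketch (the function name and the build number are illustrative assumptions, not part of the change):

```go
package example

import (
	"context"
	"fmt"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/database"
)

// listBuildAgents lists the agents attached to a specific workspace build
// using the new query. The helper name and build number are illustrative.
func listBuildAgents(ctx context.Context, db database.Store, workspaceID uuid.UUID) error {
	agents, err := db.GetWorkspaceAgentsByWorkspaceAndBuildNumber(ctx, database.GetWorkspaceAgentsByWorkspaceAndBuildNumberParams{
		WorkspaceID: workspaceID,
		BuildNumber: 2, // build numbers start at 1 for the initial build
	})
	if err != nil {
		return err
	}
	for _, agent := range agents {
		// APIKeyScope is one of the new columns added in this diff.
		fmt.Println(agent.Name, agent.APIKeyScope)
	}
	return nil
}
```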
-- name: GetWorkspaceAgentAndLatestBuildByAuthToken :one
SELECT
sqlc.embed(workspaces),
diff --git a/coderd/externalauth_test.go b/coderd/externalauth_test.go
index 87197528fc087..c9ba4911214de 100644
--- a/coderd/externalauth_test.go
+++ b/coderd/externalauth_test.go
@@ -706,4 +706,82 @@ func TestExternalAuthCallback(t *testing.T) {
})
require.NoError(t, err)
})
+ t.Run("AgentAPIKeyScope", func(t *testing.T) {
+ t.Parallel()
+
+ for _, tt := range []struct {
+ apiKeyScope string
+ expectsError bool
+ }{
+ {apiKeyScope: "all", expectsError: false},
+ {apiKeyScope: "no_user_data", expectsError: true},
+ } {
+ t.Run(tt.apiKeyScope, func(t *testing.T) {
+ t.Parallel()
+
+ client := coderdtest.New(t, &coderdtest.Options{
+ IncludeProvisionerDaemon: true,
+ ExternalAuthConfigs: []*externalauth.Config{{
+ InstrumentedOAuth2Config: &testutil.OAuth2Config{},
+ ID: "github",
+ Regex: regexp.MustCompile(`github\.com`),
+ Type: codersdk.EnhancedExternalAuthProviderGitHub.String(),
+ }},
+ })
+ user := coderdtest.CreateFirstUser(t, client)
+ authToken := uuid.NewString()
+ version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
+ Parse: echo.ParseComplete,
+ ProvisionPlan: echo.PlanComplete,
+ ProvisionApply: echo.ProvisionApplyWithAgentAndAPIKeyScope(authToken, tt.apiKeyScope),
+ })
+ template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
+ coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
+ workspace := coderdtest.CreateWorkspace(t, client, template.ID)
+ coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
+
+ agentClient := agentsdk.New(client.URL)
+ agentClient.SetSessionToken(authToken)
+
+ token, err := agentClient.ExternalAuth(t.Context(), agentsdk.ExternalAuthRequest{
+ Match: "github.com/asd/asd",
+ })
+
+ if tt.expectsError {
+ require.Error(t, err)
+ var sdkErr *codersdk.Error
+ require.ErrorAs(t, err, &sdkErr)
+ require.Equal(t, http.StatusForbidden, sdkErr.StatusCode())
+ return
+ }
+
+ require.NoError(t, err)
+ require.NotEmpty(t, token.URL)
+
+ // Start waiting for the token callback...
+ tokenChan := make(chan agentsdk.ExternalAuthResponse, 1)
+ go func() {
+ token, err := agentClient.ExternalAuth(t.Context(), agentsdk.ExternalAuthRequest{
+ Match: "github.com/asd/asd",
+ Listen: true,
+ })
+ assert.NoError(t, err)
+ tokenChan <- token
+ }()
+
+ time.Sleep(250 * time.Millisecond)
+
+ resp := coderdtest.RequestExternalAuthCallback(t, "github", client)
+ require.Equal(t, http.StatusTemporaryRedirect, resp.StatusCode)
+
+ token = <-tokenChan
+ require.Equal(t, "access_token", token.Username)
+
+ token, err = agentClient.ExternalAuth(t.Context(), agentsdk.ExternalAuthRequest{
+ Match: "github.com/asd/asd",
+ })
+ require.NoError(t, err)
+ })
+ }
+ })
}
diff --git a/coderd/files/cache.go b/coderd/files/cache.go
index b823680fa7245..56e9a715de189 100644
--- a/coderd/files/cache.go
+++ b/coderd/files/cache.go
@@ -16,7 +16,7 @@ import (
// NewFromStore returns a file cache that will fetch files from the provided
// database.
-func NewFromStore(store database.Store) Cache {
+func NewFromStore(store database.Store) *Cache {
fetcher := func(ctx context.Context, fileID uuid.UUID) (fs.FS, error) {
file, err := store.GetFileByID(ctx, fileID)
if err != nil {
@@ -27,7 +27,7 @@ func NewFromStore(store database.Store) Cache {
return archivefs.FromTarReader(content), nil
}
- return Cache{
+ return &Cache{
lock: sync.Mutex{},
data: make(map[uuid.UUID]*cacheEntry),
fetcher: fetcher,
@@ -63,7 +63,11 @@ func (c *Cache) Acquire(ctx context.Context, fileID uuid.UUID) (fs.FS, error) {
// mutex has been released, or we would continue to hold the lock until the
// entire file has been fetched, which may be slow, and would prevent other
// files from being fetched in parallel.
- return c.prepare(ctx, fileID).Load()
+ it, err := c.prepare(ctx, fileID).Load()
+ if err != nil {
+ c.Release(fileID)
+ }
+ return it, err
}
func (c *Cache) prepare(ctx context.Context, fileID uuid.UUID) *lazy.ValueWithError[fs.FS] {
@@ -108,3 +112,12 @@ func (c *Cache) Release(fileID uuid.UUID) {
delete(c.data, fileID)
}
+
+// Count returns the number of files currently in the cache.
+// Mainly used for unit testing assertions.
+func (c *Cache) Count() int {
+ c.lock.Lock()
+ defer c.lock.Unlock()
+
+ return len(c.data)
+}
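The error-path change above means a failed `Acquire` drops its own reference, so callers only pair a `Release` with a successful `Acquire`. A minimal usage sketch follows; the helper name, the `main.tf` path, and the per-call construction of the cache are illustrative assumptions (in practice the cache is built once and shared):

```go
package example

import (
	"context"
	"io/fs"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/database"
	"github.com/coder/coder/v2/coderd/files"
)

// readTemplateFile sketches the acquire/release pairing that the fix
// preserves: a failed Acquire has already released its reference, so only
// the success path needs a matching Release.
func readTemplateFile(ctx context.Context, store database.Store, fileID uuid.UUID) ([]byte, error) {
	cache := files.NewFromStore(store)

	fsys, err := cache.Acquire(ctx, fileID)
	if err != nil {
		return nil, err // nothing to release; Acquire cleaned up after itself
	}
	defer cache.Release(fileID)

	// The returned fs.FS exposes the archived file contents.
	return fs.ReadFile(fsys, "main.tf")
}
```

The new `Count` method added below gives tests a simple way to assert that this acquire/release bookkeeping balances out.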
diff --git a/coderd/files/overlay.go b/coderd/files/overlay.go
new file mode 100644
index 0000000000000..fa0e590d1e6c2
--- /dev/null
+++ b/coderd/files/overlay.go
@@ -0,0 +1,51 @@
+package files
+
+import (
+ "io/fs"
+ "path"
+ "strings"
+)
+
+// overlayFS allows you to "join" together multiple fs.FS. Files in any specific
+// overlay will only be accessible if their path starts with the base path
+// provided for the overlay. For example, an overlay at the path .terraform/modules
+// should contain files with paths inside the .terraform/modules folder.
+type overlayFS struct {
+ baseFS fs.FS
+ overlays []Overlay
+}
+
+type Overlay struct {
+ Path string
+ fs.FS
+}
+
+func NewOverlayFS(baseFS fs.FS, overlays []Overlay) fs.FS {
+ return overlayFS{
+ baseFS: baseFS,
+ overlays: overlays,
+ }
+}
+
+func (f overlayFS) target(p string) fs.FS {
+ target := f.baseFS
+ for _, overlay := range f.overlays {
+ if strings.HasPrefix(path.Clean(p), overlay.Path) {
+ target = overlay.FS
+ break
+ }
+ }
+ return target
+}
+
+func (f overlayFS) Open(p string) (fs.File, error) {
+ return f.target(p).Open(p)
+}
+
+func (f overlayFS) ReadDir(p string) ([]fs.DirEntry, error) {
+ return fs.ReadDir(f.target(p), p)
+}
+
+func (f overlayFS) ReadFile(p string) ([]byte, error) {
+ return fs.ReadFile(f.target(p), p)
+}
diff --git a/coderd/files/overlay_test.go b/coderd/files/overlay_test.go
new file mode 100644
index 0000000000000..29209a478d552
--- /dev/null
+++ b/coderd/files/overlay_test.go
@@ -0,0 +1,43 @@
+package files_test
+
+import (
+ "io/fs"
+ "testing"
+
+ "github.com/spf13/afero"
+ "github.com/stretchr/testify/require"
+
+ "github.com/coder/coder/v2/coderd/files"
+)
+
+func TestOverlayFS(t *testing.T) {
+ t.Parallel()
+
+ a := afero.NewMemMapFs()
+ afero.WriteFile(a, "main.tf", []byte("terraform {}"), 0o644)
+ afero.WriteFile(a, ".terraform/modules/example_module/main.tf", []byte("inaccessible"), 0o644)
+ afero.WriteFile(a, ".terraform/modules/other_module/main.tf", []byte("inaccessible"), 0o644)
+ b := afero.NewMemMapFs()
+ afero.WriteFile(b, ".terraform/modules/modules.json", []byte("{}"), 0o644)
+ afero.WriteFile(b, ".terraform/modules/example_module/main.tf", []byte("terraform {}"), 0o644)
+
+ it := files.NewOverlayFS(afero.NewIOFS(a), []files.Overlay{{
+ Path: ".terraform/modules",
+ FS: afero.NewIOFS(b),
+ }})
+
+ content, err := fs.ReadFile(it, "main.tf")
+ require.NoError(t, err)
+ require.Equal(t, "terraform {}", string(content))
+
+ _, err = fs.ReadFile(it, ".terraform/modules/other_module/main.tf")
+ require.Error(t, err)
+
+ content, err = fs.ReadFile(it, ".terraform/modules/modules.json")
+ require.NoError(t, err)
+ require.Equal(t, "{}", string(content))
+
+ content, err = fs.ReadFile(it, ".terraform/modules/example_module/main.tf")
+ require.NoError(t, err)
+ require.Equal(t, "terraform {}", string(content))
+}
diff --git a/coderd/gitsshkey.go b/coderd/gitsshkey.go
index 110c16c7409d2..b9724689c5a7b 100644
--- a/coderd/gitsshkey.go
+++ b/coderd/gitsshkey.go
@@ -145,6 +145,10 @@ func (api *API) agentGitSSHKey(rw http.ResponseWriter, r *http.Request) {
}
gitSSHKey, err := api.Database.GetGitSSHKey(ctx, workspace.OwnerID)
+ if httpapi.IsUnauthorizedError(err) {
+ httpapi.Forbidden(rw)
+ return
+ }
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Internal error fetching git SSH key.",
diff --git a/coderd/gitsshkey_test.go b/coderd/gitsshkey_test.go
index 22d23176aa1c8..abd18508ce018 100644
--- a/coderd/gitsshkey_test.go
+++ b/coderd/gitsshkey_test.go
@@ -2,6 +2,7 @@ package coderd_test
import (
"context"
+ "net/http"
"testing"
"github.com/google/uuid"
@@ -12,6 +13,7 @@ import (
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/gitsshkey"
+ "github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/codersdk/agentsdk"
"github.com/coder/coder/v2/provisioner/echo"
"github.com/coder/coder/v2/testutil"
@@ -126,3 +128,51 @@ func TestAgentGitSSHKey(t *testing.T) {
require.NoError(t, err)
require.NotEmpty(t, agentKey.PrivateKey)
}
+
+func TestAgentGitSSHKey_APIKeyScopes(t *testing.T) {
+ t.Parallel()
+
+ for _, tt := range []struct {
+ apiKeyScope string
+ expectError bool
+ }{
+ {apiKeyScope: "all", expectError: false},
+ {apiKeyScope: "no_user_data", expectError: true},
+ } {
+ t.Run(tt.apiKeyScope, func(t *testing.T) {
+ t.Parallel()
+
+ client := coderdtest.New(t, &coderdtest.Options{
+ IncludeProvisionerDaemon: true,
+ })
+ user := coderdtest.CreateFirstUser(t, client)
+ authToken := uuid.NewString()
+ version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, &echo.Responses{
+ Parse: echo.ParseComplete,
+ ProvisionPlan: echo.PlanComplete,
+ ProvisionApply: echo.ProvisionApplyWithAgentAndAPIKeyScope(authToken, tt.apiKeyScope),
+ })
+ project := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID)
+ coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID)
+ workspace := coderdtest.CreateWorkspace(t, client, project.ID)
+ coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID)
+
+ agentClient := agentsdk.New(client.URL)
+ agentClient.SetSessionToken(authToken)
+
+ ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong)
+ defer cancel()
+
+ _, err := agentClient.GitSSHKey(ctx)
+
+ if tt.expectError {
+ require.Error(t, err)
+ var sdkErr *codersdk.Error
+ require.ErrorAs(t, err, &sdkErr)
+ require.Equal(t, http.StatusForbidden, sdkErr.StatusCode())
+ } else {
+ require.NoError(t, err)
+ }
+ })
+ }
+}
diff --git a/coderd/httpmw/apikey.go b/coderd/httpmw/apikey.go
index d614b37a3d897..4b92848b773e2 100644
--- a/coderd/httpmw/apikey.go
+++ b/coderd/httpmw/apikey.go
@@ -232,16 +232,21 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
return optionalWrite(http.StatusUnauthorized, resp)
}
- var (
- link database.UserLink
- now = dbtime.Now()
- // Tracks if the API key has properties updated
- changed = false
- )
+ now := dbtime.Now()
+ if key.ExpiresAt.Before(now) {
+ return optionalWrite(http.StatusUnauthorized, codersdk.Response{
+ Message: SignedOutErrorMessage,
+ Detail: fmt.Sprintf("API key expired at %q.", key.ExpiresAt.String()),
+ })
+ }
+
+	// We only check OIDC stuff if we have a valid APIKey. An expired key means we don't trust that the requestor
+	// really is the user the key belongs to, so we shouldn't do anything on their behalf, including possibly
+	// refreshing the OIDC token.
if key.LoginType == database.LoginTypeGithub || key.LoginType == database.LoginTypeOIDC {
var err error
//nolint:gocritic // System needs to fetch UserLink to check if it's valid.
- link, err = cfg.DB.GetUserLinkByUserIDLoginType(dbauthz.AsSystemRestricted(ctx), database.GetUserLinkByUserIDLoginTypeParams{
+ link, err := cfg.DB.GetUserLinkByUserIDLoginType(dbauthz.AsSystemRestricted(ctx), database.GetUserLinkByUserIDLoginTypeParams{
UserID: key.UserID,
LoginType: key.LoginType,
})
@@ -258,7 +263,7 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
})
}
// Check if the OAuth token is expired
- if link.OAuthExpiry.Before(now) && !link.OAuthExpiry.IsZero() && link.OAuthRefreshToken != "" {
+ if !link.OAuthExpiry.IsZero() && link.OAuthExpiry.Before(now) {
if cfg.OAuth2Configs.IsZero() {
return write(http.StatusInternalServerError, codersdk.Response{
Message: internalErrorMessage,
@@ -267,12 +272,15 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
})
}
+ var friendlyName string
var oauthConfig promoauth.OAuth2Config
switch key.LoginType {
case database.LoginTypeGithub:
oauthConfig = cfg.OAuth2Configs.Github
+ friendlyName = "GitHub"
case database.LoginTypeOIDC:
oauthConfig = cfg.OAuth2Configs.OIDC
+ friendlyName = "OpenID Connect"
default:
return write(http.StatusInternalServerError, codersdk.Response{
Message: internalErrorMessage,
@@ -292,7 +300,13 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
})
}
- // If it is, let's refresh it from the provided config
+ if link.OAuthRefreshToken == "" {
+ return optionalWrite(http.StatusUnauthorized, codersdk.Response{
+ Message: SignedOutErrorMessage,
+ Detail: fmt.Sprintf("%s session expired at %q. Try signing in again.", friendlyName, link.OAuthExpiry.String()),
+ })
+ }
+ // We have a refresh token, so let's try it
token, err := oauthConfig.TokenSource(r.Context(), &oauth2.Token{
AccessToken: link.OAuthAccessToken,
RefreshToken: link.OAuthRefreshToken,
@@ -300,28 +314,39 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
}).Token()
if err != nil {
return write(http.StatusUnauthorized, codersdk.Response{
- Message: "Could not refresh expired Oauth token. Try re-authenticating to resolve this issue.",
- Detail: err.Error(),
+ Message: fmt.Sprintf(
+ "Could not refresh expired %s token. Try re-authenticating to resolve this issue.",
+ friendlyName),
+ Detail: err.Error(),
})
}
link.OAuthAccessToken = token.AccessToken
link.OAuthRefreshToken = token.RefreshToken
link.OAuthExpiry = token.Expiry
- key.ExpiresAt = token.Expiry
- changed = true
+ //nolint:gocritic // system needs to update user link
+ link, err = cfg.DB.UpdateUserLink(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLinkParams{
+ UserID: link.UserID,
+ LoginType: link.LoginType,
+ OAuthAccessToken: link.OAuthAccessToken,
+ OAuthAccessTokenKeyID: sql.NullString{}, // dbcrypt will update as required
+ OAuthRefreshToken: link.OAuthRefreshToken,
+ OAuthRefreshTokenKeyID: sql.NullString{}, // dbcrypt will update as required
+ OAuthExpiry: link.OAuthExpiry,
+ // Refresh should keep the same debug context because we use
+ // the original claims for the group/role sync.
+ Claims: link.Claims,
+ })
+ if err != nil {
+ return write(http.StatusInternalServerError, codersdk.Response{
+ Message: internalErrorMessage,
+ Detail: fmt.Sprintf("update user_link: %s.", err.Error()),
+ })
+ }
}
}
- // Checking if the key is expired.
- // NOTE: The `RequireAuth` React component depends on this `Detail` to detect when
- // the users token has expired. If you change the text here, make sure to update it
- // in site/src/components/RequireAuth/RequireAuth.tsx as well.
- if key.ExpiresAt.Before(now) {
- return optionalWrite(http.StatusUnauthorized, codersdk.Response{
- Message: SignedOutErrorMessage,
- Detail: fmt.Sprintf("API key expired at %q.", key.ExpiresAt.String()),
- })
- }
+ // Tracks if the API key has properties updated
+ changed := false
// Only update LastUsed once an hour to prevent database spam.
if now.Sub(key.LastUsed) > time.Hour {
@@ -363,29 +388,6 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon
Detail: fmt.Sprintf("API key couldn't update: %s.", err.Error()),
})
}
- // If the API Key is associated with a user_link (e.g. Github/OIDC)
- // then we want to update the relevant oauth fields.
- if link.UserID != uuid.Nil {
- //nolint:gocritic // system needs to update user link
- link, err = cfg.DB.UpdateUserLink(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLinkParams{
- UserID: link.UserID,
- LoginType: link.LoginType,
- OAuthAccessToken: link.OAuthAccessToken,
- OAuthAccessTokenKeyID: sql.NullString{}, // dbcrypt will update as required
- OAuthRefreshToken: link.OAuthRefreshToken,
- OAuthRefreshTokenKeyID: sql.NullString{}, // dbcrypt will update as required
- OAuthExpiry: link.OAuthExpiry,
- // Refresh should keep the same debug context because we use
- // the original claims for the group/role sync.
- Claims: link.Claims,
- })
- if err != nil {
- return write(http.StatusInternalServerError, codersdk.Response{
- Message: internalErrorMessage,
- Detail: fmt.Sprintf("update user_link: %s.", err.Error()),
- })
- }
- }
// We only want to update this occasionally to reduce DB write
// load. We update alongside the UserLink and APIKey since it's
diff --git a/coderd/httpmw/apikey_test.go b/coderd/httpmw/apikey_test.go
index bd979e88235ad..6e2e75ace9825 100644
--- a/coderd/httpmw/apikey_test.go
+++ b/coderd/httpmw/apikey_test.go
@@ -508,6 +508,102 @@ func TestAPIKey(t *testing.T) {
require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt)
})
+ t.Run("APIKeyExpiredOAuthExpired", func(t *testing.T) {
+ t.Parallel()
+ var (
+ db = dbmem.New()
+ user = dbgen.User(t, db, database.User{})
+ sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{
+ UserID: user.ID,
+ LastUsed: dbtime.Now().AddDate(0, 0, -1),
+ ExpiresAt: dbtime.Now().AddDate(0, 0, -1),
+ LoginType: database.LoginTypeOIDC,
+ })
+ _ = dbgen.UserLink(t, db, database.UserLink{
+ UserID: user.ID,
+ LoginType: database.LoginTypeOIDC,
+ OAuthExpiry: dbtime.Now().AddDate(0, 0, -1),
+ })
+
+ r = httptest.NewRequest("GET", "/", nil)
+ rw = httptest.NewRecorder()
+ )
+ r.Header.Set(codersdk.SessionTokenHeader, token)
+
+ // Include a valid oauth token for refreshing. If this token is invalid,
+ // it is difficult to tell an auth failure from an expired api key, or
+ // an expired oauth key.
+ oauthToken := &oauth2.Token{
+ AccessToken: "wow",
+ RefreshToken: "moo",
+ Expiry: dbtime.Now().AddDate(0, 0, 1),
+ }
+ httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
+ DB: db,
+ OAuth2Configs: &httpmw.OAuth2Configs{
+ OIDC: &testutil.OAuth2Config{
+ Token: oauthToken,
+ },
+ },
+ RedirectToLogin: false,
+ })(successHandler).ServeHTTP(rw, r)
+ res := rw.Result()
+ defer res.Body.Close()
+ require.Equal(t, http.StatusUnauthorized, res.StatusCode)
+
+ gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID)
+ require.NoError(t, err)
+
+ require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed)
+ require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt)
+ })
+
+ t.Run("APIKeyExpiredOAuthNotExpired", func(t *testing.T) {
+ t.Parallel()
+ var (
+ db = dbmem.New()
+ user = dbgen.User(t, db, database.User{})
+ sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{
+ UserID: user.ID,
+ LastUsed: dbtime.Now().AddDate(0, 0, -1),
+ ExpiresAt: dbtime.Now().AddDate(0, 0, -1),
+ LoginType: database.LoginTypeOIDC,
+ })
+ _ = dbgen.UserLink(t, db, database.UserLink{
+ UserID: user.ID,
+ LoginType: database.LoginTypeOIDC,
+ })
+
+ r = httptest.NewRequest("GET", "/", nil)
+ rw = httptest.NewRecorder()
+ )
+ r.Header.Set(codersdk.SessionTokenHeader, token)
+
+ oauthToken := &oauth2.Token{
+ AccessToken: "wow",
+ RefreshToken: "moo",
+ Expiry: dbtime.Now().AddDate(0, 0, 1),
+ }
+ httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
+ DB: db,
+ OAuth2Configs: &httpmw.OAuth2Configs{
+ OIDC: &testutil.OAuth2Config{
+ Token: oauthToken,
+ },
+ },
+ RedirectToLogin: false,
+ })(successHandler).ServeHTTP(rw, r)
+ res := rw.Result()
+ defer res.Body.Close()
+ require.Equal(t, http.StatusUnauthorized, res.StatusCode)
+
+ gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID)
+ require.NoError(t, err)
+
+ require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed)
+ require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt)
+ })
+
t.Run("OAuthRefresh", func(t *testing.T) {
t.Parallel()
var (
@@ -553,7 +649,67 @@ func TestAPIKey(t *testing.T) {
require.NoError(t, err)
require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed)
- require.Equal(t, oauthToken.Expiry, gotAPIKey.ExpiresAt)
+ // Note that OAuth expiry is independent of APIKey expiry, so an OIDC refresh DOES NOT affect the expiry of the
+ // APIKey
+ require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt)
+
+ gotLink, err := db.GetUserLinkByUserIDLoginType(r.Context(), database.GetUserLinkByUserIDLoginTypeParams{
+ UserID: user.ID,
+ LoginType: database.LoginTypeGithub,
+ })
+ require.NoError(t, err)
+ require.Equal(t, gotLink.OAuthRefreshToken, "moo")
+ })
+
+ t.Run("OAuthExpiredNoRefresh", func(t *testing.T) {
+ t.Parallel()
+ var (
+ ctx = testutil.Context(t, testutil.WaitShort)
+ db = dbmem.New()
+ user = dbgen.User(t, db, database.User{})
+ sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{
+ UserID: user.ID,
+ LastUsed: dbtime.Now(),
+ ExpiresAt: dbtime.Now().AddDate(0, 0, 1),
+ LoginType: database.LoginTypeGithub,
+ })
+
+ r = httptest.NewRequest("GET", "/", nil)
+ rw = httptest.NewRecorder()
+ )
+ _, err := db.InsertUserLink(ctx, database.InsertUserLinkParams{
+ UserID: user.ID,
+ LoginType: database.LoginTypeGithub,
+ OAuthExpiry: dbtime.Now().AddDate(0, 0, -1),
+ OAuthAccessToken: "letmein",
+ })
+ require.NoError(t, err)
+
+ r.Header.Set(codersdk.SessionTokenHeader, token)
+
+ oauthToken := &oauth2.Token{
+ AccessToken: "wow",
+ RefreshToken: "moo",
+ Expiry: dbtime.Now().AddDate(0, 0, 1),
+ }
+ httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{
+ DB: db,
+ OAuth2Configs: &httpmw.OAuth2Configs{
+ Github: &testutil.OAuth2Config{
+ Token: oauthToken,
+ },
+ },
+ RedirectToLogin: false,
+ })(successHandler).ServeHTTP(rw, r)
+ res := rw.Result()
+ defer res.Body.Close()
+ require.Equal(t, http.StatusUnauthorized, res.StatusCode)
+
+ gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID)
+ require.NoError(t, err)
+
+ require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed)
+ require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt)
})
t.Run("RemoteIPUpdates", func(t *testing.T) {
diff --git a/coderd/httpmw/workspaceagent.go b/coderd/httpmw/workspaceagent.go
index 241fa385681e6..0ee231b2f5a12 100644
--- a/coderd/httpmw/workspaceagent.go
+++ b/coderd/httpmw/workspaceagent.go
@@ -109,12 +109,18 @@ func ExtractWorkspaceAgentAndLatestBuild(opts ExtractWorkspaceAgentAndLatestBuil
return
}
- subject, _, err := UserRBACSubject(ctx, opts.DB, row.WorkspaceTable.OwnerID, rbac.WorkspaceAgentScope(rbac.WorkspaceAgentScopeParams{
- WorkspaceID: row.WorkspaceTable.ID,
- OwnerID: row.WorkspaceTable.OwnerID,
- TemplateID: row.WorkspaceTable.TemplateID,
- VersionID: row.WorkspaceBuild.TemplateVersionID,
- }))
+ subject, _, err := UserRBACSubject(
+ ctx,
+ opts.DB,
+ row.WorkspaceTable.OwnerID,
+ rbac.WorkspaceAgentScope(rbac.WorkspaceAgentScopeParams{
+ WorkspaceID: row.WorkspaceTable.ID,
+ OwnerID: row.WorkspaceTable.OwnerID,
+ TemplateID: row.WorkspaceTable.TemplateID,
+ VersionID: row.WorkspaceBuild.TemplateVersionID,
+ BlockUserData: row.WorkspaceAgent.APIKeyScope == database.AgentKeyScopeEnumNoUserData,
+ }),
+ )
if err != nil {
httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{
Message: "Internal error with workspace agent authorization context.",
diff --git a/coderd/notifications/events.go b/coderd/notifications/events.go
index 2f45205bf33ec..35d9925055da5 100644
--- a/coderd/notifications/events.go
+++ b/coderd/notifications/events.go
@@ -39,6 +39,7 @@ var (
TemplateTemplateDeprecated = uuid.MustParse("f40fae84-55a2-42cd-99fa-b41c1ca64894")
TemplateWorkspaceBuildsFailedReport = uuid.MustParse("34a20db2-e9cc-4a93-b0e4-8569699d7a00")
+ TemplateWorkspaceResourceReplaced = uuid.MustParse("89d9745a-816e-4695-a17f-3d0a229e2b8d")
)
// Notification-related events.
diff --git a/coderd/notifications/notifications_test.go b/coderd/notifications/notifications_test.go
index 12372b74a14c3..8f8a3c82441e0 100644
--- a/coderd/notifications/notifications_test.go
+++ b/coderd/notifications/notifications_test.go
@@ -35,6 +35,9 @@ import (
"golang.org/x/xerrors"
"cdr.dev/slog"
+ "github.com/coder/quartz"
+ "github.com/coder/serpent"
+
"github.com/coder/coder/v2/coderd/coderdtest"
"github.com/coder/coder/v2/coderd/database"
"github.com/coder/coder/v2/coderd/database/dbauthz"
@@ -48,8 +51,6 @@ import (
"github.com/coder/coder/v2/coderd/util/syncmap"
"github.com/coder/coder/v2/codersdk"
"github.com/coder/coder/v2/testutil"
- "github.com/coder/quartz"
- "github.com/coder/serpent"
)
// updateGoldenFiles is a flag that can be set to update golden files.
@@ -1226,6 +1227,29 @@ func TestNotificationTemplates_Golden(t *testing.T) {
Labels: map[string]string{},
},
},
+ {
+ name: "TemplateWorkspaceResourceReplaced",
+ id: notifications.TemplateWorkspaceResourceReplaced,
+ payload: types.MessagePayload{
+ UserName: "Bobby",
+ UserEmail: "bobby@coder.com",
+ UserUsername: "bobby",
+ Labels: map[string]string{
+ "org": "cern",
+ "workspace": "my-workspace",
+ "workspace_build_num": "2",
+ "template": "docker",
+ "template_version": "angry_torvalds",
+ "preset": "particle-accelerator",
+ "claimant": "prebuilds-claimer",
+ },
+ Data: map[string]any{
+ "replacements": map[string]string{
+ "docker_container[0]": "env, hostname",
+ },
+ },
+ },
+ },
}
// We must have a test case for every notification_template. This is enforced below:
diff --git a/coderd/notifications/notificationstest/fake_enqueuer.go b/coderd/notifications/notificationstest/fake_enqueuer.go
index 8fbc2cee25806..568091818295c 100644
--- a/coderd/notifications/notificationstest/fake_enqueuer.go
+++ b/coderd/notifications/notificationstest/fake_enqueuer.go
@@ -9,6 +9,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/coder/coder/v2/coderd/database/dbauthz"
+ "github.com/coder/coder/v2/coderd/notifications"
"github.com/coder/coder/v2/coderd/rbac"
"github.com/coder/coder/v2/coderd/rbac/policy"
)
@@ -19,6 +20,12 @@ type FakeEnqueuer struct {
sent []*FakeNotification
}
+var _ notifications.Enqueuer = &FakeEnqueuer{}
+
+func NewFakeEnqueuer() *FakeEnqueuer {
+ return &FakeEnqueuer{}
+}
+
type FakeNotification struct {
UserID, TemplateID uuid.UUID
Labels map[string]string
diff --git a/coderd/notifications/testdata/rendered-templates/smtp/TemplateWorkspaceResourceReplaced.html.golden b/coderd/notifications/testdata/rendered-templates/smtp/TemplateWorkspaceResourceReplaced.html.golden
new file mode 100644
index 0000000000000..6d64eed0249a7
--- /dev/null
+++ b/coderd/notifications/testdata/rendered-templates/smtp/TemplateWorkspaceResourceReplaced.html.golden
@@ -0,0 +1,131 @@
+From: system@coder.com
+To: bobby@coder.com
+Subject: There might be a problem with a recently claimed prebuilt workspace
+Message-Id: 02ee4935-73be-4fa1-a290-ff9999026b13@blush-whale-48
+Date: Fri, 11 Oct 2024 09:03:06 +0000
+Content-Type: multipart/alternative; boundary=bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4
+MIME-Version: 1.0
+
+--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4
+Content-Transfer-Encoding: quoted-printable
+Content-Type: text/plain; charset=UTF-8
+
+Hi Bobby,
+
+Workspace my-workspace was claimed from a prebuilt workspace by prebuilds-c=
+laimer.
+
+During the claim, Terraform destroyed and recreated the following resources
+because one or more immutable attributes changed:
+
+docker_container[0] was replaced due to changes to env, hostname
+
+When Terraform must change an immutable attribute, it replaces the entire r=
+esource.
+If you=E2=80=99re using prebuilds to speed up provisioning, unexpected repl=
+acements will slow down
+workspace startup=E2=80=94even when claiming a prebuilt environment.
+
+For tips on preventing replacements and improving claim performance, see th=
+is guide (https://coder.com/docs/admin/templates/extending-templates/prebui=
+lt-workspaces#preventing-resource-replacement).
+
+NOTE: this prebuilt workspace used the particle-accelerator preset.
+
+
+View workspace build: http://test.com/@prebuilds-claimer/my-workspace/build=
+s/2
+
+View template version: http://test.com/templates/cern/docker/versions/angry=
+_torvalds
+
+--bbe61b741255b6098bb6b3c1f41b885773df633cb18d2a3002b68e4bc9c4
+Content-Transfer-Encoding: quoted-printable
+Content-Type: text/html; charset=UTF-8
+
+
+
+
+
+
+ There might be a problem with a recently claimed prebuilt worksp=
+ace
+
+
+
+
+
+
+
+ There might be a problem with a recently claimed prebuilt workspace
+
+
+
Hi Bobby,
+
Workspace my-workspace was claimed from a prebu=
+ilt workspace by prebuilds-claimer.
+
+
During the claim, Terraform destroyed and recreated the following resour=
+ces
+because one or more immutable attributes changed:
+
+
+
_dockercontainer[0] was replaced due to changes to env, h=
+ostname
+
+
+
+
When Terraform must change an immutable attribute, it replaces the entir=
+e resource.
+If you=E2=80=99re using prebuilds to speed up provisioning, unexpected repl=
+acements will slow down
+workspace startup=E2=80=94even when claiming a prebuilt environment.
+
+
For tips on preventing replacements and improving claim performance, see=
+ this guide.
+
+
NOTE: this prebuilt workspace used the particle-accelerator preset.
|
diff --git a/docs/admin/templates/extending-templates/prebuilt-workspaces.md b/docs/admin/templates/extending-templates/prebuilt-workspaces.md
index bbff3b7f15747..3fd82d62d1943 100644
--- a/docs/admin/templates/extending-templates/prebuilt-workspaces.md
+++ b/docs/admin/templates/extending-templates/prebuilt-workspaces.md
@@ -25,7 +25,7 @@ Prebuilt workspaces are tightly integrated with [workspace presets](./parameters
## Prerequisites
- [**Premium license**](../../licensing/index.md)
-- **Compatible Terraform provider**: Use `coder/coder` Terraform provider `>= 2.4.0`.
+- **Compatible Terraform provider**: Use `coder/coder` Terraform provider `>= 2.4.1`.
- **Feature flag**: Enable the `workspace-prebuilds` [experiment](../../../reference/cli/server.md#--experiments).
## Enable prebuilt workspaces for template presets
@@ -75,7 +75,7 @@ Prebuilt workspaces follow a specific lifecycle from creation through eligibilit
Prebuilt workspaces that fail during provisioning are retried with a backoff to prevent transient failures.
-1. When a developer requests a new workspace, the claiming process occurs:
+1. When a developer creates a new workspace, the claiming process occurs:
1. Developer selects a template and preset that has prebuilt workspaces configured.
1. If an eligible prebuilt workspace exists, ownership transfers from the `prebuilds` user to the requesting user.
@@ -84,13 +84,17 @@ Prebuilt workspaces follow a specific lifecycle from creation through eligibilit
[`coder_workspace_owner`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_owner)
datasources (see [Preventing resource replacement](#preventing-resource-replacement) for further considerations).
- The developer doesn't see the claiming process — the workspace will just be ready faster than usual.
+ The claiming process is transparent to the developer — the workspace will just be ready faster than usual.
You can view available prebuilt workspaces in the **Workspaces** view in the Coder dashboard:

_Note the search term `owner:prebuilds`._
+Unclaimed prebuilt workspaces can be interacted with in the same way as any other workspace.
+However, if a prebuilt workspace is stopped, the reconciliation loop will not destroy it.
+This gives template admins the ability to park problematic prebuilt workspaces in a stopped state for further investigation.
+
### Template updates and the prebuilt workspace lifecycle
Prebuilt workspaces are not updated after they are provisioned.
@@ -163,6 +167,19 @@ resource "docker_container" "workspace" {
Learn more about `ignore_changes` in the [Terraform documentation](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes).
+_A note on "immutable" attributes: Terraform providers may specify `ForceNew` on their resources' attributes. Any change
+to these attributes requires the replacement (destruction and recreation) of the managed resource instance, rather than an in-place update.
+For example, the [`ami`](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/instance#ami-1) attribute on the `aws_instance` resource
+has [`ForceNew`](https://github.com/hashicorp/terraform-provider-aws/blob/main/internal/service/ec2/ec2_instance.go#L75-L81) set,
+since the AMI cannot be changed in-place._
+
+#### Updating claimed prebuilt workspace templates
+
+Once a prebuilt workspace has been claimed, and if its template uses `ignore_changes`, users may run into an issue where the agent
+does not reconnect after a template update. This shortcoming is described in [this issue](https://github.com/coder/coder/issues/17840)
+and will be addressed before the next release (v2.23). In the interim, a simple workaround is to restart the workspace
+when it is in this problematic state.
+
### Current limitations
The prebuilt workspaces feature has these current limitations:
@@ -171,13 +188,13 @@ The prebuilt workspaces feature has these current limitations:
Prebuilt workspaces can only be used with the default organization.
- [coder/internal#364](https://github.com/coder/internal/issues/364)
+ [View issue](https://github.com/coder/internal/issues/364)
- **Autoscaling**
Prebuilt workspaces remain running until claimed. There's no automated mechanism to reduce instances during off-hours.
- [coder/internal#312](https://github.com/coder/internal/issues/312)
+ [View issue](https://github.com/coder/internal/issues/312)
### Monitoring and observability
diff --git a/docs/admin/users/index.md b/docs/admin/users/index.md
index af26f4bb62a2b..b7d98b919734c 100644
--- a/docs/admin/users/index.md
+++ b/docs/admin/users/index.md
@@ -206,3 +206,42 @@ The following filters are supported:
- `created_before` and `created_after` - The time a user was created. Uses the
RFC3339Nano format.
- `login_type` - Represents the login type of the user. Refer to the [LoginType documentation](https://pkg.go.dev/github.com/coder/coder/v2/codersdk#LoginType) for a list of supported values
+
+## Retrieve your list of Coder users
+
+
+
+You can use the Coder CLI or API to retrieve your list of users.
+
+### CLI
+
+Use `users list` to export the list of users to a CSV file:
+
+```shell
+coder users list > users.csv
+```
+
+Visit the [users list](../../reference/cli/users_list.md) documentation for more options.
+
+### API
+
+Use [get users](../../reference/api/users.md#get-users):
+
+```shell
+curl -X GET http://coder-server:8080/api/v2/users \
+ -H 'Accept: application/json' \
+ -H 'Coder-Session-Token: API_KEY'
+```
+
+To export the results to a CSV file, you can use [`jq`](https://jqlang.org/) to process the JSON response:
+
+```shell
+curl -X GET http://coder-server:8080/api/v2/users \
+ -H 'Accept: application/json' \
+ -H 'Coder-Session-Token: API_KEY' | \
+ jq -r '.users | (map(keys) | add | unique) as $cols | $cols, (.[] | [.[$cols[]]] | @csv)' > users.csv
+```
+
+Visit the [get users](../../reference/api/users.md#get-users) documentation for more options.
+
+
diff --git a/docs/images/user-guides/desktop/coder-desktop-file-sync-add.png b/docs/images/user-guides/desktop/coder-desktop-file-sync-add.png
new file mode 100644
index 0000000000000..35e59d76866f2
Binary files /dev/null and b/docs/images/user-guides/desktop/coder-desktop-file-sync-add.png differ
diff --git a/docs/images/user-guides/desktop/coder-desktop-file-sync-conflicts-mouseover.png b/docs/images/user-guides/desktop/coder-desktop-file-sync-conflicts-mouseover.png
new file mode 100644
index 0000000000000..80a5185585c1a
Binary files /dev/null and b/docs/images/user-guides/desktop/coder-desktop-file-sync-conflicts-mouseover.png differ
diff --git a/docs/images/user-guides/desktop/coder-desktop-file-sync-staging.png b/docs/images/user-guides/desktop/coder-desktop-file-sync-staging.png
new file mode 100644
index 0000000000000..6b846f3ef244f
Binary files /dev/null and b/docs/images/user-guides/desktop/coder-desktop-file-sync-staging.png differ
diff --git a/docs/images/user-guides/desktop/coder-desktop-file-sync-watching.png b/docs/images/user-guides/desktop/coder-desktop-file-sync-watching.png
new file mode 100644
index 0000000000000..7875980186e33
Binary files /dev/null and b/docs/images/user-guides/desktop/coder-desktop-file-sync-watching.png differ
diff --git a/docs/images/user-guides/desktop/coder-desktop-file-sync.png b/docs/images/user-guides/desktop/coder-desktop-file-sync.png
new file mode 100644
index 0000000000000..5976528010371
Binary files /dev/null and b/docs/images/user-guides/desktop/coder-desktop-file-sync.png differ
diff --git a/docs/images/user-guides/desktop/coder-desktop-workspaces.png b/docs/images/user-guides/desktop/coder-desktop-workspaces.png
index 664228fe214e7..c621c7e541094 100644
Binary files a/docs/images/user-guides/desktop/coder-desktop-workspaces.png and b/docs/images/user-guides/desktop/coder-desktop-workspaces.png differ
diff --git a/docs/install/releases/index.md b/docs/install/releases/index.md
index b6c27a67b1da1..96c6c4f03120b 100644
--- a/docs/install/releases/index.md
+++ b/docs/install/releases/index.md
@@ -57,13 +57,13 @@ pages.
| Release name | Release Date | Status | Latest Release |
|------------------------------------------------|-------------------|------------------|----------------------------------------------------------------|
-| [2.16](https://coder.com/changelog/coder-2-16) | October 01, 2024 | Not Supported | [v2.16.1](https://github.com/coder/coder/releases/tag/v2.16.1) |
-| [2.17](https://coder.com/changelog/coder-2-17) | November 05, 2024 | Not Supported | [v2.17.3](https://github.com/coder/coder/releases/tag/v2.17.3) |
+| [2.17](https://coder.com/changelog/coder-2-17) | November 04, 2024 | Not Supported | [v2.17.3](https://github.com/coder/coder/releases/tag/v2.17.3) |
| [2.18](https://coder.com/changelog/coder-2-18) | December 03, 2024 | Not Supported | [v2.18.5](https://github.com/coder/coder/releases/tag/v2.18.5) |
-| [2.19](https://coder.com/changelog/coder-2-19) | February 04, 2025 | Security Support | [v2.19.3](https://github.com/coder/coder/releases/tag/v2.19.3) |
-| [2.20](https://coder.com/changelog/coder-2-20) | March 04, 2025 | Stable | [v2.20.3](https://github.com/coder/coder/releases/tag/v2.20.3) |
-| [2.21](https://coder.com/changelog/coder-2-21) | April 01, 2025 | Mainline | [v2.21.3](https://github.com/coder/coder/releases/tag/v2.21.3) |
-| 2.22 | May 06, 2025 | Not Released | N/A |
+| [2.19](https://coder.com/changelog/coder-2-19) | February 04, 2025 | Not Supported | [v2.19.3](https://github.com/coder/coder/releases/tag/v2.19.3) |
+| [2.20](https://coder.com/changelog/coder-2-20) | March 04, 2025 | Security Support | [v2.20.3](https://github.com/coder/coder/releases/tag/v2.20.3) |
+| [2.21](https://coder.com/changelog/coder-2-21) | April 02, 2025 | Stable | [v2.21.3](https://github.com/coder/coder/releases/tag/v2.21.3) |
+| [2.22](https://coder.com/changelog/coder-2-22) | May 16, 2025 | Mainline | [v2.22.0](https://github.com/coder/coder/releases/tag/v2.22.0) |
+| 2.23 | | Not Released | N/A |
> [!TIP]
diff --git a/docs/manifest.json b/docs/manifest.json
index 4519767b071dd..3af0cc7505057 100644
--- a/docs/manifest.json
+++ b/docs/manifest.json
@@ -193,7 +193,7 @@
"description": "Use Coder Desktop to access your workspace like it's a local machine",
"path": "./user-guides/desktop/index.md",
"icon_path": "./images/icons/computer-code.svg",
- "state": ["early access"]
+ "state": ["beta"]
},
{
"title": "Workspace Management",
@@ -503,6 +503,11 @@
"description": "Authenticate with provider APIs to provision workspaces",
"path": "./admin/templates/extending-templates/provider-authentication.md"
},
+ {
+ "title": "Configure a template for dev containers",
+      "description": "How to configure your template for dev containers",
+ "path": "./admin/templates/extending-templates/devcontainers.md"
+ },
{
"title": "Process Logging",
"description": "Log workspace processes",
@@ -1455,7 +1460,7 @@
},
{
"title": "ssh",
- "description": "Start a shell into a workspace",
+ "description": "Start a shell into a workspace or run a command",
"path": "reference/cli/ssh.md"
},
{
@@ -1625,6 +1630,7 @@
},
{
"title": "users create",
+ "description": "Create a new user.",
"path": "reference/cli/users_create.md"
},
{
@@ -1639,6 +1645,7 @@
},
{
"title": "users list",
+ "description": "Prints the list of users.",
"path": "reference/cli/users_list.md"
},
{
diff --git a/docs/reference/api/agents.md b/docs/reference/api/agents.md
index 853cb67e38bfd..f126fec59978c 100644
--- a/docs/reference/api/agents.md
+++ b/docs/reference/api/agents.md
@@ -470,6 +470,38 @@ curl -X PATCH http://coder-server:8080/api/v2/workspaceagents/me/logs \
To perform this operation, you must be authenticated. [Learn more](authentication.md).
+## Get workspace agent reinitialization
+
+### Code samples
+
+```shell
+# Example request using curl
+curl -X GET http://coder-server:8080/api/v2/workspaceagents/me/reinit \
+ -H 'Accept: application/json' \
+ -H 'Coder-Session-Token: API_KEY'
+```
+
+`GET /workspaceagents/me/reinit`
+
+### Example responses
+
+> 200 Response
+
+```json
+{
+ "reason": "prebuild_claimed",
+ "workspaceID": "string"
+}
+```
+
+### Responses
+
+| Status | Meaning | Description | Schema |
+|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------------|
+| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [agentsdk.ReinitializationEvent](schemas.md#agentsdkreinitializationevent) |
+
+To perform this operation, you must be authenticated. [Learn more](authentication.md).
+
## Get workspace agent by ID
### Code samples
@@ -577,6 +609,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaceagents/{workspaceagent} \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -740,6 +776,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/con
"containers": [
{
"created_at": "2019-08-24T14:15:22Z",
+ "devcontainer_dirty": true,
"id": "string",
"image": "string",
"labels": {
@@ -777,6 +814,33 @@ curl -X GET http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/con
To perform this operation, you must be authenticated. [Learn more](authentication.md).
+## Recreate devcontainer for workspace agent
+
+### Code samples
+
+```shell
+# Example request using curl
+curl -X POST http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate \
+ -H 'Coder-Session-Token: API_KEY'
+```
+
+`POST /workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate`
+
+### Parameters
+
+| Name | In | Type | Required | Description |
+|------------------|------|--------------|----------|----------------------|
+| `workspaceagent` | path | string(uuid) | true | Workspace agent ID |
+| `container` | path | string | true | Container ID or name |
+
+### Responses
+
+| Status | Meaning | Description | Schema |
+|--------|-----------------------------------------------------------------|-------------|--------|
+| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | |
+
+To perform this operation, you must be authenticated. [Learn more](authentication.md).
+
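As a sketch only (not part of this change; the IDs are placeholders), a dirty dev container could be recreated through this endpoint and the expected `204` checked from the shell:

```shell
# Hypothetical values: $AGENT_ID is a workspace agent UUID, $CONTAINER is a container ID or name.
curl -X POST "http://coder-server:8080/api/v2/workspaceagents/$AGENT_ID/containers/devcontainers/container/$CONTAINER/recreate" \
  -H 'Coder-Session-Token: API_KEY' \
  -s -o /dev/null -w '%{http_code}\n'   # prints 204 on success
```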
## Coordinate workspace agent
### Code samples
diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md
index 1f795c3d7d313..8e88df96c1d29 100644
--- a/docs/reference/api/builds.md
+++ b/docs/reference/api/builds.md
@@ -164,6 +164,10 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -393,6 +397,10 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild} \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -737,6 +745,10 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/res
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -859,6 +871,9 @@ Status Code **200**
| `»» logs_overflowed` | boolean | false | | |
| `»» name` | string | false | | |
| `»» operating_system` | string | false | | |
+| `»» parent_id` | [uuid.NullUUID](schemas.md#uuidnulluuid) | false | | |
+| `»»» uuid` | string | false | | |
+| `»»» valid` | boolean | false | | Valid is true if UUID is not NULL |
| `»» ready_at` | string(date-time) | false | | |
| `»» resource_id` | string(uuid) | false | | |
| `»» scripts` | array | false | | |
@@ -1092,6 +1107,10 @@ curl -X GET http://coder-server:8080/api/v2/workspacebuilds/{workspacebuild}/sta
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -1394,6 +1413,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -1573,6 +1596,9 @@ Status Code **200**
| `»»» logs_overflowed` | boolean | false | | |
| `»»» name` | string | false | | |
| `»»» operating_system` | string | false | | |
+| `»»» parent_id` | [uuid.NullUUID](schemas.md#uuidnulluuid) | false | | |
+| `»»»» uuid` | string | false | | |
+| `»»»» valid` | boolean | false | | Valid is true if UUID is not NULL |
| `»»» ready_at` | string(date-time) | false | | |
| `»»» resource_id` | string(uuid) | false | | |
| `»»» scripts` | array | false | | |
@@ -1867,6 +1893,10 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md
index 6ca005b4ec69c..aa704b0fe6a57 100644
--- a/docs/reference/api/schemas.md
+++ b/docs/reference/api/schemas.md
@@ -182,6 +182,36 @@
| `icon` | string | false | | |
| `id` | string | false | | ID is a unique identifier for the log source. It is scoped to a workspace agent, and can be statically defined inside code to prevent duplicate sources from being created for the same agent. |
+## agentsdk.ReinitializationEvent
+
+```json
+{
+ "reason": "prebuild_claimed",
+ "workspaceID": "string"
+}
+```
+
+### Properties
+
+| Name | Type | Required | Restrictions | Description |
+|---------------|--------------------------------------------------------------------|----------|--------------|-------------|
+| `reason` | [agentsdk.ReinitializationReason](#agentsdkreinitializationreason) | false | | |
+| `workspaceID` | string | false | | |
+
+## agentsdk.ReinitializationReason
+
+```json
+"prebuild_claimed"
+```
+
+### Properties
+
+#### Enumerated Values
+
+| Value |
+|--------------------|
+| `prebuild_claimed` |
+
## aisdk.Attachment
```json
@@ -6563,7 +6593,8 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
```
@@ -6602,6 +6633,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith
| `time_til_dormant_autodelete_ms` | integer | false | | |
| `time_til_dormant_ms` | integer | false | | |
| `updated_at` | string | false | | |
+| `use_classic_parameter_flow` | boolean | false | | |
#### Enumerated Values
@@ -8304,6 +8336,10 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -8508,6 +8544,10 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -8564,6 +8604,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
| `logs_overflowed` | boolean | false | | |
| `name` | string | false | | |
| `operating_system` | string | false | | |
+| `parent_id` | [uuid.NullUUID](#uuidnulluuid) | false | | |
| `ready_at` | string | false | | |
| `resource_id` | string | false | | |
| `scripts` | array of [codersdk.WorkspaceAgentScript](#codersdkworkspaceagentscript) | false | | |
@@ -8580,6 +8621,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
```json
{
"created_at": "2019-08-24T14:15:22Z",
+ "devcontainer_dirty": true,
"id": "string",
"image": "string",
"labels": {
@@ -8606,19 +8648,20 @@ If the schedule is empty, the user will be updated to use the default schedule.|
### Properties
-| Name | Type | Required | Restrictions | Description |
-|--------------------|---------------------------------------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------|
-| `created_at` | string | false | | Created at is the time the container was created. |
-| `id` | string | false | | ID is the unique identifier of the container. |
-| `image` | string | false | | Image is the name of the container image. |
-| `labels` | object | false | | Labels is a map of key-value pairs of container labels. |
-| » `[any property]` | string | false | | |
-| `name` | string | false | | Name is the human-readable name of the container. |
-| `ports` | array of [codersdk.WorkspaceAgentContainerPort](#codersdkworkspaceagentcontainerport) | false | | Ports includes ports exposed by the container. |
-| `running` | boolean | false | | Running is true if the container is currently running. |
-| `status` | string | false | | Status is the current status of the container. This is somewhat implementation-dependent, but should generally be a human-readable string. |
-| `volumes` | object | false | | Volumes is a map of "things" mounted into the container. Again, this is somewhat implementation-dependent. |
-| » `[any property]` | string | false | | |
+| Name | Type | Required | Restrictions | Description |
+|----------------------|---------------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| `created_at` | string | false | | Created at is the time the container was created. |
+| `devcontainer_dirty` | boolean | false | | Devcontainer dirty is true if the devcontainer configuration has changed since the container was created. This is used to determine if the container needs to be rebuilt. |
+| `id` | string | false | | ID is the unique identifier of the container. |
+| `image` | string | false | | Image is the name of the container image. |
+| `labels` | object | false | | Labels is a map of key-value pairs of container labels. |
+| » `[any property]` | string | false | | |
+| `name` | string | false | | Name is the human-readable name of the container. |
+| `ports` | array of [codersdk.WorkspaceAgentContainerPort](#codersdkworkspaceagentcontainerport) | false | | Ports includes ports exposed by the container. |
+| `running` | boolean | false | | Running is true if the container is currently running. |
+| `status` | string | false | | Status is the current status of the container. This is somewhat implementation-dependent, but should generally be a human-readable string. |
+| `volumes` | object | false | | Volumes is a map of "things" mounted into the container. Again, this is somewhat implementation-dependent. |
+| » `[any property]` | string | false | | |
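The new `devcontainer_dirty` flag surfaces through the existing agent containers endpoint, so callers can use it to decide which containers to recreate. For illustration only (not from this PR; assumes `jq` is installed and `$AGENT_ID` holds a workspace agent UUID), listing the names of containers whose devcontainer configuration has drifted might look like:

```shell
# Hypothetical helper: print names of containers flagged as dirty.
curl -s "http://coder-server:8080/api/v2/workspaceagents/$AGENT_ID/containers" \
  -H 'Coder-Session-Token: API_KEY' \
  | jq -r '.containers[] | select(.devcontainer_dirty) | .name'
```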
## codersdk.WorkspaceAgentContainerPort
@@ -8685,6 +8728,7 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"containers": [
{
"created_at": "2019-08-24T14:15:22Z",
+ "devcontainer_dirty": true,
"id": "string",
"image": "string",
"labels": {
@@ -9256,6 +9300,10 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -9672,6 +9720,10 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -9954,6 +10006,10 @@ If the schedule is empty, the user will be updated to use the default schedule.|
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -11941,6 +11997,22 @@ RegionIDs in range 900-999 are reserved for end users to run their own DERP node
None
+## uuid.NullUUID
+
+```json
+{
+ "uuid": "string",
+ "valid": true
+}
+```
+
+### Properties
+
+| Name | Type | Required | Restrictions | Description |
+|---------|---------|----------|--------------|-----------------------------------|
+| `uuid` | string | false | | |
+| `valid` | boolean | false | | Valid is true if UUID is not NULL |
+
## workspaceapps.AccessMethod
```json
diff --git a/docs/reference/api/templates.md b/docs/reference/api/templates.md
index ef136764bf2c5..c662118868656 100644
--- a/docs/reference/api/templates.md
+++ b/docs/reference/api/templates.md
@@ -13,6 +13,10 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat
`GET /organizations/{organization}/templates`
+Returns a list of templates for the specified organization.
+By default, only non-deprecated templates are returned.
+To include deprecated templates, specify `deprecated:true` in the search query.
+
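For illustration (not part of the generated reference; this assumes the search string goes in the usual `q` query parameter, as with other Coder list endpoints), a request that also includes deprecated templates might look like:

```shell
# Hypothetical example: list an organization's templates, including deprecated ones.
curl -X GET "http://coder-server:8080/api/v2/organizations/{organization}/templates?q=deprecated%3Atrue" \
  -H 'Accept: application/json' \
  -H 'Coder-Session-Token: API_KEY'
```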
### Parameters
| Name | In | Type | Required | Description |
@@ -74,7 +78,8 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
]
```
@@ -130,6 +135,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W
|`» time_til_dormant_autodelete_ms`|integer|false|||
|`» time_til_dormant_ms`|integer|false|||
|`» updated_at`|string(date-time)|false|||
+|`» use_classic_parameter_flow`|boolean|false|||
#### Enumerated Values
@@ -251,7 +257,8 @@ curl -X POST http://coder-server:8080/api/v2/organizations/{organization}/templa
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
```
@@ -399,7 +406,8 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/templat
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
```
@@ -739,6 +747,10 @@ curl -X GET http://coder-server:8080/api/v2/templates \
`GET /templates`
+Returns a list of templates.
+By default, only non-deprecated templates are returned.
+To include deprecated templates, specify `deprecated:true` in the search query.
+
### Example responses
> 200 Response
@@ -794,7 +806,8 @@ curl -X GET http://coder-server:8080/api/v2/templates \
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
]
```
@@ -850,6 +863,7 @@ Restarts will only happen on weekdays in this list on weeks which line up with W
|`» time_til_dormant_autodelete_ms`|integer|false|||
|`» time_til_dormant_ms`|integer|false|||
|`» updated_at`|string(date-time)|false|||
+|`» use_classic_parameter_flow`|boolean|false|||
#### Enumerated Values
@@ -991,7 +1005,8 @@ curl -X GET http://coder-server:8080/api/v2/templates/{template} \
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
```
@@ -1120,7 +1135,8 @@ curl -X PATCH http://coder-server:8080/api/v2/templates/{template} \
"require_active_version": true,
"time_til_dormant_autodelete_ms": 0,
"time_til_dormant_ms": 0,
- "updated_at": "2019-08-24T14:15:22Z"
+ "updated_at": "2019-08-24T14:15:22Z",
+ "use_classic_parameter_flow": true
}
```
@@ -2348,6 +2364,10 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/d
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -2470,6 +2490,9 @@ Status Code **200**
| `»» logs_overflowed` | boolean | false | | |
| `»» name` | string | false | | |
| `»» operating_system` | string | false | | |
+| `»» parent_id` | [uuid.NullUUID](schemas.md#uuidnulluuid) | false | | |
+| `»»» uuid` | string | false | | |
+| `»»» valid` | boolean | false | | Valid is true if UUID is not NULL |
| `»» ready_at` | string(date-time) | false | | |
| `»» resource_id` | string(uuid) | false | | |
| `»» scripts` | array | false | | |
@@ -2869,6 +2892,10 @@ curl -X GET http://coder-server:8080/api/v2/templateversions/{templateversion}/r
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -2991,6 +3018,9 @@ Status Code **200**
| `»» logs_overflowed` | boolean | false | | |
| `»» name` | string | false | | |
| `»» operating_system` | string | false | | |
+| `»» parent_id` | [uuid.NullUUID](schemas.md#uuidnulluuid) | false | | |
+| `»»» uuid` | string | false | | |
+| `»»» valid` | boolean | false | | Valid is true if UUID is not NULL |
| `»» ready_at` | string(date-time) | false | | |
| `»» resource_id` | string(uuid) | false | | |
| `»» scripts` | array | false | | |
diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md
index 5d09c46a01d30..8e25cd0bd58e6 100644
--- a/docs/reference/api/workspaces.md
+++ b/docs/reference/api/workspaces.md
@@ -219,6 +219,10 @@ of the template will be used.
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -496,6 +500,10 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -799,6 +807,10 @@ of the template will be used.
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -1062,6 +1074,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -1340,6 +1356,10 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
@@ -1733,6 +1753,10 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \
"logs_overflowed": true,
"name": "string",
"operating_system": "string",
+ "parent_id": {
+ "uuid": "string",
+ "valid": true
+ },
"ready_at": "2019-08-24T14:15:22Z",
"resource_id": "4d5215ed-38bb-48ed-879a-fdb9ca58522f",
"scripts": [
diff --git a/docs/reference/cli/index.md b/docs/reference/cli/index.md
index 1803fd460c65b..2106374eba150 100644
--- a/docs/reference/cli/index.md
+++ b/docs/reference/cli/index.md
@@ -53,7 +53,7 @@ Coder — A tool for provisioning self-hosted development environments with Terr
| [schedule](./schedule.md) | Schedule automated start and stop times for workspaces |
| [show](./show.md) | Display details of a workspace's resources and agents |
| [speedtest](./speedtest.md) | Run upload and download tests from your machine to a workspace |
-| [ssh](./ssh.md) | Start a shell into a workspace |
+| [ssh](./ssh.md) | Start a shell into a workspace or run a command |
| [start](./start.md) | Start a workspace |
| [stat](./stat.md) | Show resource usage for the current workspace. |
| [stop](./stop.md) | Stop a workspace |
diff --git a/docs/reference/cli/ssh.md b/docs/reference/cli/ssh.md
index c5bae755c8419..aaa76bd256e9e 100644
--- a/docs/reference/cli/ssh.md
+++ b/docs/reference/cli/ssh.md
@@ -1,12 +1,22 @@
# ssh
-Start a shell into a workspace
+Start a shell into a workspace or run a command
## Usage
```console
-coder ssh [flags]
+coder ssh [flags] [command]
+```
+
+## Description
+
+```console
+This command does not have full parity with the standard SSH command. For users who need the full functionality of SSH, create an ssh configuration with `coder config-ssh`.
+
+ - Use `--` to separate and pass flags directly to the command executed via SSH:
+
+ $ coder ssh -- ls -la
```
## Options
diff --git a/docs/reference/cli/users.md b/docs/reference/cli/users.md
index d942699d6ee31..5f05375e8b13e 100644
--- a/docs/reference/cli/users.md
+++ b/docs/reference/cli/users.md
@@ -17,8 +17,8 @@ coder users [subcommand]
| Name | Purpose |
|--------------------------------------------------|---------------------------------------------------------------------------------------|
-| [create](./users_create.md) | |
-| [list](./users_list.md) | |
+| [create](./users_create.md) | Create a new user. |
+| [list](./users_list.md) | Prints the list of users. |
| [show](./users_show.md) | Show a single user. Use 'me' to indicate the currently authenticated user. |
| [delete](./users_delete.md) | Delete a user by username or user_id. |
| [edit-roles](./users_edit-roles.md) | Edit a user's roles by username or id |
diff --git a/docs/reference/cli/users_create.md b/docs/reference/cli/users_create.md
index 61768ebfdbbf8..646eb55ffb5ba 100644
--- a/docs/reference/cli/users_create.md
+++ b/docs/reference/cli/users_create.md
@@ -1,6 +1,8 @@
# users create
+Create a new user.
+
## Usage
```console
diff --git a/docs/reference/cli/users_list.md b/docs/reference/cli/users_list.md
index 9293ff13c923c..93122e7741072 100644
--- a/docs/reference/cli/users_list.md
+++ b/docs/reference/cli/users_list.md
@@ -1,6 +1,8 @@
# users list
+Prints the list of users.
+
Aliases:
* ls
diff --git a/docs/user-guides/desktop/index.md b/docs/user-guides/desktop/index.md
index 72d627c7a3e71..69a32837a8b87 100644
--- a/docs/user-guides/desktop/index.md
+++ b/docs/user-guides/desktop/index.md
@@ -1,4 +1,4 @@
-# Coder Desktop (Early Access)
+# Coder Desktop (Beta)
Use Coder Desktop to work on your workspaces as though they're on your LAN, no
port-forwarding required.
@@ -22,7 +22,7 @@ You can install Coder Desktop on macOS or Windows.
Alternatively, you can manually install Coder Desktop from the [releases page](https://github.com/coder/coder-desktop-macos/releases).
-1. Open **Coder Desktop** from the Applications directory. When macOS asks if you want to open it, select **Open**.
+1. Open **Coder Desktop** from the Applications directory.
1. The application is treated as a system VPN. macOS will prompt you to confirm with:
@@ -79,11 +79,11 @@ Before you can use Coder Desktop, you will need to sign in.
## macOS
-
+ 
## Windows
-
+ 
@@ -97,19 +97,19 @@ Before you can use Coder Desktop, you will need to sign in.
1. In your web browser, you may be prompted to sign in to Coder with your credentials:
-
+ 
1. Copy the session token to the clipboard:
-
+ 
1. Paste the token in the **Session Token** field of the **Sign In** screen, then select **Sign In**:

-1. macOS: Allow the VPN configuration for Coder Desktop if you are prompted.
+1. macOS: Allow the VPN configuration for Coder Desktop if you are prompted:
-
+ 
1. Select the Coder icon in the menu bar (macOS) or system tray (Windows), and click the **Coder Connect** toggle to enable the connection.
@@ -129,28 +129,80 @@ While active, Coder Connect will list the workspaces you own and will configure
To copy the `.coder` hostname of a workspace agent, you can click the copy icon beside it.
-On macOS you can use `ping6` in your terminal to verify the connection to your workspace:
+You can also connect to the SSH server in your workspace using any SSH client, such as OpenSSH or PuTTY:
```shell
- ping6 -c 5 your-workspace.coder
+ ssh your-workspace.coder
```
-On Windows, you can use `ping` in a Command Prompt or PowerShell terminal to verify the connection to your workspace:
+Any services listening on ports in your workspace will be available on the same hostname. For example, you can access a web server on port `8080` by visiting `http://your-workspace.coder:8080` in your browser.
+
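As a quick terminal check of the same behavior (a sketch, assuming a service is already listening on port `8080` inside the workspace):

```shell
curl http://your-workspace.coder:8080
```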
+> [!NOTE]
+> Currently, the Coder IDE extensions for VSCode and JetBrains create their own tunnel and do not utilize the Coder Connect tunnel to connect to workspaces.
+
+### Ping your workspace
+
+
+
+### macOS
+
+Use `ping6` in your terminal to verify the connection to your workspace:
```shell
- ping -n 5 your-workspace.coder
+ ping6 -c 5 your-workspace.coder
```
-Any services listening on ports in your workspace will be available on the same hostname. For example, you can access a web server on port `8080` by visiting `http://your-workspace.coder:8080` in your browser.
+### Windows
-You can also connect to the SSH server in your workspace using any SSH client, such as OpenSSH or PuTTY:
+Use `ping` in a Command Prompt or PowerShell terminal to verify the connection to your workspace:
```shell
- ssh your-workspace.coder
+ ping -n 5 your-workspace.coder
```
+
)
) : (
-
+
)}
diff --git a/site/src/components/GitDeviceAuth/GitDeviceAuth.tsx b/site/src/components/GitDeviceAuth/GitDeviceAuth.tsx
index 5bbf036943773..7b3d8091abfeb 100644
--- a/site/src/components/GitDeviceAuth/GitDeviceAuth.tsx
+++ b/site/src/components/GitDeviceAuth/GitDeviceAuth.tsx
@@ -1,5 +1,4 @@
import type { Interpolation, Theme } from "@emotion/react";
-import OpenInNewIcon from "@mui/icons-material/OpenInNew";
import AlertTitle from "@mui/material/AlertTitle";
import CircularProgress from "@mui/material/CircularProgress";
import Link from "@mui/material/Link";
@@ -8,6 +7,7 @@ import type { ExternalAuthDevice } from "api/typesGenerated";
import { isAxiosError } from "axios";
import { Alert, AlertDetail } from "components/Alert/Alert";
import { CopyButton } from "components/CopyButton/CopyButton";
+import { ExternalLinkIcon } from "lucide-react";
import type { FC } from "react";
interface GitDeviceAuthProps {
@@ -134,7 +134,11 @@ export const GitDeviceAuth: FC = ({
Copy your one-time code:
{externalAuthDevice.user_code}
-
+ {" "}
+
Then open the link below and paste it:
@@ -146,7 +150,7 @@ export const GitDeviceAuth: FC = ({
target="_blank"
rel="noreferrer"
>
-
+
Open and Paste
diff --git a/site/src/components/HelpTooltip/HelpTooltip.tsx b/site/src/components/HelpTooltip/HelpTooltip.tsx
index 2ae8700114b3b..0a46f9a10f199 100644
--- a/site/src/components/HelpTooltip/HelpTooltip.tsx
+++ b/site/src/components/HelpTooltip/HelpTooltip.tsx
@@ -5,7 +5,6 @@ import {
css,
useTheme,
} from "@emotion/react";
-import OpenInNewIcon from "@mui/icons-material/OpenInNew";
import Link from "@mui/material/Link";
import { Stack } from "components/Stack/Stack";
import {
@@ -16,6 +15,7 @@ import {
PopoverTrigger,
usePopover,
} from "components/deprecated/Popover/Popover";
+import { ExternalLinkIcon } from "lucide-react";
import { CircleHelpIcon } from "lucide-react";
import {
type FC,
@@ -137,7 +137,7 @@ interface HelpTooltipLink {
export const HelpTooltipLink: FC = ({ children, href }) => {
return (
-
+
{children}
);
diff --git a/site/src/components/Icons/FileCopyIcon.tsx b/site/src/components/Icons/FileCopyIcon.tsx
deleted file mode 100644
index bd6fc359fe71f..0000000000000
--- a/site/src/components/Icons/FileCopyIcon.tsx
+++ /dev/null
@@ -1,10 +0,0 @@
-import SvgIcon, { type SvgIconProps } from "@mui/material/SvgIcon";
-
-export const FileCopyIcon = (props: SvgIconProps): JSX.Element => (
-
-
-
-);
diff --git a/site/src/components/Loader/Loader.tsx b/site/src/components/Loader/Loader.tsx
index 0121b352eaeb1..ef590aecfbca0 100644
--- a/site/src/components/Loader/Loader.tsx
+++ b/site/src/components/Loader/Loader.tsx
@@ -1,10 +1,10 @@
import type { Interpolation, Theme } from "@emotion/react";
-import { Spinner } from "components/deprecated/Spinner/Spinner";
+import { Spinner } from "components/Spinner/Spinner";
import type { FC, HTMLAttributes } from "react";
interface LoaderProps extends HTMLAttributes {
fullscreen?: boolean;
- size?: number;
+ size?: "sm" | "lg";
/**
* A label for the loader. This is used for accessibility purposes.
*/
@@ -13,7 +13,7 @@ interface LoaderProps extends HTMLAttributes {
export const Loader: FC = ({
fullscreen,
- size = 26,
+ size = "lg",
label = "Loading...",
...attrs
}) => {
@@ -23,7 +23,7 @@ export const Loader: FC = ({
data-testid="loader"
{...attrs}
>
-
+
);
};
diff --git a/site/src/components/PaginationWidget/PageButtons.tsx b/site/src/components/PaginationWidget/PageButtons.tsx
index 1e5f9ff7df18c..666720b62b913 100644
--- a/site/src/components/PaginationWidget/PageButtons.tsx
+++ b/site/src/components/PaginationWidget/PageButtons.tsx
@@ -1,11 +1,9 @@
-import { useTheme } from "@emotion/react";
-import Button from "@mui/material/Button";
+import { Button } from "components/Button/Button";
import type { FC, ReactNode } from "react";
type NumberedPageButtonProps = {
pageNumber: number;
totalPages: number;
-
onClick?: () => void;
highlighted?: boolean;
disabled?: boolean;
@@ -68,23 +66,10 @@ const BasePageButton: FC = ({
highlighted = false,
disabled = false,
}) => {
- const theme = useTheme();
-
return (
)}
@@ -158,7 +160,7 @@ const RoleTable: FC = ({
}
+ startIcon={}
variant="contained"
>
Create custom role
diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx
index 686842b196b0b..99e80cb6de397 100644
--- a/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx
+++ b/site/src/pages/OrganizationSettingsPage/OrganizationMembersPageView.tsx
@@ -1,5 +1,3 @@
-import PersonAdd from "@mui/icons-material/PersonAdd";
-import LoadingButton from "@mui/lab/LoadingButton";
import { getErrorMessage } from "api/errors";
import type {
Group,
@@ -24,6 +22,7 @@ import {
SettingsHeader,
SettingsHeaderTitle,
} from "components/SettingsHeader/SettingsHeader";
+import { Spinner } from "components/Spinner/Spinner";
import { Stack } from "components/Stack/Stack";
import {
Table,
@@ -35,6 +34,7 @@ import {
} from "components/Table/Table";
import { UserAutocomplete } from "components/UserAutocomplete/UserAutocomplete";
import type { PaginationResultInfo } from "hooks/usePaginatedQuery";
+import { UserPlusIcon } from "lucide-react";
import { EllipsisVertical, TriangleAlert } from "lucide-react";
import { UserGroupsCell } from "pages/UsersPage/UsersTable/UserGroupsCell";
import { type FC, useState } from "react";
@@ -237,15 +237,16 @@ const AddOrganizationMember: FC = ({
}}
/>
- }
- loading={isLoading}
+ variant="outline"
>
+
+
+
Add user
-
+
);
diff --git a/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx b/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx
index a05fea8cc7761..e2a8c8206e713 100644
--- a/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx
+++ b/site/src/pages/ResetPasswordPage/ChangePasswordPage.tsx
@@ -1,12 +1,13 @@
import type { Interpolation, Theme } from "@emotion/react";
-import LoadingButton from "@mui/lab/LoadingButton";
-import Button from "@mui/material/Button";
+import MUIButton from "@mui/material/Button";
import TextField from "@mui/material/TextField";
import { isApiValidationError } from "api/errors";
import { changePasswordWithOTP } from "api/queries/users";
import { ErrorAlert } from "components/Alert/ErrorAlert";
+import { Button } from "components/Button/Button";
import { CustomLogo } from "components/CustomLogo/CustomLogo";
import { displaySuccess } from "components/GlobalSnackbar/utils";
+import { Spinner } from "components/Spinner/Spinner";
import { Stack } from "components/Stack/Stack";
import { useFormik } from "formik";
import type { FC } from "react";
@@ -115,16 +116,16 @@ const ChangePasswordPage: FC = ({ redirect }) => {
/>
-
+
Reset password
-
-
+ = ({ redirect }) => {
to="/login"
>
Back to login
-
+
diff --git a/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx b/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx
index 6579eb1a0a265..f67395b3f732a 100644
--- a/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx
+++ b/site/src/pages/ResetPasswordPage/RequestOTPPage.tsx
@@ -1,10 +1,10 @@
import { type Interpolation, type Theme, useTheme } from "@emotion/react";
-import LoadingButton from "@mui/lab/LoadingButton";
-import Button from "@mui/material/Button";
import TextField from "@mui/material/TextField";
import { requestOneTimePassword } from "api/queries/users";
import { ErrorAlert } from "components/Alert/ErrorAlert";
+import { Button } from "components/Button/Button";
import { CustomLogo } from "components/CustomLogo/CustomLogo";
+import { Spinner } from "components/Spinner/Spinner";
import { Stack } from "components/Stack/Stack";
import type { FC } from "react";
import { Helmet } from "react-helmet-async";
@@ -88,23 +88,17 @@ const RequestOTP: FC = ({
/>
-
+
Reset password
-
-
- Cancel
+
+
+ Cancel
@@ -150,8 +144,8 @@ const RequestOTPSuccess: FC<{ email: string }> = ({ email }) => {
Contact your deployment administrator if you encounter issues.
-
- Back to login
+
+ Back to login
diff --git a/site/src/pages/SetupPage/SetupPageView.tsx b/site/src/pages/SetupPage/SetupPageView.tsx
index 42c8faedea348..b8735cbf0dbfa 100644
--- a/site/src/pages/SetupPage/SetupPageView.tsx
+++ b/site/src/pages/SetupPage/SetupPageView.tsx
@@ -1,8 +1,7 @@
import GitHubIcon from "@mui/icons-material/GitHub";
-import LoadingButton from "@mui/lab/LoadingButton";
import AlertTitle from "@mui/material/AlertTitle";
import Autocomplete from "@mui/material/Autocomplete";
-import Button from "@mui/material/Button";
+import MuiButton from "@mui/material/Button";
import Checkbox from "@mui/material/Checkbox";
import Link from "@mui/material/Link";
import MenuItem from "@mui/material/MenuItem";
@@ -11,10 +10,12 @@ import { countries } from "api/countriesGenerated";
import type * as TypesGen from "api/typesGenerated";
import { isAxiosError } from "axios";
import { Alert, AlertDetail } from "components/Alert/Alert";
+import { Button } from "components/Button/Button";
import { FormFields, VerticalForm } from "components/Form/Form";
import { CoderIcon } from "components/Icons/CoderIcon";
import { PasswordField } from "components/PasswordField/PasswordField";
import { SignInLayout } from "components/SignInLayout/SignInLayout";
+import { Spinner } from "components/Spinner/Spinner";
import { Stack } from "components/Stack/Stack";
import { type FormikContextType, useFormik } from "formik";
import type { ChangeEvent, FC } from "react";
@@ -172,7 +173,7 @@ export const SetupPageView: FC = ({
{authMethods?.github.enabled && (
<>
- = ({
size="xlarge"
>
{Language.githubCreate}
-
+
@@ -376,15 +377,16 @@ export const SetupPageView: FC = ({
)}
-
+
{Language.create}
-
+
diff --git a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx
index 00872ed8d5bfb..3767ca9b1cab2 100644
--- a/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx
+++ b/site/src/pages/StarterTemplatePage/StarterTemplatePageView.tsx
@@ -1,5 +1,4 @@
import { useTheme } from "@emotion/react";
-import PlusIcon from "@mui/icons-material/AddOutlined";
import Button from "@mui/material/Button";
import type { TemplateExample } from "api/typesGenerated";
import { ErrorAlert } from "components/Alert/ErrorAlert";
@@ -13,7 +12,7 @@ import {
PageHeaderTitle,
} from "components/PageHeader/PageHeader";
import { Stack } from "components/Stack/Stack";
-import { ExternalLinkIcon } from "lucide-react";
+import { ExternalLinkIcon, PlusIcon } from "lucide-react";
import type { FC } from "react";
import { Link } from "react-router-dom";
@@ -58,7 +57,7 @@ export const StarterTemplatePageView: FC = ({
variant="contained"
component={Link}
to={`/templates/new?exampleId=${starterTemplate.id}`}
- startIcon={}
+ startIcon={}
>
Use template
diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx
index 24aa6a8e7068b..74295ed63cf72 100644
--- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx
+++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx
@@ -1,5 +1,3 @@
-import CheckOutlined from "@mui/icons-material/CheckOutlined";
-import FileCopyOutlined from "@mui/icons-material/FileCopyOutlined";
import Button from "@mui/material/Button";
import FormControlLabel from "@mui/material/FormControlLabel";
import Radio from "@mui/material/Radio";
@@ -10,6 +8,7 @@ import { FormSection, VerticalForm } from "components/Form/Form";
import { Loader } from "components/Loader/Loader";
import { RichParameterInput } from "components/RichParameterInput/RichParameterInput";
import { useClipboard } from "hooks/useClipboard";
+import { CheckIcon, CopyIcon } from "lucide-react";
import { useTemplateLayoutContext } from "pages/TemplatePage/TemplateLayout";
import { type FC, useEffect, useState } from "react";
import { Helmet } from "react-helmet-async";
@@ -187,9 +186,9 @@ export const TemplateEmbedPageView: FC = ({
css={{ borderRadius: 999 }}
startIcon={
clipboard.showCopiedSuccess ? (
-
+
) : (
-
+
)
}
variant="contained"
diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx
index 9386f0916629c..c7da8332a29ab 100644
--- a/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx
+++ b/site/src/pages/TemplatePage/TemplateInsightsPage/IntervalMenu.tsx
@@ -1,8 +1,8 @@
-import CheckOutlined from "@mui/icons-material/CheckOutlined";
import ExpandMoreOutlined from "@mui/icons-material/ExpandMoreOutlined";
-import Button from "@mui/material/Button";
import Menu from "@mui/material/Menu";
import MenuItem from "@mui/material/MenuItem";
+import { Button } from "components/Button/Button";
+import { CheckIcon } from "lucide-react";
import { type FC, useRef, useState } from "react";
const insightsIntervals = {
@@ -38,9 +38,10 @@ export const IntervalMenu: FC = ({ value, onChange }) => {
aria-haspopup="true"
aria-expanded={open ? "true" : undefined}
onClick={() => setOpen(true)}
- endIcon={}
+ variant="outline"
>
{insightsIntervals[value].label}
+