From 899836d47a73107436c92b2db37a6c8d03db4971 Mon Sep 17 00:00:00 2001 From: Hugo Dutka Date: Fri, 10 Jan 2025 15:21:03 +0100 Subject: [PATCH 0001/1096] chore: reduce Windows PG tests flakiness (#16090) This PR: - Reduces test parallelism on Windows in CI - Unifies wait intervals on Windows with Linux and macOS. Previously we had custom intervals for Windows to reduce test flakiness on smaller CI workers, but we don't run tests on small CI workers anymore. Due to how our CI file is defined, forks still run tests on small CI machines, but I'm not sure whether the different intervals actually help or whether that's a heuristic that happened to fix issues on a particular day and was never reevaluated. I propose we make the change and, if someone complains, revert it. In particular, reduced test parallelism seems to actually help: I was able to run Windows tests 5 times in a row without flakes. Not sure if that's going to fix the problem long term, but it seems worth trying. --- .github/workflows/ci.yaml | 5 ++++- testutil/duration.go | 2 -- testutil/duration_windows.go | 24 ------------------------ 3 files changed, 4 insertions(+), 27 deletions(-) delete mode 100644 testutil/duration_windows.go diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e5180b037a916..5492a2354ede6 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -445,7 +445,10 @@ jobs: # C: drive is extremely slow: https://github.com/actions/runner-images/issues/8755 mkdir -p "R:/temp/embedded-pg" go run scripts/embedded-pg/main.go -path "R:/temp/embedded-pg" - DB=ci gotestsum --format standard-quiet -- -v -short -count=1 ./... + # Reduce test parallelism, mirroring what we do for race tests. + # We'd been encountering issues with timing related flakes, and + # this seems to help. + DB=ci gotestsum --format standard-quiet -- -v -short -count=1 -parallel 4 -p 4 ./... else go run scripts/embedded-pg/main.go DB=ci gotestsum --format standard-quiet -- -v -short -count=1 ./... diff --git a/testutil/duration.go b/testutil/duration.go index 44ae418eba74f..a8c35030cdea2 100644 --- a/testutil/duration.go +++ b/testutil/duration.go @@ -1,5 +1,3 @@ -//go:build !windows - package testutil import ( diff --git a/testutil/duration_windows.go b/testutil/duration_windows.go deleted file mode 100644 index d363dae2ba596..0000000000000 --- a/testutil/duration_windows.go +++ /dev/null @@ -1,24 +0,0 @@ -package testutil - -import "time" - -// Constants for timing out operations, usable for creating contexts -// that timeout or in require.Eventually. -// -// Windows durations are adjusted for slow CI workers. -const ( - WaitShort = 30 * time.Second - WaitMedium = 40 * time.Second - WaitLong = 70 * time.Second - WaitSuperLong = 240 * time.Second -) - -// Constants for delaying repeated operations, e.g. in -// require.Eventually. -// -// Windows durations are adjusted for slow CI workers.
-const ( - IntervalFast = 100 * time.Millisecond - IntervalMedium = 1000 * time.Millisecond - IntervalSlow = 4 * time.Second -) From 8c44cd3dfd839b71af520d48fca1e15f009576dc Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Fri, 10 Jan 2025 16:48:11 +0200 Subject: [PATCH 0002/1096] test(cli/ssh): fix ssh start conflict test by faking API response (#16082) --- cli/ssh_test.go | 30 ++++++++++++++++++++++-------- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/cli/ssh_test.go b/cli/ssh_test.go index bd107852251f7..4fd52971df1cf 100644 --- a/cli/ssh_test.go +++ b/cli/ssh_test.go @@ -154,16 +154,25 @@ func TestSSH(t *testing.T) { // a start build of the workspace. isFirstBuild := true buildURL := regexp.MustCompile("/api/v2/workspaces/.*/builds") - buildReq := make(chan struct{}) - buildResume := make(chan struct{}) + buildPause := make(chan bool) + buildDone := make(chan struct{}) buildSyncMW := func(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method == http.MethodPost && buildURL.MatchString(r.URL.Path) { if !isFirstBuild { - t.Log("buildSyncMW: blocking build") - buildReq <- struct{}{} - <-buildResume + t.Log("buildSyncMW: pausing build") + if shouldContinue := <-buildPause; !shouldContinue { + // We can't force the API to trigger a build conflict (racy) so we fake it. + t.Log("buildSyncMW: return conflict") + w.WriteHeader(http.StatusConflict) + return + } t.Log("buildSyncMW: resuming build") + defer func() { + t.Log("buildSyncMW: sending build done") + buildDone <- struct{}{} + t.Log("buildSyncMW: done") + }() } else { isFirstBuild = false } @@ -211,10 +220,15 @@ func TestSSH(t *testing.T) { for _, pty := range ptys { pty.ExpectMatchContext(ctx, "Workspace was stopped, starting workspace to allow connecting to") } - for range ptys { - testutil.RequireRecvCtx(ctx, t, buildReq) + + // Allow one build to complete. + testutil.RequireSendCtx(ctx, t, buildPause, true) + testutil.RequireRecvCtx(ctx, t, buildDone) + + // Allow the remaining builds to continue. + for i := 0; i < len(ptys)-1; i++ { + testutil.RequireSendCtx(ctx, t, buildPause, false) } - close(buildResume) var foundConflict int for _, pty := range ptys { From 8b9763dd2c5d82046c99437d8acdeaa6b552e8c4 Mon Sep 17 00:00:00 2001 From: Michael Smith Date: Fri, 10 Jan 2025 09:58:42 -0500 Subject: [PATCH 0003/1096] fix: ensure active Deployment Page nav links are highlighted (#16092) ## Changes made - Updated links in the deployment settings page to ensure that they're highlighted properly - Updated comment about previous PR to make sure it's clear why we needed a workaround. --- site/src/components/Sidebar/Sidebar.tsx | 6 ++-- .../management/DeploymentSidebarView.tsx | 36 +++++++++++-------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/site/src/components/Sidebar/Sidebar.tsx b/site/src/components/Sidebar/Sidebar.tsx index 7799ba0384eeb..880ceecec2265 100644 --- a/site/src/components/Sidebar/Sidebar.tsx +++ b/site/src/components/Sidebar/Sidebar.tsx @@ -61,9 +61,11 @@ export const SettingsSidebarNavItem: FC = ({ href, end, }) => { - // useMatch is necessary to verify if the current path matches the href on the initial render of the route + // 2025-01-10: useMatch is a workaround for a bug we encountered when you + // pass a render function to NavLink's className prop, and try to access + // NavLinks's isActive state value for the conditional styling. 
isActive + // wasn't always evaluating to true when it should be, but useMatch worked const matchResult = useMatch(href); - return ( = ({
{permissions.viewDeploymentValues && ( - General + General )} {permissions.viewAllLicenses && ( - Licenses + Licenses )} {permissions.editDeploymentValues && ( - Appearance + + Appearance + )} {permissions.viewDeploymentValues && ( - User Authentication + + User Authentication + )} {permissions.viewDeploymentValues && ( - + External Authentication )} {/* Not exposing this yet since token exchange is not finished yet. - OAuth2 Applications */} {permissions.viewDeploymentValues && ( - Network + Network )} {permissions.readWorkspaceProxies && ( - + Workspace Proxies )} {permissions.viewDeploymentValues && ( - Security + Security )} {permissions.viewDeploymentValues && ( - Observability + + Observability + )} {permissions.viewAllUsers && ( - Users + Users )} {permissions.viewNotificationTemplate && ( - +
Notifications @@ -102,11 +108,13 @@ const DeploymentSettingsNavigation: FC = ({ )} {permissions.viewOrganizationIDPSyncSettings && ( - + IdP Organization Sync )} - {!isPremium && Premium} + {!isPremium && ( + Premium + )}
); From 08dd2ab4cca50f708a4ade7db8b395fca60a573a Mon Sep 17 00:00:00 2001 From: Gregory McCue Date: Fri, 10 Jan 2025 12:02:25 -0500 Subject: [PATCH 0004/1096] docs: fix typo in prometheus.md (#16091) Fixes small `scrape_config` typo in `prometheus.md` --- docs/admin/integrations/prometheus.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/integrations/prometheus.md b/docs/admin/integrations/prometheus.md index 9440d90a19bd0..d849f192aaa3d 100644 --- a/docs/admin/integrations/prometheus.md +++ b/docs/admin/integrations/prometheus.md @@ -59,7 +59,7 @@ spec: ### Prometheus configuration To allow Prometheus to scrape the Coder metrics, you will need to create a -`scape_config` in your `prometheus.yml` file, or in the Prometheus Helm chart +`scrape_config` in your `prometheus.yml` file, or in the Prometheus Helm chart values. The following is an example `scrape_config`. ```yaml From 14cd58dc3bcf4437c95e0612f372a46801245d22 Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Mon, 13 Jan 2025 14:10:45 +1100 Subject: [PATCH 0005/1096] fix(site): fix typo on new workspace screen (#16099) Closes #16084. --- site/src/components/Form/Form.stories.tsx | 2 +- site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/site/src/components/Form/Form.stories.tsx b/site/src/components/Form/Form.stories.tsx index 9b71e6e9c1d97..46c783347b374 100644 --- a/site/src/components/Form/Form.stories.tsx +++ b/site/src/components/Form/Form.stories.tsx @@ -9,7 +9,7 @@ const meta: Meta = { children: ( diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx index 3abca78f67b89..e657535c0a265 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageView.tsx @@ -194,7 +194,7 @@ export const CreateWorkspacePageView: FC = ({ title="General" description={ permissions.createWorkspaceForUser - ? "The name of the workspace and its owner. Only admins can create workspace for other users." + ? "The name of the workspace and its owner. Only admins can create workspaces for other users." : "The name of your new workspace." } > From 88a9c4bb59e7509059d11b7d6d26955caef4281d Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Mon, 13 Jan 2025 16:53:51 +1100 Subject: [PATCH 0006/1096] ci: switch test-go on macOS to depot runners (#16100) We use depot runners where possible everywhere else. As a bonus, the depot runners for Mac would appear to be slightly beefier than the GitHub ones (8 vs 6 cores). We've already been using the depot macOS runners to build the VPN dylib for the past month or so. 
--- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5492a2354ede6..32afb97a3cffd 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -318,7 +318,7 @@ jobs: run: ./scripts/check_unstaged.sh test-go: - runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'macos-latest-xlarge' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'windows-latest-16-cores' || matrix.os }} + runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'depot-macos-latest' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'windows-latest-16-cores' || matrix.os }} needs: changes if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' timeout-minutes: 20 From 859abcde4ef1d0dca72fd4a37728538790715989 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Mon, 13 Jan 2025 11:28:45 +0200 Subject: [PATCH 0007/1096] chore: send notification to `#dev` on any CI failure on `main` (#16102) We've had a [few failures in main](https://github.com/coder/coder/actions?query=branch%3Amain+is%3Afailure) of late, and unless the committer of the change has CI notifications enabled we may not be aware of the failure. This PR sends a Slack notification to the #dev channel so everyone has visibility. Signed-off-by: Danny Kopping --- .github/workflows/ci.yaml | 49 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 32afb97a3cffd..1a2ce44f8aec3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1248,3 +1248,52 @@ jobs: - name: Setup and run sqlc vet run: | make sqlc-vet + + notify-slack-on-failure: + runs-on: ubuntu-latest + if: always() && failure() && github.ref == 'refs/heads/main' + + steps: + - name: Send Slack notification + run: | + curl -X POST -H 'Content-type: application/json' \ + --data '{ + "blocks": [ + { + "type": "header", + "text": { + "type": "plain_text", + "text": "❌ CI Failure in `main`", + "emoji": true + } + }, + { + "type": "section", + "fields": [ + { + "type": "mrkdwn", + "text": "*Workflow:*\n${{ github.workflow }}" + }, + { + "type": "mrkdwn", + "text": "*Failed Job:*\n${{ github.job }}" + }, + { + "type": "mrkdwn", + "text": "*Committer:*\n${{ github.actor }}" + }, + { + "type": "mrkdwn", + "text": "*Commit:*\n${{ github.sha }}" + } + ] + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "*View failure:* <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Click here>" + } + } + ] + }' ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} From a7fe35af2523809178aea0c8d55482207c341a04 Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Mon, 13 Jan 2025 20:51:55 +1100 Subject: [PATCH 0008/1096] fix: use `netstat` over `ss` when testing unix socket (#16103) Closes https://github.com/coder/internal/issues/274. `TestSSH/RemoteForwardUnixSocket` previously used `ss` for confirming if a socket was listening. `ss` isn't available on macOS, causing the test to flake. 
The test previously passed on macOS because a 2 could always be read on the SSH connection, presumably as part of some escape sequence. I confirmed that the test passed on Linux even with the `ss` command commented out; the pty would always read a sequence ending in `[?2`. --- cli/ssh_test.go | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/cli/ssh_test.go b/cli/ssh_test.go index 4fd52971df1cf..8006297f0c3e1 100644 --- a/cli/ssh_test.go +++ b/cli/ssh_test.go @@ -1146,9 +1146,8 @@ func TestSSH(t *testing.T) { // started and accepting input on stdin. _ = pty.Peek(ctx, 1) - // Download the test page - pty.WriteLine(fmt.Sprintf("ss -xl state listening src %s | wc -l", remoteSock)) - pty.ExpectMatch("2") + pty.WriteLine(fmt.Sprintf("netstat -an | grep -q %s; echo \"returned $?\"", remoteSock)) + pty.ExpectMatchContext(ctx, "returned 0") // And we're done. pty.WriteLine("exit") From 73d8dde6edfdbf779b27d1812747db7bc8ab713f Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Mon, 13 Jan 2025 12:15:15 +0200 Subject: [PATCH 0009/1096] chore: notify #dev of nightly gauntlet failures (#16105) Expands on https://github.com/coder/coder/pull/16102 This workflow is currently failing every night, so this will not only raise immediate awareness but will also make this job easy to validate. Signed-off-by: Danny Kopping --- .github/workflows/ci.yaml | 2 +- .github/workflows/nightly-gauntlet.yaml | 49 +++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1a2ce44f8aec3..4f357727b6278 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1263,7 +1263,7 @@ jobs: "type": "header", "text": { "type": "plain_text", - "text": "❌ CI Failure in `main`", + "text": "❌ CI Failure in main", "emoji": true } }, diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index 8aa74f1825dd7..3208c97a9e6bc 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -72,3 +72,52 @@ jobs: if: always() with: api-key: ${{ secrets.DATADOG_API_KEY }} + + notify-slack-on-failure: + runs-on: ubuntu-latest + if: always() && failure() + + steps: + - name: Send Slack notification + run: | + curl -X POST -H 'Content-type: application/json' \ + --data '{ + "blocks": [ + { + "type": "header", + "text": { + "type": "plain_text", + "text": "❌ Nightly gauntlet failed", + "emoji": true + } + }, + { + "type": "section", + "fields": [ + { + "type": "mrkdwn", + "text": "*Workflow:*\n${{ github.workflow }}" + }, + { + "type": "mrkdwn", + "text": "*Failed Job:*\n${{ github.job }}" + }, + { + "type": "mrkdwn", + "text": "*Committer:*\n${{ github.actor }}" + }, + { + "type": "mrkdwn", + "text": "*Commit:*\n${{ github.sha }}" + } + ] + }, + { + "type": "section", + "text": { + "type": "mrkdwn", + "text": "*View failure:* <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Click here>" + } + } + ] + }' ${{ secrets.CI_FAILURE_SLACK_WEBHOOK }} From 4543b21b7c69e19c6a3c352de40ff452e8ab19e3 Mon Sep 17 00:00:00 2001 From: Sas Swart Date: Mon, 13 Jan 2025 13:08:16 +0200 Subject: [PATCH 0010/1096] feat(coderd/database): track user status changes over time (#16019) RE: https://github.com/coder/coder/issues/15740, https://github.com/coder/coder/issues/15297 In order to add a graph to the coder frontend to show user status over time as an indicator of license usage, this PR adds the following: * a new
`api.insightsUserStatusCounts` endpoint to the API * which calls a new `GetUserStatusCounts` query from postgres * which relies on two new tables `user_status_changes` and `user_deleted` * which are populated by a new trigger and function that track updates to the users table The chart itself will be added in a subsequent PR --------- Co-authored-by: Mathias Fredriksson --- Makefile | 5 +- coderd/apidoc/docs.go | 61 ++ coderd/apidoc/swagger.json | 57 ++ coderd/coderd.go | 1 + coderd/database/dbauthz/dbauthz.go | 7 + coderd/database/dbauthz/dbauthz_test.go | 6 + coderd/database/dbmem/dbmem.go | 56 ++ coderd/database/dbmetrics/querymetrics.go | 7 + coderd/database/dbmock/dbmock.go | 15 + coderd/database/dump.sql | 65 +++ coderd/database/foreign_key_constraint.go | 2 + .../000283_user_status_changes.down.sql | 9 + .../000283_user_status_changes.up.sql | 75 +++ .../000283_user_status_changes.up.sql | 42 ++ coderd/database/models.go | 15 + coderd/database/querier.go | 13 + coderd/database/querier_test.go | 521 ++++++++++++++++++ coderd/database/queries.sql.go | 165 ++++++ coderd/database/queries/insights.sql | 131 +++++ coderd/database/unique_constraint.go | 2 + coderd/insights.go | 63 +++ codersdk/insights.go | 31 ++ docs/reference/api/insights.md | 50 ++ docs/reference/api/schemas.md | 44 ++ site/src/api/typesGenerated.ts | 16 + 25 files changed, 1456 insertions(+), 3 deletions(-) create mode 100644 coderd/database/migrations/000283_user_status_changes.down.sql create mode 100644 coderd/database/migrations/000283_user_status_changes.up.sql create mode 100644 coderd/database/migrations/testdata/fixtures/000283_user_status_changes.up.sql diff --git a/Makefile b/Makefile index 2cd40a7dabfa3..423402260c26b 100644 --- a/Makefile +++ b/Makefile @@ -521,6 +521,7 @@ lint/markdown: node_modules/.installed # All files generated by the database should be added here, and this can be used # as a target for jobs that need to run after the database is generated. DB_GEN_FILES := \ + coderd/database/dump.sql \ coderd/database/querier.go \ coderd/database/unique_constraint.go \ coderd/database/dbmem/dbmem.go \ @@ -540,8 +541,6 @@ GEN_FILES := \ provisionersdk/proto/provisioner.pb.go \ provisionerd/proto/provisionerd.pb.go \ vpn/vpn.pb.go \ - coderd/database/dump.sql \ - $(DB_GEN_FILES) \ site/src/api/typesGenerated.ts \ coderd/rbac/object_gen.go \ codersdk/rbacresources_gen.go \ @@ -559,7 +558,7 @@ GEN_FILES := \ coderd/database/pubsub/psmock/psmock.go # all gen targets should be added here and to gen/mark-fresh -gen: $(GEN_FILES) +gen: gen/db $(GEN_FILES) .PHONY: gen gen/db: $(DB_GEN_FILES) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index a8bfcb2af3b19..15da8b7eb5c36 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -1398,6 +1398,40 @@ const docTemplate = `{ } } }, + "/insights/user-status-counts": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Insights" + ], + "summary": "Get insights about user status counts", + "operationId": "get-insights-about-user-status-counts", + "parameters": [ + { + "type": "integer", + "description": "Time-zone offset (e.g.
-2)", + "name": "tz_offset", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.GetUserStatusCountsResponse" + } + } + } + } + }, "/integrations/jfrog/xray-scan": { "get": { "security": [ @@ -11207,6 +11241,20 @@ const docTemplate = `{ } } }, + "codersdk.GetUserStatusCountsResponse": { + "type": "object", + "properties": { + "status_counts": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.UserStatusChangeCount" + } + } + } + } + }, "codersdk.GetUsersResponse": { "type": "object", "properties": { @@ -14570,6 +14618,19 @@ const docTemplate = `{ "UserStatusSuspended" ] }, + "codersdk.UserStatusChangeCount": { + "type": "object", + "properties": { + "count": { + "type": "integer", + "example": 10 + }, + "date": { + "type": "string", + "format": "date-time" + } + } + }, "codersdk.ValidateUserPasswordRequest": { "type": "object", "required": [ diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index d7c32d8a33a52..df288ed1876c8 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -1219,6 +1219,36 @@ } } }, + "/insights/user-status-counts": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Insights"], + "summary": "Get insights about user status counts", + "operationId": "get-insights-about-user-status-counts", + "parameters": [ + { + "type": "integer", + "description": "Time-zone offset (e.g. -2)", + "name": "tz_offset", + "in": "query", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.GetUserStatusCountsResponse" + } + } + } + } + }, "/integrations/jfrog/xray-scan": { "get": { "security": [ @@ -10054,6 +10084,20 @@ } } }, + "codersdk.GetUserStatusCountsResponse": { + "type": "object", + "properties": { + "status_counts": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.UserStatusChangeCount" + } + } + } + } + }, "codersdk.GetUsersResponse": { "type": "object", "properties": { @@ -13244,6 +13288,19 @@ "UserStatusSuspended" ] }, + "codersdk.UserStatusChangeCount": { + "type": "object", + "properties": { + "count": { + "type": "integer", + "example": 10 + }, + "date": { + "type": "string", + "format": "date-time" + } + } + }, "codersdk.ValidateUserPasswordRequest": { "type": "object", "required": ["password"], diff --git a/coderd/coderd.go b/coderd/coderd.go index fd8a10a44f140..7b8cde9dc6ae4 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -1281,6 +1281,7 @@ func New(options *Options) *API { r.Use(apiKeyMiddleware) r.Get("/daus", api.deploymentDAUs) r.Get("/user-activity", api.insightsUserActivity) + r.Get("/user-status-counts", api.insightsUserStatusCounts) r.Get("/user-latency", api.insightsUserLatency) r.Get("/templates", api.insightsTemplates) }) diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 0a35667ed0178..a4c3208aa5e6d 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -2421,6 +2421,13 @@ func (q *querier) GetUserNotificationPreferences(ctx context.Context, userID uui return q.db.GetUserNotificationPreferences(ctx, userID) } +func (q *querier) GetUserStatusCounts(ctx context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { + if err := 
q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceUser); err != nil { + return nil, err + } + return q.db.GetUserStatusCounts(ctx, arg) +} + func (q *querier) GetUserWorkspaceBuildParameters(ctx context.Context, params database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { u, err := q.db.GetUserByID(ctx, params.OwnerID) if err != nil { diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 93e9a4318d1ed..78500792933b5 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -1708,6 +1708,12 @@ func (s *MethodTestSuite) TestUser() { rbac.ResourceTemplate.InOrg(orgID), policy.ActionRead, ) })) + s.Run("GetUserStatusCounts", s.Subtest(func(db database.Store, check *expects) { + check.Args(database.GetUserStatusCountsParams{ + StartTime: time.Now().Add(-time.Hour * 24 * 30), + EndTime: time.Now(), + }).Asserts(rbac.ResourceUser, policy.ActionRead) + })) } func (s *MethodTestSuite) TestWorkspace() { diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index d3b7b3fb35f5f..9e3c3621b3420 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -88,6 +88,7 @@ func New() database.Store { customRoles: make([]database.CustomRole, 0), locks: map[int64]struct{}{}, runtimeConfig: map[string]string{}, + userStatusChanges: make([]database.UserStatusChange, 0), }, } // Always start with a default org. Matching migration 198. @@ -256,6 +257,7 @@ type data struct { lastLicenseID int32 defaultProxyDisplayName string defaultProxyIconURL string + userStatusChanges []database.UserStatusChange } func tryPercentile(fs []float64, p float64) float64 { @@ -5669,6 +5671,42 @@ func (q *FakeQuerier) GetUserNotificationPreferences(_ context.Context, userID u return out, nil } +func (q *FakeQuerier) GetUserStatusCounts(_ context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + err := validateDatabaseType(arg) + if err != nil { + return nil, err + } + + result := make([]database.GetUserStatusCountsRow, 0) + for _, change := range q.userStatusChanges { + if change.ChangedAt.Before(arg.StartTime) || change.ChangedAt.After(arg.EndTime) { + continue + } + date := time.Date(change.ChangedAt.Year(), change.ChangedAt.Month(), change.ChangedAt.Day(), 0, 0, 0, 0, time.UTC) + if !slices.ContainsFunc(result, func(r database.GetUserStatusCountsRow) bool { + return r.Status == change.NewStatus && r.Date.Equal(date) + }) { + result = append(result, database.GetUserStatusCountsRow{ + Status: change.NewStatus, + Date: date, + Count: 1, + }) + } else { + for i, r := range result { + if r.Status == change.NewStatus && r.Date.Equal(date) { + result[i].Count++ + break + } + } + } + } + + return result, nil +} + func (q *FakeQuerier) GetUserWorkspaceBuildParameters(_ context.Context, params database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -8021,6 +8059,12 @@ func (q *FakeQuerier) InsertUser(_ context.Context, arg database.InsertUserParam sort.Slice(q.users, func(i, j int) bool { return q.users[i].CreatedAt.Before(q.users[j].CreatedAt) }) + + q.userStatusChanges = append(q.userStatusChanges, database.UserStatusChange{ + UserID: user.ID, + NewStatus: user.Status, + ChangedAt: user.UpdatedAt, + }) return user, nil } @@ -9062,12 +9106,18 @@ func (q *FakeQuerier) 
UpdateInactiveUsersToDormant(_ context.Context, params dat Username: user.Username, LastSeenAt: user.LastSeenAt, }) + q.userStatusChanges = append(q.userStatusChanges, database.UserStatusChange{ + UserID: user.ID, + NewStatus: database.UserStatusDormant, + ChangedAt: params.UpdatedAt, + }) } } if len(updated) == 0 { return nil, sql.ErrNoRows } + return updated, nil } @@ -9868,6 +9918,12 @@ func (q *FakeQuerier) UpdateUserStatus(_ context.Context, arg database.UpdateUse user.Status = arg.Status user.UpdatedAt = arg.UpdatedAt q.users[index] = user + + q.userStatusChanges = append(q.userStatusChanges, database.UserStatusChange{ + UserID: user.ID, + NewStatus: user.Status, + ChangedAt: user.UpdatedAt, + }) return user, nil } return database.User{}, sql.ErrNoRows diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index 5df5c547a20d6..599fad08779ac 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -1344,6 +1344,13 @@ func (m queryMetricsStore) GetUserNotificationPreferences(ctx context.Context, u return r0, r1 } +func (m queryMetricsStore) GetUserStatusCounts(ctx context.Context, arg database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { + start := time.Now() + r0, r1 := m.s.GetUserStatusCounts(ctx, arg) + m.queryLatencies.WithLabelValues("GetUserStatusCounts").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetUserWorkspaceBuildParameters(ctx context.Context, ownerID database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { start := time.Now() r0, r1 := m.s.GetUserWorkspaceBuildParameters(ctx, ownerID) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 6b552fe5060ff..51d0c59c1d879 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -2825,6 +2825,21 @@ func (mr *MockStoreMockRecorder) GetUserNotificationPreferences(arg0, arg1 any) return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserNotificationPreferences", reflect.TypeOf((*MockStore)(nil).GetUserNotificationPreferences), arg0, arg1) } +// GetUserStatusCounts mocks base method. +func (m *MockStore) GetUserStatusCounts(arg0 context.Context, arg1 database.GetUserStatusCountsParams) ([]database.GetUserStatusCountsRow, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetUserStatusCounts", arg0, arg1) + ret0, _ := ret[0].([]database.GetUserStatusCountsRow) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetUserStatusCounts indicates an expected call of GetUserStatusCounts. +func (mr *MockStoreMockRecorder) GetUserStatusCounts(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetUserStatusCounts", reflect.TypeOf((*MockStore)(nil).GetUserStatusCounts), arg0, arg1) +} + // GetUserWorkspaceBuildParameters mocks base method. 
func (m *MockStore) GetUserWorkspaceBuildParameters(arg0 context.Context, arg1 database.GetUserWorkspaceBuildParametersParams) ([]database.GetUserWorkspaceBuildParametersRow, error) { m.ctrl.T.Helper() diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 50519485dc505..7812f6e8e4e5a 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -423,6 +423,36 @@ $$; COMMENT ON FUNCTION provisioner_tagset_contains(provisioner_tags tagset, job_tags tagset) IS 'Returns true if the provisioner_tags contains the job_tags, or if the job_tags represents an untagged provisioner and the superset is exactly equal to the subset.'; +CREATE FUNCTION record_user_status_change() RETURNS trigger + LANGUAGE plpgsql + AS $$ +BEGIN + IF TG_OP = 'INSERT' OR OLD.status IS DISTINCT FROM NEW.status THEN + INSERT INTO user_status_changes ( + user_id, + new_status, + changed_at + ) VALUES ( + NEW.id, + NEW.status, + NEW.updated_at + ); + END IF; + + IF OLD.deleted = FALSE AND NEW.deleted = TRUE THEN + INSERT INTO user_deleted ( + user_id, + deleted_at + ) VALUES ( + NEW.id, + NEW.updated_at + ); + END IF; + + RETURN NEW; +END; +$$; + CREATE FUNCTION remove_organization_member_role() RETURNS trigger LANGUAGE plpgsql AS $$ @@ -1377,6 +1407,14 @@ CREATE VIEW template_with_names AS COMMENT ON VIEW template_with_names IS 'Joins in the display name information such as username, avatar, and organization name.'; +CREATE TABLE user_deleted ( + id uuid DEFAULT gen_random_uuid() NOT NULL, + user_id uuid NOT NULL, + deleted_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +COMMENT ON TABLE user_deleted IS 'Tracks when users were deleted'; + CREATE TABLE user_links ( user_id uuid NOT NULL, login_type login_type NOT NULL, @@ -1395,6 +1433,15 @@ COMMENT ON COLUMN user_links.oauth_refresh_token_key_id IS 'The ID of the key us COMMENT ON COLUMN user_links.claims IS 'Claims from the IDP for the linked user. Includes both id_token and userinfo claims. 
'; +CREATE TABLE user_status_changes ( + id uuid DEFAULT gen_random_uuid() NOT NULL, + user_id uuid NOT NULL, + new_status user_status NOT NULL, + changed_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +COMMENT ON TABLE user_status_changes IS 'Tracks the history of user status changes'; + CREATE TABLE workspace_agent_log_sources ( workspace_agent_id uuid NOT NULL, id uuid NOT NULL, @@ -1980,9 +2027,15 @@ ALTER TABLE ONLY template_versions ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); +ALTER TABLE ONLY user_deleted + ADD CONSTRAINT user_deleted_pkey PRIMARY KEY (id); + ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); +ALTER TABLE ONLY user_status_changes + ADD CONSTRAINT user_status_changes_pkey PRIMARY KEY (id); + ALTER TABLE ONLY users ADD CONSTRAINT users_pkey PRIMARY KEY (id); @@ -2093,6 +2146,10 @@ CREATE INDEX idx_tailnet_tunnels_dst_id ON tailnet_tunnels USING hash (dst_id); CREATE INDEX idx_tailnet_tunnels_src_id ON tailnet_tunnels USING hash (src_id); +CREATE INDEX idx_user_deleted_deleted_at ON user_deleted USING btree (deleted_at); + +CREATE INDEX idx_user_status_changes_changed_at ON user_status_changes USING btree (changed_at); + CREATE UNIQUE INDEX idx_users_email ON users USING btree (email) WHERE (deleted = false); CREATE UNIQUE INDEX idx_users_username ON users USING btree (username) WHERE (deleted = false); @@ -2235,6 +2292,8 @@ CREATE TRIGGER trigger_upsert_user_links BEFORE INSERT OR UPDATE ON user_links F CREATE TRIGGER update_notification_message_dedupe_hash BEFORE INSERT OR UPDATE ON notification_messages FOR EACH ROW EXECUTE FUNCTION compute_notification_message_dedupe_hash(); +CREATE TRIGGER user_status_change_trigger AFTER INSERT OR UPDATE ON users FOR EACH ROW EXECUTE FUNCTION record_user_status_change(); + ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; @@ -2358,6 +2417,9 @@ ALTER TABLE ONLY templates ALTER TABLE ONLY templates ADD CONSTRAINT templates_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; +ALTER TABLE ONLY user_deleted + ADD CONSTRAINT user_deleted_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); + ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); @@ -2367,6 +2429,9 @@ ALTER TABLE ONLY user_links ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; +ALTER TABLE ONLY user_status_changes + ADD CONSTRAINT user_status_changes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); + ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go index 669ab85f945bd..52f98a679a71b 100644 --- a/coderd/database/foreign_key_constraint.go +++ b/coderd/database/foreign_key_constraint.go @@ -47,9 +47,11 @@ const ( ForeignKeyTemplateVersionsTemplateID ForeignKeyConstraint = "template_versions_template_id_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_fkey FOREIGN KEY (template_id) REFERENCES templates(id) ON DELETE CASCADE; ForeignKeyTemplatesCreatedBy 
ForeignKeyConstraint = "templates_created_by_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; ForeignKeyTemplatesOrganizationID ForeignKeyConstraint = "templates_organization_id_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyUserDeletedUserID ForeignKeyConstraint = "user_deleted_user_id_fkey" // ALTER TABLE ONLY user_deleted ADD CONSTRAINT user_deleted_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); ForeignKeyUserLinksOauthAccessTokenKeyID ForeignKeyConstraint = "user_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); ForeignKeyUserLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "user_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); ForeignKeyUserLinksUserID ForeignKeyConstraint = "user_links_user_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyUserStatusChangesUserID ForeignKeyConstraint = "user_status_changes_user_id_fkey" // ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); ForeignKeyWorkspaceAgentLogSourcesWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_log_sources_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; ForeignKeyWorkspaceAgentMetadataWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_metadata_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; ForeignKeyWorkspaceAgentPortShareWorkspaceID ForeignKeyConstraint = "workspace_agent_port_share_workspace_id_fkey" // ALTER TABLE ONLY workspace_agent_port_share ADD CONSTRAINT workspace_agent_port_share_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; diff --git a/coderd/database/migrations/000283_user_status_changes.down.sql b/coderd/database/migrations/000283_user_status_changes.down.sql new file mode 100644 index 0000000000000..fbe85a6be0fe5 --- /dev/null +++ b/coderd/database/migrations/000283_user_status_changes.down.sql @@ -0,0 +1,9 @@ +DROP TRIGGER IF EXISTS user_status_change_trigger ON users; + +DROP FUNCTION IF EXISTS record_user_status_change(); + +DROP INDEX IF EXISTS idx_user_status_changes_changed_at; +DROP INDEX IF EXISTS idx_user_deleted_deleted_at; + +DROP TABLE IF EXISTS user_status_changes; +DROP TABLE IF EXISTS user_deleted; diff --git a/coderd/database/migrations/000283_user_status_changes.up.sql b/coderd/database/migrations/000283_user_status_changes.up.sql new file mode 100644 index 0000000000000..d712465851eff --- /dev/null +++ b/coderd/database/migrations/000283_user_status_changes.up.sql @@ -0,0 +1,75 @@ +CREATE TABLE user_status_changes ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + 
user_id uuid NOT NULL REFERENCES users(id), + new_status user_status NOT NULL, + changed_at timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +COMMENT ON TABLE user_status_changes IS 'Tracks the history of user status changes'; + +CREATE INDEX idx_user_status_changes_changed_at ON user_status_changes(changed_at); + +INSERT INTO user_status_changes ( + user_id, + new_status, + changed_at +) +SELECT + id, + status, + created_at +FROM users +WHERE NOT deleted; + +CREATE TABLE user_deleted ( + id uuid PRIMARY KEY DEFAULT gen_random_uuid(), + user_id uuid NOT NULL REFERENCES users(id), + deleted_at timestamptz NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +COMMENT ON TABLE user_deleted IS 'Tracks when users were deleted'; + +CREATE INDEX idx_user_deleted_deleted_at ON user_deleted(deleted_at); + +INSERT INTO user_deleted ( + user_id, + deleted_at +) +SELECT + id, + updated_at +FROM users +WHERE deleted; + +CREATE OR REPLACE FUNCTION record_user_status_change() RETURNS trigger AS $$ +BEGIN + IF TG_OP = 'INSERT' OR OLD.status IS DISTINCT FROM NEW.status THEN + INSERT INTO user_status_changes ( + user_id, + new_status, + changed_at + ) VALUES ( + NEW.id, + NEW.status, + NEW.updated_at + ); + END IF; + + IF OLD.deleted = FALSE AND NEW.deleted = TRUE THEN + INSERT INTO user_deleted ( + user_id, + deleted_at + ) VALUES ( + NEW.id, + NEW.updated_at + ); + END IF; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +CREATE TRIGGER user_status_change_trigger + AFTER INSERT OR UPDATE ON users + FOR EACH ROW + EXECUTE FUNCTION record_user_status_change(); diff --git a/coderd/database/migrations/testdata/fixtures/000283_user_status_changes.up.sql b/coderd/database/migrations/testdata/fixtures/000283_user_status_changes.up.sql new file mode 100644 index 0000000000000..9559fa3ad0df8 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000283_user_status_changes.up.sql @@ -0,0 +1,42 @@ +INSERT INTO + users ( + id, + email, + username, + hashed_password, + created_at, + updated_at, + status, + rbac_roles, + login_type, + avatar_url, + last_seen_at, + quiet_hours_schedule, + theme_preference, + name, + github_com_user_id, + hashed_one_time_passcode, + one_time_passcode_expires_at + ) + VALUES ( + '5755e622-fadd-44ca-98da-5df070491844', -- uuid + 'test@example.com', + 'testuser', + 'hashed_password', + '2024-01-01 00:00:00', + '2024-01-01 00:00:00', + 'active', + '{}', + 'password', + '', + '2024-01-01 00:00:00', + '', + '', + '', + 123, + NULL, + NULL + ); + +UPDATE users SET status = 'dormant', updated_at = '2024-01-01 01:00:00' WHERE id = '5755e622-fadd-44ca-98da-5df070491844'; +UPDATE users SET deleted = true, updated_at = '2024-01-01 02:00:00' WHERE id = '5755e622-fadd-44ca-98da-5df070491844'; diff --git a/coderd/database/models.go b/coderd/database/models.go index 9ca80d119a502..35484c7856e14 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -2953,6 +2953,13 @@ type User struct { OneTimePasscodeExpiresAt sql.NullTime `db:"one_time_passcode_expires_at" json:"one_time_passcode_expires_at"` } +// Tracks when users were deleted +type UserDeleted struct { + ID uuid.UUID `db:"id" json:"id"` + UserID uuid.UUID `db:"user_id" json:"user_id"` + DeletedAt time.Time `db:"deleted_at" json:"deleted_at"` +} + type UserLink struct { UserID uuid.UUID `db:"user_id" json:"user_id"` LoginType LoginType `db:"login_type" json:"login_type"` @@ -2968,6 +2975,14 @@ type UserLink struct { Claims UserLinkClaims `db:"claims" json:"claims"` } +// Tracks the history of user status changes +type 
UserStatusChange struct { + ID uuid.UUID `db:"id" json:"id"` + UserID uuid.UUID `db:"user_id" json:"user_id"` + NewStatus UserStatus `db:"new_status" json:"new_status"` + ChangedAt time.Time `db:"changed_at" json:"changed_at"` +} + // Visible fields of users are allowed to be joined with other tables for including context of other resources. type VisibleUser struct { ID uuid.UUID `db:"id" json:"id"` diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 620cc14b3fd26..ba151e0e8abb0 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -289,6 +289,19 @@ type sqlcQuerier interface { GetUserLinkByUserIDLoginType(ctx context.Context, arg GetUserLinkByUserIDLoginTypeParams) (UserLink, error) GetUserLinksByUserID(ctx context.Context, userID uuid.UUID) ([]UserLink, error) GetUserNotificationPreferences(ctx context.Context, userID uuid.UUID) ([]NotificationPreference, error) + // GetUserStatusCounts returns the count of users in each status over time. + // The time range is inclusively defined by the start_time and end_time parameters. + // + // Bucketing: + // Between the start_time and end_time, we include each timestamp where a user's status changed or they were deleted. + // We do not bucket these results by day or some other time unit. This is because such bucketing would hide potentially + // important patterns. If a user was active for 23 hours and 59 minutes, and then suspended, a daily bucket would hide this. + // A daily bucket would also have required us to carefully manage the timezone of the bucket based on the timezone of the user. + // + // Accumulation: + // We do not start counting from 0 at the start_time. We check the last status change before the start_time for each user. As such, + // the result shows the total number of users in each status on any particular day. + GetUserStatusCounts(ctx context.Context, arg GetUserStatusCountsParams) ([]GetUserStatusCountsRow, error) GetUserWorkspaceBuildParameters(ctx context.Context, arg GetUserWorkspaceBuildParametersParams) ([]GetUserWorkspaceBuildParametersRow, error) // This will never return deleted users. 
GetUsers(ctx context.Context, arg GetUsersParams) ([]GetUsersRow, error) diff --git a/coderd/database/querier_test.go b/coderd/database/querier_test.go index 28d7108ae31ad..0c7a445ed7104 100644 --- a/coderd/database/querier_test.go +++ b/coderd/database/querier_test.go @@ -7,6 +7,7 @@ import ( "database/sql" "encoding/json" "fmt" + "maps" "sort" "testing" "time" @@ -2255,6 +2256,526 @@ func TestGroupRemovalTrigger(t *testing.T) { }, db2sdk.List(extraUserGroups, onlyGroupIDs)) } +func TestGetUserStatusCounts(t *testing.T) { + t.Parallel() + + if !dbtestutil.WillUsePostgres() { + t.SkipNow() + } + + timezones := []string{ + "Canada/Newfoundland", + "Africa/Johannesburg", + "America/New_York", + "Europe/London", + "Asia/Tokyo", + "Australia/Sydney", + } + + for _, tz := range timezones { + tz := tz + t.Run(tz, func(t *testing.T) { + t.Parallel() + + location, err := time.LoadLocation(tz) + if err != nil { + t.Fatalf("failed to load location: %v", err) + } + today := dbtime.Now().In(location) + createdAt := today.Add(-5 * 24 * time.Hour) + firstTransitionTime := createdAt.Add(2 * 24 * time.Hour) + secondTransitionTime := firstTransitionTime.Add(2 * 24 * time.Hour) + + t.Run("No Users", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + end := dbtime.Now() + start := end.Add(-30 * 24 * time.Hour) + + counts, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: start, + EndTime: end, + }) + require.NoError(t, err) + require.Empty(t, counts, "should return no results when there are no users") + }) + + t.Run("One User/Creation Only", func(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + status database.UserStatus + }{ + { + name: "Active Only", + status: database.UserStatusActive, + }, + { + name: "Dormant Only", + status: database.UserStatusDormant, + }, + { + name: "Suspended Only", + status: database.UserStatusSuspended, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + // Create a user that's been in the specified status for the past 30 days + dbgen.User(t, db, database.User{ + Status: tc.status, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + // Query for the last 30 days + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: createdAt, + EndTime: today, + }) + require.NoError(t, err) + require.NotEmpty(t, userStatusChanges, "should return results") + + require.Len(t, userStatusChanges, 2, "should have 1 entry per status change plus and 1 entry for the end of the range = 2 entries") + + require.Equal(t, userStatusChanges[0].Status, tc.status, "should have the correct status") + require.Equal(t, userStatusChanges[0].Count, int64(1), "should have 1 user") + require.True(t, userStatusChanges[0].Date.Equal(createdAt), "should have the correct date") + + require.Equal(t, userStatusChanges[1].Status, tc.status, "should have the correct status") + require.Equal(t, userStatusChanges[1].Count, int64(1), "should have 1 user") + require.True(t, userStatusChanges[1].Date.Equal(today), "should have the correct date") + }) + } + }) + + t.Run("One User/One Transition", func(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + initialStatus database.UserStatus + targetStatus database.UserStatus + expectedCounts map[time.Time]map[database.UserStatus]int64 + }{ + { + 
name: "Active to Dormant", + initialStatus: database.UserStatusActive, + targetStatus: database.UserStatusDormant, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusActive: 1, + database.UserStatusDormant: 0, + }, + firstTransitionTime: { + database.UserStatusDormant: 1, + database.UserStatusActive: 0, + }, + today: { + database.UserStatusDormant: 1, + database.UserStatusActive: 0, + }, + }, + }, + { + name: "Active to Suspended", + initialStatus: database.UserStatusActive, + targetStatus: database.UserStatusSuspended, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusActive: 1, + database.UserStatusSuspended: 0, + }, + firstTransitionTime: { + database.UserStatusSuspended: 1, + database.UserStatusActive: 0, + }, + today: { + database.UserStatusSuspended: 1, + database.UserStatusActive: 0, + }, + }, + }, + { + name: "Dormant to Active", + initialStatus: database.UserStatusDormant, + targetStatus: database.UserStatusActive, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusDormant: 1, + database.UserStatusActive: 0, + }, + firstTransitionTime: { + database.UserStatusActive: 1, + database.UserStatusDormant: 0, + }, + today: { + database.UserStatusActive: 1, + database.UserStatusDormant: 0, + }, + }, + }, + { + name: "Dormant to Suspended", + initialStatus: database.UserStatusDormant, + targetStatus: database.UserStatusSuspended, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusDormant: 1, + database.UserStatusSuspended: 0, + }, + firstTransitionTime: { + database.UserStatusSuspended: 1, + database.UserStatusDormant: 0, + }, + today: { + database.UserStatusSuspended: 1, + database.UserStatusDormant: 0, + }, + }, + }, + { + name: "Suspended to Active", + initialStatus: database.UserStatusSuspended, + targetStatus: database.UserStatusActive, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusSuspended: 1, + database.UserStatusActive: 0, + }, + firstTransitionTime: { + database.UserStatusActive: 1, + database.UserStatusSuspended: 0, + }, + today: { + database.UserStatusActive: 1, + database.UserStatusSuspended: 0, + }, + }, + }, + { + name: "Suspended to Dormant", + initialStatus: database.UserStatusSuspended, + targetStatus: database.UserStatusDormant, + expectedCounts: map[time.Time]map[database.UserStatus]int64{ + createdAt: { + database.UserStatusSuspended: 1, + database.UserStatusDormant: 0, + }, + firstTransitionTime: { + database.UserStatusDormant: 1, + database.UserStatusSuspended: 0, + }, + today: { + database.UserStatusDormant: 1, + database.UserStatusSuspended: 0, + }, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + // Create a user that starts with initial status + user := dbgen.User(t, db, database.User{ + Status: tc.initialStatus, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + // After 2 days, change status to target status + user, err := db.UpdateUserStatus(ctx, database.UpdateUserStatusParams{ + ID: user.ID, + Status: tc.targetStatus, + UpdatedAt: firstTransitionTime, + }) + require.NoError(t, err) + + // Query for the last 5 days + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: createdAt, + EndTime: today, + }) + 
require.NoError(t, err) + require.NotEmpty(t, userStatusChanges, "should return results") + + gotCounts := map[time.Time]map[database.UserStatus]int64{} + for _, row := range userStatusChanges { + gotDateInLocation := row.Date.In(location) + if _, ok := gotCounts[gotDateInLocation]; !ok { + gotCounts[gotDateInLocation] = map[database.UserStatus]int64{} + } + if _, ok := gotCounts[gotDateInLocation][row.Status]; !ok { + gotCounts[gotDateInLocation][row.Status] = 0 + } + gotCounts[gotDateInLocation][row.Status] += row.Count + } + require.Equal(t, tc.expectedCounts, gotCounts) + }) + } + }) + + t.Run("Two Users/One Transition", func(t *testing.T) { + t.Parallel() + + type transition struct { + from database.UserStatus + to database.UserStatus + } + + type testCase struct { + name string + user1Transition transition + user2Transition transition + } + + testCases := []testCase{ + { + name: "Active->Dormant and Dormant->Suspended", + user1Transition: transition{ + from: database.UserStatusActive, + to: database.UserStatusDormant, + }, + user2Transition: transition{ + from: database.UserStatusDormant, + to: database.UserStatusSuspended, + }, + }, + { + name: "Suspended->Active and Active->Dormant", + user1Transition: transition{ + from: database.UserStatusSuspended, + to: database.UserStatusActive, + }, + user2Transition: transition{ + from: database.UserStatusActive, + to: database.UserStatusDormant, + }, + }, + { + name: "Dormant->Active and Suspended->Dormant", + user1Transition: transition{ + from: database.UserStatusDormant, + to: database.UserStatusActive, + }, + user2Transition: transition{ + from: database.UserStatusSuspended, + to: database.UserStatusDormant, + }, + }, + { + name: "Active->Suspended and Suspended->Active", + user1Transition: transition{ + from: database.UserStatusActive, + to: database.UserStatusSuspended, + }, + user2Transition: transition{ + from: database.UserStatusSuspended, + to: database.UserStatusActive, + }, + }, + { + name: "Dormant->Suspended and Dormant->Active", + user1Transition: transition{ + from: database.UserStatusDormant, + to: database.UserStatusSuspended, + }, + user2Transition: transition{ + from: database.UserStatusDormant, + to: database.UserStatusActive, + }, + }, + } + + for _, tc := range testCases { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + user1 := dbgen.User(t, db, database.User{ + Status: tc.user1Transition.from, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + user2 := dbgen.User(t, db, database.User{ + Status: tc.user2Transition.from, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + // First transition at 2 days + user1, err := db.UpdateUserStatus(ctx, database.UpdateUserStatusParams{ + ID: user1.ID, + Status: tc.user1Transition.to, + UpdatedAt: firstTransitionTime, + }) + require.NoError(t, err) + + // Second transition at 4 days + user2, err = db.UpdateUserStatus(ctx, database.UpdateUserStatusParams{ + ID: user2.ID, + Status: tc.user2Transition.to, + UpdatedAt: secondTransitionTime, + }) + require.NoError(t, err) + + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: createdAt, + EndTime: today, + }) + require.NoError(t, err) + require.NotEmpty(t, userStatusChanges) + gotCounts := map[time.Time]map[database.UserStatus]int64{ + createdAt.In(location): {}, + firstTransitionTime.In(location): {}, + secondTransitionTime.In(location): {}, + today.In(location): {}, + } + for 
_, row := range userStatusChanges { + dateInLocation := row.Date.In(location) + switch { + case dateInLocation.Equal(createdAt.In(location)): + gotCounts[createdAt][row.Status] = row.Count + case dateInLocation.Equal(firstTransitionTime.In(location)): + gotCounts[firstTransitionTime][row.Status] = row.Count + case dateInLocation.Equal(secondTransitionTime.In(location)): + gotCounts[secondTransitionTime][row.Status] = row.Count + case dateInLocation.Equal(today.In(location)): + gotCounts[today][row.Status] = row.Count + default: + t.Fatalf("unexpected date %s", row.Date) + } + } + + expectedCounts := map[time.Time]map[database.UserStatus]int64{} + for _, status := range []database.UserStatus{ + tc.user1Transition.from, + tc.user1Transition.to, + tc.user2Transition.from, + tc.user2Transition.to, + } { + if _, ok := expectedCounts[createdAt]; !ok { + expectedCounts[createdAt] = map[database.UserStatus]int64{} + } + expectedCounts[createdAt][status] = 0 + } + + expectedCounts[createdAt][tc.user1Transition.from]++ + expectedCounts[createdAt][tc.user2Transition.from]++ + + expectedCounts[firstTransitionTime] = map[database.UserStatus]int64{} + maps.Copy(expectedCounts[firstTransitionTime], expectedCounts[createdAt]) + expectedCounts[firstTransitionTime][tc.user1Transition.from]-- + expectedCounts[firstTransitionTime][tc.user1Transition.to]++ + + expectedCounts[secondTransitionTime] = map[database.UserStatus]int64{} + maps.Copy(expectedCounts[secondTransitionTime], expectedCounts[firstTransitionTime]) + expectedCounts[secondTransitionTime][tc.user2Transition.from]-- + expectedCounts[secondTransitionTime][tc.user2Transition.to]++ + + expectedCounts[today] = map[database.UserStatus]int64{} + maps.Copy(expectedCounts[today], expectedCounts[secondTransitionTime]) + + require.Equal(t, expectedCounts[createdAt], gotCounts[createdAt]) + require.Equal(t, expectedCounts[firstTransitionTime], gotCounts[firstTransitionTime]) + require.Equal(t, expectedCounts[secondTransitionTime], gotCounts[secondTransitionTime]) + require.Equal(t, expectedCounts[today], gotCounts[today]) + }) + } + }) + + t.Run("User precedes and survives query range", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + _ = dbgen.User(t, db, database.User{ + Status: database.UserStatusActive, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: createdAt.Add(time.Hour * 24), + EndTime: today, + }) + require.NoError(t, err) + + require.Len(t, userStatusChanges, 2) + require.Equal(t, userStatusChanges[0].Count, int64(1)) + require.Equal(t, userStatusChanges[0].Status, database.UserStatusActive) + require.Equal(t, userStatusChanges[1].Count, int64(1)) + require.Equal(t, userStatusChanges[1].Status, database.UserStatusActive) + }) + + t.Run("User deleted before query range", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + user := dbgen.User(t, db, database.User{ + Status: database.UserStatusActive, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + err = db.UpdateUserDeletedByID(ctx, user.ID) + require.NoError(t, err) + + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: today.Add(time.Hour * 24), + EndTime: today.Add(time.Hour * 48), + }) + require.NoError(t, err) + require.Empty(t, userStatusChanges) + }) + + t.Run("User deleted during query 
range", func(t *testing.T) { + t.Parallel() + db, _ := dbtestutil.NewDB(t) + ctx := testutil.Context(t, testutil.WaitShort) + + user := dbgen.User(t, db, database.User{ + Status: database.UserStatusActive, + CreatedAt: createdAt, + UpdatedAt: createdAt, + }) + + err := db.UpdateUserDeletedByID(ctx, user.ID) + require.NoError(t, err) + + userStatusChanges, err := db.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: createdAt, + EndTime: today.Add(time.Hour * 24), + }) + require.NoError(t, err) + require.Equal(t, userStatusChanges[0].Count, int64(1)) + require.Equal(t, userStatusChanges[0].Status, database.UserStatusActive) + require.Equal(t, userStatusChanges[1].Count, int64(0)) + require.Equal(t, userStatusChanges[1].Status, database.UserStatusActive) + require.Equal(t, userStatusChanges[2].Count, int64(0)) + require.Equal(t, userStatusChanges[2].Status, database.UserStatusActive) + }) + }) + } +} + func requireUsersMatch(t testing.TB, expected []database.User, found []database.GetUsersRow, msg string) { t.Helper() require.ElementsMatch(t, expected, database.ConvertUserRows(found), msg) diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 8fbb7c0b5be6c..9301e4b6f725c 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -3094,6 +3094,171 @@ func (q *sqlQuerier) GetUserLatencyInsights(ctx context.Context, arg GetUserLate return items, nil } +const getUserStatusCounts = `-- name: GetUserStatusCounts :many +WITH + -- dates_of_interest defines all points in time that are relevant to the query. + -- It includes the start_time, all status changes, all deletions, and the end_time. +dates_of_interest AS ( + SELECT $1::timestamptz AS date + + UNION + + SELECT DISTINCT changed_at AS date + FROM user_status_changes + WHERE changed_at > $1::timestamptz + AND changed_at < $2::timestamptz + + UNION + + SELECT DISTINCT deleted_at AS date + FROM user_deleted + WHERE deleted_at > $1::timestamptz + AND deleted_at < $2::timestamptz + + UNION + + SELECT $2::timestamptz AS date +), + -- latest_status_before_range defines the status of each user before the start_time. + -- We do not include users who were deleted before the start_time. We use this to ensure that + -- we correctly count users prior to the start_time for a complete graph. +latest_status_before_range AS ( + SELECT + DISTINCT usc.user_id, + usc.new_status, + usc.changed_at, + ud.deleted + FROM user_status_changes usc + LEFT JOIN LATERAL ( + SELECT COUNT(*) > 0 AS deleted + FROM user_deleted ud + WHERE ud.user_id = usc.user_id AND (ud.deleted_at < usc.changed_at OR ud.deleted_at < $1) + ) AS ud ON true + WHERE usc.changed_at < $1::timestamptz + ORDER BY usc.user_id, usc.changed_at DESC +), + -- status_changes_during_range defines the status of each user during the start_time and end_time. + -- If a user is deleted during the time range, we count status changes between the start_time and the deletion date. + -- Theoretically, it should probably not be possible to update the status of a deleted user, but we + -- need to ensure that this is enforced, so that a change in business logic later does not break this graph. 
+status_changes_during_range AS ( + SELECT + usc.user_id, + usc.new_status, + usc.changed_at, + ud.deleted + FROM user_status_changes usc + LEFT JOIN LATERAL ( + SELECT COUNT(*) > 0 AS deleted + FROM user_deleted ud + WHERE ud.user_id = usc.user_id AND ud.deleted_at < usc.changed_at + ) AS ud ON true + WHERE usc.changed_at >= $1::timestamptz + AND usc.changed_at <= $2::timestamptz +), + -- relevant_status_changes defines the status of each user at any point in time. + -- It includes the status of each user before the start_time, and the status of each user during the start_time and end_time. +relevant_status_changes AS ( + SELECT + user_id, + new_status, + changed_at + FROM latest_status_before_range + WHERE NOT deleted + + UNION ALL + + SELECT + user_id, + new_status, + changed_at + FROM status_changes_during_range + WHERE NOT deleted +), + -- statuses defines all the distinct statuses that were present just before and during the time range. + -- This is used to ensure that we have a series for every relevant status. +statuses AS ( + SELECT DISTINCT new_status FROM relevant_status_changes +), + -- We only want to count the latest status change for each user on each date and then filter them by the relevant status. + -- We use the row_number function to ensure that we only count the latest status change for each user on each date. + -- We then filter the status changes by the relevant status in the final select statement below. +ranked_status_change_per_user_per_date AS ( + SELECT + d.date, + rsc1.user_id, + ROW_NUMBER() OVER (PARTITION BY d.date, rsc1.user_id ORDER BY rsc1.changed_at DESC) AS rn, + rsc1.new_status + FROM dates_of_interest d + LEFT JOIN relevant_status_changes rsc1 ON rsc1.changed_at <= d.date +) +SELECT + rscpupd.date, + statuses.new_status AS status, + COUNT(rscpupd.user_id) FILTER ( + WHERE rscpupd.rn = 1 + AND ( + rscpupd.new_status = statuses.new_status + AND ( + -- Include users who haven't been deleted + NOT EXISTS (SELECT 1 FROM user_deleted WHERE user_id = rscpupd.user_id) + OR + -- Or users whose deletion date is after the current date we're looking at + rscpupd.date < (SELECT deleted_at FROM user_deleted WHERE user_id = rscpupd.user_id) + ) + ) + ) AS count +FROM ranked_status_change_per_user_per_date rscpupd +CROSS JOIN statuses +GROUP BY rscpupd.date, statuses.new_status +` + +type GetUserStatusCountsParams struct { + StartTime time.Time `db:"start_time" json:"start_time"` + EndTime time.Time `db:"end_time" json:"end_time"` +} + +type GetUserStatusCountsRow struct { + Date time.Time `db:"date" json:"date"` + Status UserStatus `db:"status" json:"status"` + Count int64 `db:"count" json:"count"` +} + +// GetUserStatusCounts returns the count of users in each status over time. +// The time range is inclusively defined by the start_time and end_time parameters. +// +// Bucketing: +// Between the start_time and end_time, we include each timestamp where a user's status changed or they were deleted. +// We do not bucket these results by day or some other time unit. This is because such bucketing would hide potentially +// important patterns. If a user was active for 23 hours and 59 minutes, and then suspended, a daily bucket would hide this. +// A daily bucket would also have required us to carefully manage the timezone of the bucket based on the timezone of the user. +// +// Accumulation: +// We do not start counting from 0 at the start_time. We check the last status change before the start_time for each user. 
As such, +// the result shows the total number of users in each status on any particular day. +func (q *sqlQuerier) GetUserStatusCounts(ctx context.Context, arg GetUserStatusCountsParams) ([]GetUserStatusCountsRow, error) { + rows, err := q.db.QueryContext(ctx, getUserStatusCounts, arg.StartTime, arg.EndTime) + if err != nil { + return nil, err + } + defer rows.Close() + var items []GetUserStatusCountsRow + for rows.Next() { + var i GetUserStatusCountsRow + if err := rows.Scan(&i.Date, &i.Status, &i.Count); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const upsertTemplateUsageStats = `-- name: UpsertTemplateUsageStats :exec WITH latest_start AS ( diff --git a/coderd/database/queries/insights.sql b/coderd/database/queries/insights.sql index de107bc0e80c7..c6c6a78fc4b73 100644 --- a/coderd/database/queries/insights.sql +++ b/coderd/database/queries/insights.sql @@ -771,3 +771,134 @@ SELECT FROM unique_template_params utp JOIN workspace_build_parameters wbp ON (utp.workspace_build_ids @> ARRAY[wbp.workspace_build_id] AND utp.name = wbp.name) GROUP BY utp.num, utp.template_ids, utp.name, utp.type, utp.display_name, utp.description, utp.options, wbp.value; + +-- name: GetUserStatusCounts :many +-- GetUserStatusCounts returns the count of users in each status over time. +-- The time range is inclusively defined by the start_time and end_time parameters. +-- +-- Bucketing: +-- Between the start_time and end_time, we include each timestamp where a user's status changed or they were deleted. +-- We do not bucket these results by day or some other time unit. This is because such bucketing would hide potentially +-- important patterns. If a user was active for 23 hours and 59 minutes, and then suspended, a daily bucket would hide this. +-- A daily bucket would also have required us to carefully manage the timezone of the bucket based on the timezone of the user. +-- +-- Accumulation: +-- We do not start counting from 0 at the start_time. We check the last status change before the start_time for each user. As such, +-- the result shows the total number of users in each status on any particular day. +WITH + -- dates_of_interest defines all points in time that are relevant to the query. + -- It includes the start_time, all status changes, all deletions, and the end_time. +dates_of_interest AS ( + SELECT @start_time::timestamptz AS date + + UNION + + SELECT DISTINCT changed_at AS date + FROM user_status_changes + WHERE changed_at > @start_time::timestamptz + AND changed_at < @end_time::timestamptz + + UNION + + SELECT DISTINCT deleted_at AS date + FROM user_deleted + WHERE deleted_at > @start_time::timestamptz + AND deleted_at < @end_time::timestamptz + + UNION + + SELECT @end_time::timestamptz AS date +), + -- latest_status_before_range defines the status of each user before the start_time. + -- We do not include users who were deleted before the start_time. We use this to ensure that + -- we correctly count users prior to the start_time for a complete graph. 
+latest_status_before_range AS ( + SELECT + DISTINCT usc.user_id, + usc.new_status, + usc.changed_at, + ud.deleted + FROM user_status_changes usc + LEFT JOIN LATERAL ( + SELECT COUNT(*) > 0 AS deleted + FROM user_deleted ud + WHERE ud.user_id = usc.user_id AND (ud.deleted_at < usc.changed_at OR ud.deleted_at < @start_time) + ) AS ud ON true + WHERE usc.changed_at < @start_time::timestamptz + ORDER BY usc.user_id, usc.changed_at DESC +), + -- status_changes_during_range defines the status of each user during the start_time and end_time. + -- If a user is deleted during the time range, we count status changes between the start_time and the deletion date. + -- Theoretically, it should probably not be possible to update the status of a deleted user, but we + -- need to ensure that this is enforced, so that a change in business logic later does not break this graph. +status_changes_during_range AS ( + SELECT + usc.user_id, + usc.new_status, + usc.changed_at, + ud.deleted + FROM user_status_changes usc + LEFT JOIN LATERAL ( + SELECT COUNT(*) > 0 AS deleted + FROM user_deleted ud + WHERE ud.user_id = usc.user_id AND ud.deleted_at < usc.changed_at + ) AS ud ON true + WHERE usc.changed_at >= @start_time::timestamptz + AND usc.changed_at <= @end_time::timestamptz +), + -- relevant_status_changes defines the status of each user at any point in time. + -- It includes the status of each user before the start_time, and the status of each user during the start_time and end_time. +relevant_status_changes AS ( + SELECT + user_id, + new_status, + changed_at + FROM latest_status_before_range + WHERE NOT deleted + + UNION ALL + + SELECT + user_id, + new_status, + changed_at + FROM status_changes_during_range + WHERE NOT deleted +), + -- statuses defines all the distinct statuses that were present just before and during the time range. + -- This is used to ensure that we have a series for every relevant status. +statuses AS ( + SELECT DISTINCT new_status FROM relevant_status_changes +), + -- We only want to count the latest status change for each user on each date and then filter them by the relevant status. + -- We use the row_number function to ensure that we only count the latest status change for each user on each date. + -- We then filter the status changes by the relevant status in the final select statement below. 
+ranked_status_change_per_user_per_date AS ( + SELECT + d.date, + rsc1.user_id, + ROW_NUMBER() OVER (PARTITION BY d.date, rsc1.user_id ORDER BY rsc1.changed_at DESC) AS rn, + rsc1.new_status + FROM dates_of_interest d + LEFT JOIN relevant_status_changes rsc1 ON rsc1.changed_at <= d.date +) +SELECT + rscpupd.date, + statuses.new_status AS status, + COUNT(rscpupd.user_id) FILTER ( + WHERE rscpupd.rn = 1 + AND ( + rscpupd.new_status = statuses.new_status + AND ( + -- Include users who haven't been deleted + NOT EXISTS (SELECT 1 FROM user_deleted WHERE user_id = rscpupd.user_id) + OR + -- Or users whose deletion date is after the current date we're looking at + rscpupd.date < (SELECT deleted_at FROM user_deleted WHERE user_id = rscpupd.user_id) + ) + ) + ) AS count +FROM ranked_status_change_per_user_per_date rscpupd +CROSS JOIN statuses +GROUP BY rscpupd.date, statuses.new_status; + diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index f4470c6546698..f253aa98ec266 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -62,7 +62,9 @@ const ( UniqueTemplateVersionsPkey UniqueConstraint = "template_versions_pkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_pkey PRIMARY KEY (id); UniqueTemplateVersionsTemplateIDNameKey UniqueConstraint = "template_versions_template_id_name_key" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_name_key UNIQUE (template_id, name); UniqueTemplatesPkey UniqueConstraint = "templates_pkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); + UniqueUserDeletedPkey UniqueConstraint = "user_deleted_pkey" // ALTER TABLE ONLY user_deleted ADD CONSTRAINT user_deleted_pkey PRIMARY KEY (id); UniqueUserLinksPkey UniqueConstraint = "user_links_pkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); + UniqueUserStatusChangesPkey UniqueConstraint = "user_status_changes_pkey" // ALTER TABLE ONLY user_status_changes ADD CONSTRAINT user_status_changes_pkey PRIMARY KEY (id); UniqueUsersPkey UniqueConstraint = "users_pkey" // ALTER TABLE ONLY users ADD CONSTRAINT users_pkey PRIMARY KEY (id); UniqueWorkspaceAgentLogSourcesPkey UniqueConstraint = "workspace_agent_log_sources_pkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_pkey PRIMARY KEY (workspace_agent_id, id); UniqueWorkspaceAgentMetadataPkey UniqueConstraint = "workspace_agent_metadata_pkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_pkey PRIMARY KEY (workspace_agent_id, key); diff --git a/coderd/insights.go b/coderd/insights.go index d5faacee90bd5..e4695f50495fb 100644 --- a/coderd/insights.go +++ b/coderd/insights.go @@ -292,6 +292,69 @@ func (api *API) insightsUserLatency(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, resp) } +// @Summary Get insights about user status counts +// @ID get-insights-about-user-status-counts +// @Security CoderSessionToken +// @Produce json +// @Tags Insights +// @Param tz_offset query int true "Time-zone offset (e.g. 
-2)" +// @Success 200 {object} codersdk.GetUserStatusCountsResponse +// @Router /insights/user-status-counts [get] +func (api *API) insightsUserStatusCounts(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + p := httpapi.NewQueryParamParser() + vals := r.URL.Query() + tzOffset := p.Int(vals, 0, "tz_offset") + p.ErrorExcessParams(vals) + + if len(p.Errors) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Query parameters have invalid values.", + Validations: p.Errors, + }) + return + } + + loc := time.FixedZone("", tzOffset*3600) + // If the time is 14:01 or 14:31, we still want to include all the + // data between 14:00 and 15:00. Our rollups buckets are 30 minutes + // so this works nicely. It works just as well for 23:59 as well. + nextHourInLoc := time.Now().In(loc).Truncate(time.Hour).Add(time.Hour) + // Always return 60 days of data (2 months). + sixtyDaysAgo := nextHourInLoc.In(loc).Truncate(24*time.Hour).AddDate(0, 0, -60) + + rows, err := api.Database.GetUserStatusCounts(ctx, database.GetUserStatusCountsParams{ + StartTime: sixtyDaysAgo, + EndTime: nextHourInLoc, + }) + if err != nil { + if httpapi.IsUnauthorizedError(err) { + httpapi.Forbidden(rw) + return + } + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching user status counts over time.", + Detail: err.Error(), + }) + return + } + + resp := codersdk.GetUserStatusCountsResponse{ + StatusCounts: make(map[codersdk.UserStatus][]codersdk.UserStatusChangeCount), + } + + for _, row := range rows { + status := codersdk.UserStatus(row.Status) + resp.StatusCounts[status] = append(resp.StatusCounts[status], codersdk.UserStatusChangeCount{ + Date: row.Date, + Count: row.Count, + }) + } + + httpapi.Write(ctx, rw, http.StatusOK, resp) +} + // @Summary Get insights about templates // @ID get-insights-about-templates // @Security CoderSessionToken diff --git a/codersdk/insights.go b/codersdk/insights.go index c9e708de8f34a..ef44b6b8d013e 100644 --- a/codersdk/insights.go +++ b/codersdk/insights.go @@ -282,3 +282,34 @@ func (c *Client) TemplateInsights(ctx context.Context, req TemplateInsightsReque var result TemplateInsightsResponse return result, json.NewDecoder(resp.Body).Decode(&result) } + +type GetUserStatusCountsResponse struct { + StatusCounts map[UserStatus][]UserStatusChangeCount `json:"status_counts"` +} + +type UserStatusChangeCount struct { + Date time.Time `json:"date" format:"date-time"` + Count int64 `json:"count" example:"10"` +} + +type GetUserStatusCountsRequest struct { + Offset time.Time `json:"offset" format:"date-time"` +} + +func (c *Client) GetUserStatusCounts(ctx context.Context, req GetUserStatusCountsRequest) (GetUserStatusCountsResponse, error) { + qp := url.Values{} + qp.Add("offset", req.Offset.Format(insightsTimeLayout)) + + reqURL := fmt.Sprintf("/api/v2/insights/user-status-counts?%s", qp.Encode()) + resp, err := c.Request(ctx, http.MethodGet, reqURL, nil) + if err != nil { + return GetUserStatusCountsResponse{}, xerrors.Errorf("make request: %w", err) + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return GetUserStatusCountsResponse{}, ReadBodyAsError(resp) + } + var result GetUserStatusCountsResponse + return result, json.NewDecoder(resp.Body).Decode(&result) +} diff --git a/docs/reference/api/insights.md b/docs/reference/api/insights.md index e59d74ec6c7f8..b8fcdbbb1e776 100644 --- a/docs/reference/api/insights.md +++ b/docs/reference/api/insights.md @@ -260,3 +260,53 @@ 
curl -X GET http://coder-server:8080/api/v2/insights/user-latency?start_time=201 | 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.UserLatencyInsightsResponse](schemas.md#codersdkuserlatencyinsightsresponse) | To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Get insights about user status counts + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/insights/user-status-counts?tz_offset=0 \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /insights/user-status-counts` + +### Parameters + +| Name | In | Type | Required | Description | +|-------------|-------|---------|----------|----------------------------| +| `tz_offset` | query | integer | true | Time-zone offset (e.g. -2) | + +### Example responses + +> 200 Response + +```json +{ + "status_counts": { + "property1": [ + { + "count": 10, + "date": "2019-08-24T14:15:22Z" + } + ], + "property2": [ + { + "count": 10, + "date": "2019-08-24T14:15:22Z" + } + ] + } +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +|--------|---------------------------------------------------------|-------------|----------------------------------------------------------------------------------------| +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.GetUserStatusCountsResponse](schemas.md#codersdkgetuserstatuscountsresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index b6874bc5b1bc9..542294a01fa37 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -3000,6 +3000,34 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith |-------|--------|----------|--------------|-------------| | `key` | string | false | | | +## codersdk.GetUserStatusCountsResponse + +```json +{ + "status_counts": { + "property1": [ + { + "count": 10, + "date": "2019-08-24T14:15:22Z" + } + ], + "property2": [ + { + "count": 10, + "date": "2019-08-24T14:15:22Z" + } + ] + } +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|--------------------|---------------------------------------------------------------------------|----------|--------------|-------------| +| `status_counts` | object | false | | | +| » `[any property]` | array of [codersdk.UserStatusChangeCount](#codersdkuserstatuschangecount) | false | | | + ## codersdk.GetUsersResponse ```json @@ -6724,6 +6752,22 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `dormant` | | `suspended` | +## codersdk.UserStatusChangeCount + +```json +{ + "count": 10, + "date": "2019-08-24T14:15:22Z" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +|---------|---------|----------|--------------|-------------| +| `count` | integer | false | | | +| `date` | string | false | | | + ## codersdk.ValidateUserPasswordRequest ```json diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 4956de8691ed7..acb5254a61a0a 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -882,6 +882,16 @@ export interface GenerateAPIKeyResponse { readonly key: string; } +// From codersdk/insights.go +export interface GetUserStatusCountsRequest { + readonly offset: string; +} + +// From codersdk/insights.go +export interface 
GetUserStatusCountsResponse { + readonly status_counts: Record; +} + // From codersdk/users.go export interface GetUsersResponse { readonly users: readonly User[]; @@ -2690,6 +2700,12 @@ export interface UserRoles { // From codersdk/users.go export type UserStatus = "active" | "dormant" | "suspended"; +// From codersdk/insights.go +export interface UserStatusChangeCount { + readonly date: string; + readonly count: number; +} + export const UserStatuses: UserStatus[] = ["active", "dormant", "suspended"]; // From codersdk/users.go From f9f72de1d66ce153230c30391f09fa4933ff91b0 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Mon, 13 Jan 2025 13:42:37 +0200 Subject: [PATCH 0011/1096] chore: predicate slack notification job on other jobs (#16106) `always()` does not seem to work Extending https://github.com/coder/coder/pull/16105 Signed-off-by: Danny Kopping --- .github/workflows/nightly-gauntlet.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index 3208c97a9e6bc..c76eba484bfde 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -74,8 +74,11 @@ jobs: api-key: ${{ secrets.DATADOG_API_KEY }} notify-slack-on-failure: + needs: + - go-race + - go-timing runs-on: ubuntu-latest - if: always() && failure() + if: failure() steps: - name: Send Slack notification From 24dd8a17d3bf40bd39c613a18b7e2d8c165ec9c9 Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Mon, 13 Jan 2025 23:07:10 +1100 Subject: [PATCH 0012/1096] ci: switch test-go-pg on macOS to depot runners (#16101) Since I missed this in #16100 :( --- .github/workflows/ci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4f357727b6278..60717776a87eb 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -379,7 +379,7 @@ jobs: api-key: ${{ secrets.DATADOG_API_KEY }} test-go-pg: - runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'macos-latest-xlarge' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'windows-latest-16-cores' || matrix.os }} + runs-on: ${{ matrix.os == 'ubuntu-latest' && github.repository_owner == 'coder' && 'depot-ubuntu-22.04-4' || matrix.os == 'macos-latest' && github.repository_owner == 'coder' && 'depot-macos-latest' || matrix.os == 'windows-2022' && github.repository_owner == 'coder' && 'windows-latest-16-cores' || matrix.os }} needs: changes if: needs.changes.outputs.go == 'true' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' # This timeout must be greater than the timeout set by `go test` in @@ -419,6 +419,8 @@ jobs: env: POSTGRES_VERSION: "13" TS_DEBUG_DISCO: "true" + LC_CTYPE: "en_US.UTF-8" + LC_ALL: "en_US.UTF-8" shell: bash run: | # if macOS, install google-chrome for scaletests From 22236f29883f87cf0e939030c69533e5b15c7da4 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Mon, 13 Jan 2025 16:06:43 +0200 Subject: [PATCH 0013/1096] chore: only notify about CI failure on `main` if `required` job fails (#16114) This should be the last PR to get this working Looks like the `nightly-gauntlet` is working as expected, and this is a clone of that. 
--------- Signed-off-by: Danny Kopping --- .github/workflows/ci.yaml | 8 +++----- .github/workflows/nightly-gauntlet.yaml | 4 ---- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 60717776a87eb..df752d2aa41f0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1252,8 +1252,10 @@ jobs: make sqlc-vet notify-slack-on-failure: + needs: + - required runs-on: ubuntu-latest - if: always() && failure() && github.ref == 'refs/heads/main' + if: failure() && github.ref == 'refs/heads/main' steps: - name: Send Slack notification @@ -1276,10 +1278,6 @@ jobs: "type": "mrkdwn", "text": "*Workflow:*\n${{ github.workflow }}" }, - { - "type": "mrkdwn", - "text": "*Failed Job:*\n${{ github.job }}" - }, { "type": "mrkdwn", "text": "*Committer:*\n${{ github.actor }}" diff --git a/.github/workflows/nightly-gauntlet.yaml b/.github/workflows/nightly-gauntlet.yaml index c76eba484bfde..5814ddf72b60f 100644 --- a/.github/workflows/nightly-gauntlet.yaml +++ b/.github/workflows/nightly-gauntlet.yaml @@ -101,10 +101,6 @@ jobs: "type": "mrkdwn", "text": "*Workflow:*\n${{ github.workflow }}" }, - { - "type": "mrkdwn", - "text": "*Failed Job:*\n${{ github.job }}" - }, { "type": "mrkdwn", "text": "*Committer:*\n${{ github.actor }}" From 009069cd47e2c9dc6a21b21cca5e5f250948e20d Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Mon, 13 Jan 2025 15:01:47 +0000 Subject: [PATCH 0014/1096] feat: allow notification templates to be disabled by default (#16093) Change as part of https://github.com/coder/coder/pull/16071 It has been decided that we want to be able to have some notification templates be disabled _by default_ https://github.com/coder/coder/pull/16071#issuecomment-2580757061. This adds a new column (`enabled_by_default`) to `notification_templates` that defaults to `TRUE`. It also modifies the `inhibit_enqueue_if_disabled` function to reject notifications for templates that have `enabled_by_default = FALSE` with the user not explicitly enabling it. 
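In practice this means an enqueue attempt now fails for templates that ship disabled by default (such as "workspace created" and "workspace manually updated") unless the recipient has opted in. The sketch below is illustrative only, not part of this diff; it mirrors the test added later in this patch and assumes an already-constructed `*notifications.StoreEnqueuer`, treating the rejection as a no-op.

```go
package notifyexample

import (
	"context"
	"errors"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/coderd/notifications"
)

// enqueueManualUpdateNotice tries to enqueue the "workspace manually updated"
// notification, which this patch disables by default. If the recipient has not
// explicitly enabled the template, the database trigger rejects the insert and
// the enqueuer surfaces ErrCannotEnqueueDisabledNotification, which callers can
// treat as "nothing to send".
func enqueueManualUpdateNotice(ctx context.Context, enq *notifications.StoreEnqueuer, userID uuid.UUID) error {
	_, err := enq.Enqueue(ctx, userID, notifications.TemplateWorkspaceManuallyUpdated,
		map[string]string{}, "example-enqueuer")
	if errors.Is(err, notifications.ErrCannotEnqueueDisabledNotification) {
		return nil // recipient has not opted in; skip silently
	}
	return err
}
```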
--- coderd/apidoc/docs.go | 3 ++ coderd/apidoc/swagger.json | 3 ++ coderd/database/dump.sql | 28 +++++++--- ...notification_templates_by_default.down.sql | 18 +++++++ ...g_notification_templates_by_default.up.sql | 29 +++++++++++ ..._updated_notifications_by_default.down.sql | 9 ++++ ...ly_updated_notifications_by_default.up.sql | 9 ++++ coderd/database/models.go | 5 +- coderd/database/queries.sql.go | 9 ++-- coderd/notifications.go | 17 +++--- coderd/notifications/enqueuer.go | 2 +- coderd/notifications/notifications_test.go | 52 +++++++++++++++++++ codersdk/notifications.go | 17 +++--- docs/admin/security/audit-logs.md | 2 +- docs/reference/api/notifications.md | 24 +++++---- docs/reference/api/schemas.md | 22 ++++---- enterprise/audit/table.go | 17 +++--- site/src/api/typesGenerated.ts | 1 + .../NotificationsPage/NotificationsPage.tsx | 19 ++++++- site/src/testHelpers/entities.ts | 7 +++ 20 files changed, 231 insertions(+), 62 deletions(-) create mode 100644 coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.down.sql create mode 100644 coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.up.sql create mode 100644 coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.down.sql create mode 100644 coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.up.sql diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 15da8b7eb5c36..3d85e63a6081a 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -11712,6 +11712,9 @@ const docTemplate = `{ "body_template": { "type": "string" }, + "enabled_by_default": { + "type": "boolean" + }, "group": { "type": "string" }, diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index df288ed1876c8..615329654edb9 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -10506,6 +10506,9 @@ "body_template": { "type": "string" }, + "enabled_by_default": { + "type": "boolean" + }, "group": { "type": "string" }, diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 7812f6e8e4e5a..37e2cd4d764bf 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -346,13 +346,24 @@ CREATE FUNCTION inhibit_enqueue_if_disabled() RETURNS trigger LANGUAGE plpgsql AS $$ BEGIN - -- Fail the insertion if the user has disabled this notification. - IF EXISTS (SELECT 1 - FROM notification_preferences - WHERE disabled = TRUE - AND user_id = NEW.user_id - AND notification_template_id = NEW.notification_template_id) THEN - RAISE EXCEPTION 'cannot enqueue message: user has disabled this notification'; + -- Fail the insertion if one of the following: + -- * the user has disabled this notification. + -- * the notification template is disabled by default and hasn't + -- been explicitly enabled by the user. 
+ IF EXISTS ( + SELECT 1 FROM notification_templates + LEFT JOIN notification_preferences + ON notification_preferences.notification_template_id = notification_templates.id + AND notification_preferences.user_id = NEW.user_id + WHERE notification_templates.id = NEW.notification_template_id AND ( + -- Case 1: The user has explicitly disabled this template + notification_preferences.disabled = TRUE + OR + -- Case 2: The template is disabled by default AND the user hasn't enabled it + (notification_templates.enabled_by_default = FALSE AND notification_preferences.notification_template_id IS NULL) + ) + ) THEN + RAISE EXCEPTION 'cannot enqueue message: notification is not enabled'; END IF; RETURN NEW; @@ -874,7 +885,8 @@ CREATE TABLE notification_templates ( actions jsonb, "group" text, method notification_method, - kind notification_template_kind DEFAULT 'system'::notification_template_kind NOT NULL + kind notification_template_kind DEFAULT 'system'::notification_template_kind NOT NULL, + enabled_by_default boolean DEFAULT true NOT NULL ); COMMENT ON TABLE notification_templates IS 'Templates from which to create notification messages.'; diff --git a/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.down.sql b/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.down.sql new file mode 100644 index 0000000000000..cdcaff6553f52 --- /dev/null +++ b/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.down.sql @@ -0,0 +1,18 @@ +ALTER TABLE notification_templates DROP COLUMN enabled_by_default; + +CREATE OR REPLACE FUNCTION inhibit_enqueue_if_disabled() + RETURNS TRIGGER AS +$$ +BEGIN + -- Fail the insertion if the user has disabled this notification. + IF EXISTS (SELECT 1 + FROM notification_preferences + WHERE disabled = TRUE + AND user_id = NEW.user_id + AND notification_template_id = NEW.notification_template_id) THEN + RAISE EXCEPTION 'cannot enqueue message: user has disabled this notification'; + END IF; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql; diff --git a/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.up.sql b/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.up.sql new file mode 100644 index 0000000000000..462d859d95be3 --- /dev/null +++ b/coderd/database/migrations/000284_allow_disabling_notification_templates_by_default.up.sql @@ -0,0 +1,29 @@ +ALTER TABLE notification_templates ADD COLUMN enabled_by_default boolean DEFAULT TRUE NOT NULL; + +CREATE OR REPLACE FUNCTION inhibit_enqueue_if_disabled() + RETURNS TRIGGER AS +$$ +BEGIN + -- Fail the insertion if one of the following: + -- * the user has disabled this notification. + -- * the notification template is disabled by default and hasn't + -- been explicitly enabled by the user. 
+ IF EXISTS ( + SELECT 1 FROM notification_templates + LEFT JOIN notification_preferences + ON notification_preferences.notification_template_id = notification_templates.id + AND notification_preferences.user_id = NEW.user_id + WHERE notification_templates.id = NEW.notification_template_id AND ( + -- Case 1: The user has explicitly disabled this template + notification_preferences.disabled = TRUE + OR + -- Case 2: The template is disabled by default AND the user hasn't enabled it + (notification_templates.enabled_by_default = FALSE AND notification_preferences.notification_template_id IS NULL) + ) + ) THEN + RAISE EXCEPTION 'cannot enqueue message: notification is not enabled'; + END IF; + + RETURN NEW; +END; +$$ LANGUAGE plpgsql; diff --git a/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.down.sql b/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.down.sql new file mode 100644 index 0000000000000..4d4910480f0ce --- /dev/null +++ b/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.down.sql @@ -0,0 +1,9 @@ +-- Enable 'workspace created' notification by default +UPDATE notification_templates +SET enabled_by_default = TRUE +WHERE id = '281fdf73-c6d6-4cbb-8ff5-888baf8a2fff'; + +-- Enable 'workspace manually updated' notification by default +UPDATE notification_templates +SET enabled_by_default = TRUE +WHERE id = 'd089fe7b-d5c5-4c0c-aaf5-689859f7d392'; diff --git a/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.up.sql b/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.up.sql new file mode 100644 index 0000000000000..118b1dee0f700 --- /dev/null +++ b/coderd/database/migrations/000285_disable_workspace_created_and_manually_updated_notifications_by_default.up.sql @@ -0,0 +1,9 @@ +-- Disable 'workspace created' notification by default +UPDATE notification_templates +SET enabled_by_default = FALSE +WHERE id = '281fdf73-c6d6-4cbb-8ff5-888baf8a2fff'; + +-- Disable 'workspace manually updated' notification by default +UPDATE notification_templates +SET enabled_by_default = FALSE +WHERE id = 'd089fe7b-d5c5-4c0c-aaf5-689859f7d392'; diff --git a/coderd/database/models.go b/coderd/database/models.go index 35484c7856e14..2a20a8fa2f63e 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -2480,8 +2480,9 @@ type NotificationTemplate struct { Actions []byte `db:"actions" json:"actions"` Group sql.NullString `db:"group" json:"group"` // NULL defers to the deployment-level method - Method NullNotificationMethod `db:"method" json:"method"` - Kind NotificationTemplateKind `db:"kind" json:"kind"` + Method NullNotificationMethod `db:"method" json:"method"` + Kind NotificationTemplateKind `db:"kind" json:"kind"` + EnabledByDefault bool `db:"enabled_by_default" json:"enabled_by_default"` } // A table used to configure apps that can use Coder as an OAuth2 provider, the reverse of what we are calling external authentication. 
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 9301e4b6f725c..455de7c93da9c 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -4134,7 +4134,7 @@ func (q *sqlQuerier) GetNotificationReportGeneratorLogByTemplate(ctx context.Con } const getNotificationTemplateByID = `-- name: GetNotificationTemplateByID :one -SELECT id, name, title_template, body_template, actions, "group", method, kind +SELECT id, name, title_template, body_template, actions, "group", method, kind, enabled_by_default FROM notification_templates WHERE id = $1::uuid ` @@ -4151,12 +4151,13 @@ func (q *sqlQuerier) GetNotificationTemplateByID(ctx context.Context, id uuid.UU &i.Group, &i.Method, &i.Kind, + &i.EnabledByDefault, ) return i, err } const getNotificationTemplatesByKind = `-- name: GetNotificationTemplatesByKind :many -SELECT id, name, title_template, body_template, actions, "group", method, kind +SELECT id, name, title_template, body_template, actions, "group", method, kind, enabled_by_default FROM notification_templates WHERE kind = $1::notification_template_kind ORDER BY name ASC @@ -4180,6 +4181,7 @@ func (q *sqlQuerier) GetNotificationTemplatesByKind(ctx context.Context, kind No &i.Group, &i.Method, &i.Kind, + &i.EnabledByDefault, ); err != nil { return nil, err } @@ -4233,7 +4235,7 @@ const updateNotificationTemplateMethodByID = `-- name: UpdateNotificationTemplat UPDATE notification_templates SET method = $1::notification_method WHERE id = $2::uuid -RETURNING id, name, title_template, body_template, actions, "group", method, kind +RETURNING id, name, title_template, body_template, actions, "group", method, kind, enabled_by_default ` type UpdateNotificationTemplateMethodByIDParams struct { @@ -4253,6 +4255,7 @@ func (q *sqlQuerier) UpdateNotificationTemplateMethodByID(ctx context.Context, a &i.Group, &i.Method, &i.Kind, + &i.EnabledByDefault, ) return i, err } diff --git a/coderd/notifications.go b/coderd/notifications.go index bdf71f99cab98..32f035a076b43 100644 --- a/coderd/notifications.go +++ b/coderd/notifications.go @@ -271,14 +271,15 @@ func (api *API) putUserNotificationPreferences(rw http.ResponseWriter, r *http.R func convertNotificationTemplates(in []database.NotificationTemplate) (out []codersdk.NotificationTemplate) { for _, tmpl := range in { out = append(out, codersdk.NotificationTemplate{ - ID: tmpl.ID, - Name: tmpl.Name, - TitleTemplate: tmpl.TitleTemplate, - BodyTemplate: tmpl.BodyTemplate, - Actions: string(tmpl.Actions), - Group: tmpl.Group.String, - Method: string(tmpl.Method.NotificationMethod), - Kind: string(tmpl.Kind), + ID: tmpl.ID, + Name: tmpl.Name, + TitleTemplate: tmpl.TitleTemplate, + BodyTemplate: tmpl.BodyTemplate, + Actions: string(tmpl.Actions), + Group: tmpl.Group.String, + Method: string(tmpl.Method.NotificationMethod), + Kind: string(tmpl.Kind), + EnabledByDefault: tmpl.EnabledByDefault, }) } diff --git a/coderd/notifications/enqueuer.go b/coderd/notifications/enqueuer.go index 260fcd2675278..df91efe31d003 100644 --- a/coderd/notifications/enqueuer.go +++ b/coderd/notifications/enqueuer.go @@ -20,7 +20,7 @@ import ( ) var ( - ErrCannotEnqueueDisabledNotification = xerrors.New("user has disabled this notification") + ErrCannotEnqueueDisabledNotification = xerrors.New("notification is not enabled") ErrDuplicate = xerrors.New("duplicate notification") ) diff --git a/coderd/notifications/notifications_test.go b/coderd/notifications/notifications_test.go index e404f4afb3c19..62fa50f453cfa 100644 --- 
a/coderd/notifications/notifications_test.go +++ b/coderd/notifications/notifications_test.go @@ -1106,6 +1106,20 @@ func TestNotificationTemplates_Golden(t *testing.T) { r.Name = tc.payload.UserName }, ) + + // With the introduction of notifications that can be disabled + // by default, we want to make sure the user preferences have + // the notification enabled. + _, err := adminClient.UpdateUserNotificationPreferences( + context.Background(), + user.ID, + codersdk.UpdateUserNotificationPreferences{ + TemplateDisabledMap: map[string]bool{ + tc.id.String(): false, + }, + }) + require.NoError(t, err) + return &db, &api.Logger, &user }() @@ -1275,6 +1289,20 @@ func TestNotificationTemplates_Golden(t *testing.T) { r.Name = tc.payload.UserName }, ) + + // With the introduction of notifications that can be disabled + // by default, we want to make sure the user preferences have + // the notification enabled. + _, err := adminClient.UpdateUserNotificationPreferences( + context.Background(), + user.ID, + codersdk.UpdateUserNotificationPreferences{ + TemplateDisabledMap: map[string]bool{ + tc.id.String(): false, + }, + }) + require.NoError(t, err) + return &db, &api.Logger, &user }() @@ -1410,6 +1438,30 @@ func normalizeGoldenWebhook(content []byte) []byte { return content } +func TestDisabledByDefaultBeforeEnqueue(t *testing.T) { + t.Parallel() + + if !dbtestutil.WillUsePostgres() { + t.Skip("This test requires postgres; it is testing business-logic implemented in the database") + } + + // nolint:gocritic // Unit test. + ctx := dbauthz.AsNotifier(testutil.Context(t, testutil.WaitSuperLong)) + store, _ := dbtestutil.NewDB(t) + logger := testutil.Logger(t) + + cfg := defaultNotificationsConfig(database.NotificationMethodSmtp) + enq, err := notifications.NewStoreEnqueuer(cfg, store, defaultHelpers(), logger.Named("enqueuer"), quartz.NewReal()) + require.NoError(t, err) + user := createSampleUser(t, store) + + // We want to try enqueuing a notification on a template that is disabled + // by default. We expect this to fail. 
+ templateID := notifications.TemplateWorkspaceManuallyUpdated + _, err = enq.Enqueue(ctx, user.ID, templateID, map[string]string{}, "test") + require.ErrorIs(t, err, notifications.ErrCannotEnqueueDisabledNotification, "enqueuing did not fail with expected error") +} + // TestDisabledBeforeEnqueue ensures that notifications cannot be enqueued once a user has disabled that notification template func TestDisabledBeforeEnqueue(t *testing.T) { t.Parallel() diff --git a/codersdk/notifications.go b/codersdk/notifications.go index 92870b4dd2b95..c1602c19f4260 100644 --- a/codersdk/notifications.go +++ b/codersdk/notifications.go @@ -17,14 +17,15 @@ type NotificationsSettings struct { } type NotificationTemplate struct { - ID uuid.UUID `json:"id" format:"uuid"` - Name string `json:"name"` - TitleTemplate string `json:"title_template"` - BodyTemplate string `json:"body_template"` - Actions string `json:"actions" format:""` - Group string `json:"group"` - Method string `json:"method"` - Kind string `json:"kind"` + ID uuid.UUID `json:"id" format:"uuid"` + Name string `json:"name"` + TitleTemplate string `json:"title_template"` + BodyTemplate string `json:"body_template"` + Actions string `json:"actions" format:""` + Group string `json:"group"` + Method string `json:"method"` + Kind string `json:"kind"` + EnabledByDefault bool `json:"enabled_by_default"` } type NotificationMethodsResponse struct { diff --git a/docs/admin/security/audit-logs.md b/docs/admin/security/audit-logs.md index 430d03adb0667..85e3a17e34665 100644 --- a/docs/admin/security/audit-logs.md +++ b/docs/admin/security/audit-logs.md @@ -19,7 +19,7 @@ We track the following resources: | GroupSyncSettings
<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>auto_create_missing_groups</td><td>true</td></tr><tr><td>field</td><td>true</td></tr><tr><td>legacy_group_name_mapping</td><td>false</td></tr><tr><td>mapping</td><td>true</td></tr><tr><td>regex_filter</td><td>true</td></tr></tbody></table> |
| HealthSettings<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>dismissed_healthchecks</td><td>true</td></tr><tr><td>id</td><td>false</td></tr></tbody></table> |
| License<br><i>create, delete</i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>exp</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>jwt</td><td>false</td></tr><tr><td>uploaded_at</td><td>true</td></tr><tr><td>uuid</td><td>true</td></tr></tbody></table> |
-| NotificationTemplate<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>actions</td><td>true</td></tr><tr><td>body_template</td><td>true</td></tr><tr><td>group</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>kind</td><td>true</td></tr><tr><td>method</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>title_template</td><td>true</td></tr></tbody></table> |
+| NotificationTemplate<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>actions</td><td>true</td></tr><tr><td>body_template</td><td>true</td></tr><tr><td>enabled_by_default</td><td>true</td></tr><tr><td>group</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>kind</td><td>true</td></tr><tr><td>method</td><td>true</td></tr><tr><td>name</td><td>true</td></tr><tr><td>title_template</td><td>true</td></tr></tbody></table> |
| NotificationsSettings<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>id</td><td>false</td></tr><tr><td>notifier_paused</td><td>true</td></tr></tbody></table> |
| OAuth2ProviderApp<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>callback_url</td><td>true</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>icon</td><td>true</td></tr><tr><td>id</td><td>false</td></tr><tr><td>name</td><td>true</td></tr><tr><td>updated_at</td><td>false</td></tr></tbody></table> |
| OAuth2ProviderAppSecret<br><i></i> | |<table><thead><tr><th>Field</th><th>Tracked</th></tr></thead><tbody><tr><td>app_id</td><td>false</td></tr><tr><td>created_at</td><td>false</td></tr><tr><td>display_secret</td><td>false</td></tr><tr><td>hashed_secret</td><td>false</td></tr><tr><td>id</td><td>false</td></tr><tr><td>last_used_at</td><td>false</td></tr><tr><td>secret_prefix</td><td>false</td></tr></tbody></table>
| diff --git a/docs/reference/api/notifications.md b/docs/reference/api/notifications.md index 21b91182d78fa..0d9b07b3ffce2 100644 --- a/docs/reference/api/notifications.md +++ b/docs/reference/api/notifications.md @@ -146,6 +146,7 @@ curl -X GET http://coder-server:8080/api/v2/notifications/templates/system \ { "actions": "string", "body_template": "string", + "enabled_by_default": true, "group": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "kind": "string", @@ -166,17 +167,18 @@ curl -X GET http://coder-server:8080/api/v2/notifications/templates/system \ Status Code **200** -| Name | Type | Required | Restrictions | Description | -|--------------------|--------------|----------|--------------|-------------| -| `[array item]` | array | false | | | -| `» actions` | string | false | | | -| `» body_template` | string | false | | | -| `» group` | string | false | | | -| `» id` | string(uuid) | false | | | -| `» kind` | string | false | | | -| `» method` | string | false | | | -| `» name` | string | false | | | -| `» title_template` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|------------------------|--------------|----------|--------------|-------------| +| `[array item]` | array | false | | | +| `» actions` | string | false | | | +| `» body_template` | string | false | | | +| `» enabled_by_default` | boolean | false | | | +| `» group` | string | false | | | +| `» id` | string(uuid) | false | | | +| `» kind` | string | false | | | +| `» method` | string | false | | | +| `» name` | string | false | | | +| `» title_template` | string | false | | | To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 542294a01fa37..5b80483409149 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -3550,6 +3550,7 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith { "actions": "string", "body_template": "string", + "enabled_by_default": true, "group": "string", "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", "kind": "string", @@ -3561,16 +3562,17 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith ### Properties -| Name | Type | Required | Restrictions | Description | -|------------------|--------|----------|--------------|-------------| -| `actions` | string | false | | | -| `body_template` | string | false | | | -| `group` | string | false | | | -| `id` | string | false | | | -| `kind` | string | false | | | -| `method` | string | false | | | -| `name` | string | false | | | -| `title_template` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|----------------------|---------|----------|--------------|-------------| +| `actions` | string | false | | | +| `body_template` | string | false | | | +| `enabled_by_default` | boolean | false | | | +| `group` | string | false | | | +| `id` | string | false | | | +| `kind` | string | false | | | +| `method` | string | false | | | +| `name` | string | false | | | +| `title_template` | string | false | | | ## codersdk.NotificationsConfig diff --git a/enterprise/audit/table.go b/enterprise/audit/table.go index 4bbeefdf01e09..b72a64c2eeae4 100644 --- a/enterprise/audit/table.go +++ b/enterprise/audit/table.go @@ -279,14 +279,15 @@ var auditableResourcesTypes = map[any]map[string]Action{ "icon": ActionTrack, }, &database.NotificationTemplate{}: { - "id": ActionIgnore, - "name": ActionTrack, - 
"title_template": ActionTrack, - "body_template": ActionTrack, - "actions": ActionTrack, - "group": ActionTrack, - "method": ActionTrack, - "kind": ActionTrack, + "id": ActionIgnore, + "name": ActionTrack, + "title_template": ActionTrack, + "body_template": ActionTrack, + "actions": ActionTrack, + "group": ActionTrack, + "method": ActionTrack, + "kind": ActionTrack, + "enabled_by_default": ActionTrack, }, &idpsync.OrganizationSyncSettings{}: { "field": ActionTrack, diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index acb5254a61a0a..122c36aab0aa7 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -1228,6 +1228,7 @@ export interface NotificationTemplate { readonly group: string; readonly method: string; readonly kind: string; + readonly enabled_by_default: boolean; } // From codersdk/deployment.go diff --git a/site/src/pages/UserSettingsPage/NotificationsPage/NotificationsPage.tsx b/site/src/pages/UserSettingsPage/NotificationsPage/NotificationsPage.tsx index 6a89714edf877..d10a5c853e56a 100644 --- a/site/src/pages/UserSettingsPage/NotificationsPage/NotificationsPage.tsx +++ b/site/src/pages/UserSettingsPage/NotificationsPage/NotificationsPage.tsx @@ -105,7 +105,7 @@ export const NotificationsPage: FC = () => { {Object.entries(templatesByGroup.data).map(([group, templates]) => { const allDisabled = templates.some((tpl) => { - return disabledPreferences.data[tpl.id] === true; + return notificationIsDisabled(disabledPreferences.data, tpl); }); return ( @@ -150,6 +150,11 @@ export const NotificationsPage: FC = () => { const label = methodLabels[method]; const isLastItem = i === templates.length - 1; + const disabled = notificationIsDisabled( + disabledPreferences.data, + tmpl, + ); + return ( @@ -157,7 +162,7 @@ export const NotificationsPage: FC = () => { { await updatePreferences.mutateAsync({ template_disabled_map: { @@ -207,6 +212,16 @@ export const NotificationsPage: FC = () => { export default NotificationsPage; +function notificationIsDisabled( + disabledPreferences: Record, + tmpl: NotificationTemplate, +): boolean { + return ( + (!tmpl.enabled_by_default && disabledPreferences[tmpl.id] === undefined) || + !!disabledPreferences[tmpl.id] + ); +} + function selectDisabledPreferences(data: NotificationPreference[]) { return data.reduce( (acc, pref) => { diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 33d04cb23e60c..e15377de05430 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -4052,6 +4052,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "Workspace Events", method: "webhook", kind: "system", + enabled_by_default: true, }, { id: "f517da0b-cdc9-410f-ab89-a86107c420ed", @@ -4064,6 +4065,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "Workspace Events", method: "smtp", kind: "system", + enabled_by_default: true, }, { id: "f44d9314-ad03-4bc8-95d0-5cad491da6b6", @@ -4076,6 +4078,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "User Events", method: "", kind: "system", + enabled_by_default: true, }, { id: "4e19c0ac-94e1-4532-9515-d1801aa283b2", @@ -4088,6 +4091,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "User Events", method: "", kind: "system", + enabled_by_default: true, }, { id: "0ea69165-ec14-4314-91f1-69566ac3c5a0", @@ -4100,6 +4104,7 @@ export const MockNotificationTemplates: 
TypesGen.NotificationTemplate[] = [ group: "Workspace Events", method: "smtp", kind: "system", + enabled_by_default: true, }, { id: "c34a0c09-0704-4cac-bd1c-0c0146811c2b", @@ -4112,6 +4117,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "Workspace Events", method: "smtp", kind: "system", + enabled_by_default: true, }, { id: "51ce2fdf-c9ca-4be1-8d70-628674f9bc42", @@ -4124,6 +4130,7 @@ export const MockNotificationTemplates: TypesGen.NotificationTemplate[] = [ group: "Workspace Events", method: "webhook", kind: "system", + enabled_by_default: true, }, ]; From dd29997b9c65e420f036ce540cb692db0109800b Mon Sep 17 00:00:00 2001 From: Hugo Dutka Date: Mon, 13 Jan 2025 09:57:48 -0600 Subject: [PATCH 0015/1096] chore: reduce parallelism for test-go-pg on macOS (#16116) We're seeing test-go-pg flakes on macOS in CI. We've had the same problem on Windows, and reducing test parallelism in https://github.com/coder/coder/pull/16090 seemed to help. This PR makes the same change on macOS. --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index df752d2aa41f0..82f2a7f9489b8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -453,7 +453,8 @@ jobs: DB=ci gotestsum --format standard-quiet -- -v -short -count=1 -parallel 4 -p 4 ./... else go run scripts/embedded-pg/main.go - DB=ci gotestsum --format standard-quiet -- -v -short -count=1 ./... + # Reduce test parallelism, like for Windows above. + DB=ci gotestsum --format standard-quiet -- -v -short -count=1 -parallel 4 -p 4 ./... fi - name: Upload test stats to Datadog From 8a8e7b19af22e38c4e09a6c3943451e186b5d4d6 Mon Sep 17 00:00:00 2001 From: Thomas Kosiewski Date: Mon, 13 Jan 2025 17:54:21 +0100 Subject: [PATCH 0016/1096] fix(site): validate group name before submitting to the backend (#16115) --- .gitignore | 1 + .../pages/GroupsPage/CreateGroupPageView.stories.tsx | 11 +++++++++++ site/src/pages/GroupsPage/CreateGroupPageView.tsx | 11 +++++++++-- .../GroupsPage/CreateGroupPageView.stories.tsx | 10 ++++++++++ .../GroupsPage/CreateGroupPageView.tsx | 11 +++++++++-- 5 files changed, 40 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 16607eacaa35e..031661119573b 100644 --- a/.gitignore +++ b/.gitignore @@ -54,6 +54,7 @@ site/stats/ # direnv .envrc +.direnv *.test # Loadtesting diff --git a/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx b/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx index d6b3dbdb57509..735c4160c9f67 100644 --- a/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPageView.stories.tsx @@ -1,4 +1,5 @@ import type { Meta, StoryObj } from "@storybook/react"; +import { userEvent, within } from "@storybook/test"; import { mockApiError } from "testHelpers/entities"; import { CreateGroupPageView } from "./CreateGroupPageView"; @@ -21,3 +22,13 @@ export const WithError: Story = { initialTouched: { name: true }, }, }; + +export const InvalidName: Story = { + play: async ({ canvasElement }) => { + const user = userEvent.setup(); + const body = within(canvasElement.ownerDocument.body); + const input = await body.findByLabelText("Name"); + await user.type(input, "$om3 !nv@lid Name"); + input.blur(); + }, +}; diff --git a/site/src/pages/GroupsPage/CreateGroupPageView.tsx b/site/src/pages/GroupsPage/CreateGroupPageView.tsx index fe2ed5158f6de..e1432d78e2716 100644 --- 
a/site/src/pages/GroupsPage/CreateGroupPageView.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPageView.tsx @@ -12,11 +12,15 @@ import { Stack } from "components/Stack/Stack"; import { type FormikTouched, useFormik } from "formik"; import type { FC } from "react"; import { useNavigate } from "react-router-dom"; -import { getFormHelpers, onChangeTrimmed } from "utils/formUtils"; +import { + getFormHelpers, + nameValidator, + onChangeTrimmed, +} from "utils/formUtils"; import * as Yup from "yup"; const validationSchema = Yup.object({ - name: Yup.string().required().label("Name"), + name: nameValidator("Name"), }); export type CreateGroupPageViewProps = { @@ -62,6 +66,8 @@ export const CreateGroupPageView: FC = ({ autoFocus fullWidth label="Name" + onChange={onChangeTrimmed(form)} + autoComplete="name" /> = ({ })} fullWidth label="Display Name" + autoComplete="display_name" /> { + const user = userEvent.setup(); + const body = within(canvasElement.ownerDocument.body); + const input = await body.findByLabelText("Name"); + await user.type(input, "$om3 !nv@lid Name"); + input.blur(); + }, +}; diff --git a/site/src/pages/ManagementSettingsPage/GroupsPage/CreateGroupPageView.tsx b/site/src/pages/ManagementSettingsPage/GroupsPage/CreateGroupPageView.tsx index 050a42890900a..3f695020d21a9 100644 --- a/site/src/pages/ManagementSettingsPage/GroupsPage/CreateGroupPageView.tsx +++ b/site/src/pages/ManagementSettingsPage/GroupsPage/CreateGroupPageView.tsx @@ -15,11 +15,15 @@ import { Spinner } from "components/Spinner/Spinner"; import { useFormik } from "formik"; import type { FC } from "react"; import { useNavigate } from "react-router-dom"; -import { getFormHelpers, onChangeTrimmed } from "utils/formUtils"; +import { + getFormHelpers, + nameValidator, + onChangeTrimmed, +} from "utils/formUtils"; import * as Yup from "yup"; const validationSchema = Yup.object({ - name: Yup.string().required().label("Name"), + name: nameValidator("Name"), }); export type CreateGroupPageViewProps = { @@ -69,6 +73,8 @@ export const CreateGroupPageView: FC = ({ autoFocus fullWidth label="Name" + onChange={onChangeTrimmed(form)} + autoComplete="name" /> = ({ })} fullWidth label="Display Name" + autoComplete="display_name" /> Date: Mon, 13 Jan 2025 12:08:40 -0500 Subject: [PATCH 0017/1096] feat: add scaletest load generation infrastructure (#15816) Closes https://github.com/coder/internal/issues/149 This creates workspaces in each region and runs traffic generation against the workspaces colocated in the region. 
--- .../terraform/action/coder_helm_values.tftpl | 8 +- scaletest/terraform/action/coder_templates.tf | 304 ++++++++++++++---- scaletest/terraform/action/coder_traffic.tf | 236 ++++++++++++++ .../terraform/action/coder_workspaces.tf | 180 +++++++++++ scaletest/terraform/action/gcp_clusters.tf | 5 + scaletest/terraform/action/k8s_coder_asia.tf | 6 + .../terraform/action/k8s_coder_europe.tf | 6 + .../terraform/action/k8s_coder_primary.tf | 6 +- scaletest/terraform/action/kubeconfig.tftpl | 17 + scaletest/terraform/action/scenarios.tf | 95 +----- 10 files changed, 718 insertions(+), 145 deletions(-) create mode 100644 scaletest/terraform/action/coder_traffic.tf create mode 100644 scaletest/terraform/action/coder_workspaces.tf create mode 100644 scaletest/terraform/action/kubeconfig.tftpl diff --git a/scaletest/terraform/action/coder_helm_values.tftpl b/scaletest/terraform/action/coder_helm_values.tftpl index 7de0c598a1780..be24bf61cd5e3 100644 --- a/scaletest/terraform/action/coder_helm_values.tftpl +++ b/scaletest/terraform/action/coder_helm_values.tftpl @@ -34,7 +34,11 @@ coder: - name: "CODER_URL" value: "${access_url}" - name: "CODER_PROVISIONERD_TAGS" - value: "scope=organization" + value: "scope=organization,deployment=${deployment}" + - name: "CODER_PROVISIONER_DAEMON_NAME" + valueFrom: + fieldRef: + fieldPath: metadata.name - name: "CODER_CONFIG_DIR" value: "/tmp/config" %{~ endif ~} @@ -76,6 +80,8 @@ coder: value: "${experiments}" - name: "CODER_DANGEROUS_DISABLE_RATE_LIMITS" value: "true" + - name: "CODER_DANGEROUS_ALLOW_PATH_APP_SITE_OWNER_ACCESS" + value: "true" image: repo: ${image_repo} tag: ${image_tag} diff --git a/scaletest/terraform/action/coder_templates.tf b/scaletest/terraform/action/coder_templates.tf index c2334a488a85a..d27c25844b91e 100644 --- a/scaletest/terraform/action/coder_templates.tf +++ b/scaletest/terraform/action/coder_templates.tf @@ -1,96 +1,272 @@ resource "local_file" "kubernetes_template" { filename = "${path.module}/.coderv2/templates/kubernetes/main.tf" content = < Date: Mon, 13 Jan 2025 12:20:31 -0700 Subject: [PATCH 0018/1096] chore: bump storybook-addon-remix-react-router from 3.0.2 to 3.1.0 in /site (#16000) --- site/package.json | 2 +- site/pnpm-lock.yaml | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/site/package.json b/site/package.json index 27adb540cb507..dc2dcbb0c4f96 100644 --- a/site/package.json +++ b/site/package.json @@ -175,7 +175,7 @@ "rxjs": "7.8.1", "ssh2": "1.16.0", "storybook": "8.4.6", - "storybook-addon-remix-react-router": "3.0.2", + "storybook-addon-remix-react-router": "3.1.0", "storybook-react-context": "0.7.0", "tailwindcss": "3.4.13", "ts-node": "10.9.1", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 3c1ce6520f3bb..6da29f54e8761 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -431,8 +431,8 @@ importers: specifier: 8.4.6 version: 8.4.6(prettier@3.4.1) storybook-addon-remix-react-router: - specifier: 3.0.2 - version: 3.0.2(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/preview-api@8.4.7(storybook@8.4.6(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.4.6(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) + specifier: 3.1.0 + 
version: 3.1.0(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/preview-api@8.4.7(storybook@8.4.6(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.4.6(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) storybook-react-context: specifier: 0.7.0 version: 0.7.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)) @@ -5711,8 +5711,8 @@ packages: resolution: {integrity: sha512-iCGQj+0l0HOdZ2AEeBADlsRC+vsnDsZsbdSiH1yNSjcfKM7fdpCMfqAL/dwF5BLiw/XhRft/Wax6zQbhq2BcjQ==} engines: {node: '>= 0.4'} - storybook-addon-remix-react-router@3.0.2: - resolution: {integrity: sha512-vSr7o+TYs2JY4m/elZm28UnLKeK/GQwmNnXWEnR5FyZ8Kcz1S1fPhsdWUjMEU9wRiAMjJwmEHiTtpjbZ4/b0mg==} + storybook-addon-remix-react-router@3.1.0: + resolution: {integrity: sha512-h6cOD+afyAddNrDz5ezoJGV6GBSeH7uh92VAPDz+HLuay74Cr9Ozz+aFmlzMEyVJ1hhNIMOIWDsmK56CueZjsw==} peerDependencies: '@storybook/blocks': ^8.0.0 '@storybook/channels': ^8.0.0 @@ -5721,8 +5721,8 @@ packages: '@storybook/manager-api': ^8.0.0 '@storybook/preview-api': ^8.0.0 '@storybook/theming': ^8.0.0 - react: ^16.8.0 || ^17.0.0 || ^18.0.0 - react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 react-router-dom: ^6.4.0 || ^7.0.0 peerDependenciesMeta: react: @@ -12228,7 +12228,7 @@ snapshots: dependencies: internal-slot: 1.0.6 - storybook-addon-remix-react-router@3.0.2(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/preview-api@8.4.7(storybook@8.4.6(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.4.6(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): + storybook-addon-remix-react-router@3.1.0(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.4.6(prettier@3.4.1)))(@storybook/preview-api@8.4.7(storybook@8.4.6(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.4.6(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1): dependencies: '@storybook/blocks': 8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.4.6(prettier@3.4.1)) '@storybook/channels': 8.1.11 From 7c595e2631ea3223b00304d97362daab0eadc1a8 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Mon, 13 Jan 2025 21:37:57 +0000 Subject: [PATCH 0019/1096] feat: allow removing deadline for running workspace (#16085) Fixes https://github.com/coder/coder/issues/9775 When a workspace's TTL is removed, and the workspace is running, the deadline is removed from the workspace. This also modifies the frontend to not show a confirmation dialog when the change is to remove autostop. 
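As a usage illustration (not part of this patch), here is a minimal Go sketch of the new behaviour using the `codersdk` client the same way the added test does; the deployment URL, session token, and workspace name are placeholders, and the workspace is assumed to be running.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"
	"time"

	"github.com/coder/coder/v2/codersdk"
)

func main() {
	// Placeholder deployment URL and token.
	serverURL, _ := url.Parse("https://coder.example.com")
	client := codersdk.New(serverURL)
	client.SetSessionToken("coder-session-token")

	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Placeholder workspace name; assumed to be running.
	workspace, err := client.WorkspaceByOwnerAndName(ctx, codersdk.Me, "my-workspace", codersdk.WorkspaceOptions{})
	if err != nil {
		log.Fatal(err)
	}

	// Disabling autostop (nil TTL) now also clears the deadline on the
	// latest build when that build is a running "start" transition.
	err = client.UpdateWorkspaceTTL(ctx, workspace.ID, codersdk.UpdateWorkspaceTTLRequest{TTLMillis: nil})
	if err != nil {
		log.Fatal(err)
	}

	build, err := client.WorkspaceBuild(ctx, workspace.LatestBuild.ID)
	if err != nil {
		log.Fatal(err)
	}
	// Expected: false, i.e. no autostop deadline remains.
	fmt.Println("deadline set:", build.Deadline.Valid)
}
```

The new `ModifyAutostopWithRunningWorkspace` test cases below cover this end to end.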
--- coderd/autobuild/lifecycle_executor_test.go | 34 +------- coderd/workspaces.go | 20 +++++ coderd/workspaces_test.go | 87 +++++++++++++++++++ .../WorkspaceSchedulePage.tsx | 5 +- 4 files changed, 114 insertions(+), 32 deletions(-) diff --git a/coderd/autobuild/lifecycle_executor_test.go b/coderd/autobuild/lifecycle_executor_test.go index 3eb779376cc5c..b271fc43d1267 100644 --- a/coderd/autobuild/lifecycle_executor_test.go +++ b/coderd/autobuild/lifecycle_executor_test.go @@ -722,45 +722,17 @@ func TestExecutorWorkspaceAutostopNoWaitChangedMyMind(t *testing.T) { err := client.UpdateWorkspaceTTL(ctx, workspace.ID, codersdk.UpdateWorkspaceTTLRequest{TTLMillis: nil}) require.NoError(t, err) - // Then: the deadline should still be the original value + // Then: the deadline should be set to zero updated := coderdtest.MustWorkspace(t, client, workspace.ID) - assert.WithinDuration(t, workspace.LatestBuild.Deadline.Time, updated.LatestBuild.Deadline.Time, time.Minute) + assert.True(t, !updated.LatestBuild.Deadline.Valid) // When: the autobuild executor ticks after the original deadline go func() { tickCh <- workspace.LatestBuild.Deadline.Time.Add(time.Minute) }() - // Then: the workspace should stop - stats := <-statsCh - assert.Len(t, stats.Errors, 0) - assert.Len(t, stats.Transitions, 1) - assert.Equal(t, stats.Transitions[workspace.ID], database.WorkspaceTransitionStop) - - // Wait for stop to complete - updated = coderdtest.MustWorkspace(t, client, workspace.ID) - _ = coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, updated.LatestBuild.ID) - - // Start the workspace again - workspace = coderdtest.MustTransitionWorkspace(t, client, workspace.ID, database.WorkspaceTransitionStop, database.WorkspaceTransitionStart) - - // Given: the user changes their mind again and wants to enable autostop - newTTL := 8 * time.Hour - err = client.UpdateWorkspaceTTL(ctx, workspace.ID, codersdk.UpdateWorkspaceTTLRequest{TTLMillis: ptr.Ref(newTTL.Milliseconds())}) - require.NoError(t, err) - - // Then: the deadline should remain at the zero value - updated = coderdtest.MustWorkspace(t, client, workspace.ID) - assert.Zero(t, updated.LatestBuild.Deadline) - - // When: the relentless onward march of time continues - go func() { - tickCh <- workspace.LatestBuild.Deadline.Time.Add(newTTL + time.Minute) - close(tickCh) - }() - // Then: the workspace should not stop - stats = <-statsCh + stats := <-statsCh assert.Len(t, stats.Errors, 0) assert.Len(t, stats.Transitions, 0) } diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 19fb1ec1ce810..0e7a4b5972dfd 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -1029,6 +1029,26 @@ func (api *API) putWorkspaceTTL(rw http.ResponseWriter, r *http.Request) { return xerrors.Errorf("update workspace time until shutdown: %w", err) } + // If autostop has been disabled, we want to remove the deadline from the + // existing workspace build (if there is one). 
+ if !dbTTL.Valid { + build, err := s.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) + if err != nil { + return xerrors.Errorf("get latest workspace build: %w", err) + } + + if build.Transition == database.WorkspaceTransitionStart { + if err = s.UpdateWorkspaceBuildDeadlineByID(ctx, database.UpdateWorkspaceBuildDeadlineByIDParams{ + ID: build.ID, + Deadline: time.Time{}, + MaxDeadline: build.MaxDeadline, + UpdatedAt: dbtime.Time(api.Clock.Now()), + }); err != nil { + return xerrors.Errorf("update workspace build deadline: %w", err) + } + } + } + return nil }, nil) if err != nil { diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index d6e365011b929..115549e28cc2b 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -2394,6 +2394,93 @@ func TestWorkspaceUpdateTTL(t *testing.T) { }) } + t.Run("ModifyAutostopWithRunningWorkspace", func(t *testing.T) { + t.Parallel() + + testCases := []struct { + name string + fromTTL *int64 + toTTL *int64 + afterUpdate func(t *testing.T, before, after codersdk.NullTime) + }{ + { + name: "RemoveAutostopRemovesDeadline", + fromTTL: ptr.Ref((8 * time.Hour).Milliseconds()), + toTTL: nil, + afterUpdate: func(t *testing.T, before, after codersdk.NullTime) { + require.NotZero(t, before) + require.Zero(t, after) + }, + }, + { + name: "AddAutostopDoesNotAddDeadline", + fromTTL: nil, + toTTL: ptr.Ref((8 * time.Hour).Milliseconds()), + afterUpdate: func(t *testing.T, before, after codersdk.NullTime) { + require.Zero(t, before) + require.Zero(t, after) + }, + }, + { + name: "IncreaseAutostopDoesNotModifyDeadline", + fromTTL: ptr.Ref((4 * time.Hour).Milliseconds()), + toTTL: ptr.Ref((8 * time.Hour).Milliseconds()), + afterUpdate: func(t *testing.T, before, after codersdk.NullTime) { + require.NotZero(t, before) + require.NotZero(t, after) + require.Equal(t, before, after) + }, + }, + { + name: "DecreaseAutostopDoesNotModifyDeadline", + fromTTL: ptr.Ref((8 * time.Hour).Milliseconds()), + toTTL: ptr.Ref((4 * time.Hour).Milliseconds()), + afterUpdate: func(t *testing.T, before, after codersdk.NullTime) { + require.NotZero(t, before) + require.NotZero(t, after) + require.Equal(t, before, after) + }, + }, + } + + for _, testCase := range testCases { + testCase := testCase + + t.Run(testCase.name, func(t *testing.T) { + t.Parallel() + + var ( + client = coderdtest.New(t, &coderdtest.Options{IncludeProvisionerDaemon: true}) + user = coderdtest.CreateFirstUser(t, client) + version = coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + _ = coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + template = coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) + workspace = coderdtest.CreateWorkspace(t, client, template.ID, func(cwr *codersdk.CreateWorkspaceRequest) { + cwr.TTLMillis = testCase.fromTTL + }) + build = coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + ) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + err := client.UpdateWorkspaceTTL(ctx, workspace.ID, codersdk.UpdateWorkspaceTTLRequest{ + TTLMillis: testCase.toTTL, + }) + require.NoError(t, err) + + deadlineBefore := build.Deadline + + build, err = client.WorkspaceBuild(ctx, build.ID) + require.NoError(t, err) + + deadlineAfter := build.Deadline + + testCase.afterUpdate(t, deadlineBefore, deadlineAfter) + }) + } + }) + t.Run("CustomAutostopDisabledByTemplate", func(t *testing.T) { t.Parallel() var ( diff --git 
a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx index b5e9bfdba8da6..4ee96204dbdd5 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx @@ -118,7 +118,10 @@ export const WorkspaceSchedulePage: FC = () => { await submitScheduleMutation.mutateAsync(data); - if (data.autostopChanged) { + if ( + data.autostopChanged && + getAutostop(workspace).autostopEnabled + ) { setIsConfirmingApply(true); } }} From c2b55346acfcbd1c03aacb651ae45cfb9ce5d074 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 13 Jan 2025 20:29:39 -0300 Subject: [PATCH 0020/1096] chore: add Select component (#16121) Related to https://github.com/coder/coder/issues/15297 and based on [this design](https://www.figma.com/design/gtVchocIWPGYjzaHD2OIY7/Setting-page?node-id=16-1848&m=dev). --- site/package.json | 1 + site/pnpm-lock.yaml | 102 +++++++---- site/src/components/Select/Select.stories.tsx | 59 +++++++ site/src/components/Select/Select.tsx | 158 ++++++++++++++++++ 4 files changed, 286 insertions(+), 34 deletions(-) create mode 100644 site/src/components/Select/Select.stories.tsx create mode 100644 site/src/components/Select/Select.tsx diff --git a/site/package.json b/site/package.json index dc2dcbb0c4f96..5c1445cc0a51a 100644 --- a/site/package.json +++ b/site/package.json @@ -56,6 +56,7 @@ "@radix-ui/react-dropdown-menu": "2.1.4", "@radix-ui/react-label": "2.1.0", "@radix-ui/react-popover": "1.1.3", + "@radix-ui/react-select": "2.1.4", "@radix-ui/react-slider": "1.2.1", "@radix-ui/react-slot": "1.1.1", "@radix-ui/react-switch": "1.1.1", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 6da29f54e8761..e52fdd48f9e54 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -78,6 +78,9 @@ importers: '@radix-ui/react-popover': specifier: 1.1.3 version: 1.1.3(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-select': + specifier: 2.1.4 + version: 2.1.4(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@radix-ui/react-slider': specifier: 1.2.1 version: 1.2.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -1134,33 +1137,18 @@ packages: '@fastly/performance-observer-polyfill@2.0.0': resolution: {integrity: sha512-cQC4E6ReYY4Vud+eCJSCr1N0dSz+fk7xJlLiSgPFDHbnFLZo5DenazoersMt9D8JkEhl9Z5ZwJ/8apcjSrdb8Q==} - '@floating-ui/core@1.6.7': - resolution: {integrity: sha512-yDzVT/Lm101nQ5TCVeK65LtdN7Tj4Qpr9RTXJ2vPFLqtLxwOrpoxAHAJI8J3yYWUc40J0BDBheaitK5SJmno2g==} - '@floating-ui/core@1.6.8': resolution: {integrity: sha512-7XJ9cPU+yI2QeLS+FCSlqNFZJq8arvswefkZrYI1yQBbftw6FyrZOxYSh+9S7z7TpeWlRt9zJ5IhM1WIL334jA==} - '@floating-ui/dom@1.6.10': - resolution: {integrity: sha512-fskgCFv8J8OamCmyun8MfjB1Olfn+uZKjOKZ0vhYF3gRmEUXcGOjxWL8bBr7i4kIuPZ2KD2S3EUIOxnjC8kl2A==} - '@floating-ui/dom@1.6.12': resolution: {integrity: sha512-NP83c0HjokcGVEMeoStg317VD9W7eDlGK7457dMBANbKA6GJZdc7rjujdgqzTaz93jkGgc5P/jeWbaCHnMNc+w==} - '@floating-ui/react-dom@2.1.1': - resolution: {integrity: sha512-4h84MJt3CHrtG18mGsXuLCHMrug49d7DFkU0RMIyshRveBeyV2hmV/pDaF2Uxtu8kgq5r46llp5E5FQiR0K2Yg==} - peerDependencies: - react: '>=16.8.0' - react-dom: '>=16.8.0' - '@floating-ui/react-dom@2.1.2': resolution: {integrity: 
sha512-06okr5cgPzMNBy+Ycse2A6udMi4bqwW/zgBF/rwjcNqWkyr82Mcg8b0vjX8OJpZFy/FKjJmw6wV7t44kK6kW7A==} peerDependencies: react: '>=16.8.0' react-dom: '>=16.8.0' - '@floating-ui/utils@0.2.7': - resolution: {integrity: sha512-X8R8Oj771YRl/w+c1HqAC1szL8zWQRwFvgDwT129k9ACdBoud/+/rX9V0qiMl6LWUdP9voC2nDVZYPMQQsb6eA==} - '@floating-ui/utils@0.2.8': resolution: {integrity: sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==} @@ -2006,6 +1994,19 @@ packages: '@types/react-dom': optional: true + '@radix-ui/react-select@2.1.4': + resolution: {integrity: sha512-pOkb2u8KgO47j/h7AylCj7dJsm69BXcjkrvTqMptFqsE2i0p8lHkfgneXKjAgPzBMivnoMyt8o4KiV4wYzDdyQ==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/react-slider@1.2.1': resolution: {integrity: sha512-bEzQoDW0XP+h/oGbutF5VMWJPAl/UU8IJjr7h02SOHDIIIxq+cep8nItVNoBV+OMmahCdqdF38FTpmXoqQUGvw==} peerDependencies: @@ -2171,6 +2172,19 @@ packages: '@types/react-dom': optional: true + '@radix-ui/react-visually-hidden@1.1.1': + resolution: {integrity: sha512-vVfA2IZ9q/J+gEamvj761Oq1FpWgCDaNOOIfbPVp2MVPLEomUr5+Vf7kJGwQ24YxZSlQVar7Bes8kyTo5Dshpg==} + peerDependencies: + '@types/react': '*' + '@types/react-dom': '*' + react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc + peerDependenciesMeta: + '@types/react': + optional: true + '@types/react-dom': + optional: true + '@radix-ui/rect@1.1.0': resolution: {integrity: sha512-A9+lCBZoaMJlVKcRBz2YByCG+Cp2t6nAnMnNba+XiWxnj6r4JUFqfsgwocMBZU9LPtdxC6wB56ySYpc7LQIoJg==} @@ -3750,7 +3764,6 @@ packages: eslint@8.52.0: resolution: {integrity: sha512-zh/JHnaixqHZsolRB/w9/02akBk9EPrOs9JwcTP2ek7yL5bVvXuRariiaAjjoJ5DvuwQ1WAE/HsMz+w17YgBCg==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} - deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. 
hasBin: true espree@9.6.1: @@ -6965,38 +6978,21 @@ snapshots: dependencies: tslib: 2.6.1 - '@floating-ui/core@1.6.7': - dependencies: - '@floating-ui/utils': 0.2.7 - '@floating-ui/core@1.6.8': dependencies: '@floating-ui/utils': 0.2.8 - '@floating-ui/dom@1.6.10': - dependencies: - '@floating-ui/core': 1.6.7 - '@floating-ui/utils': 0.2.7 - '@floating-ui/dom@1.6.12': dependencies: '@floating-ui/core': 1.6.8 '@floating-ui/utils': 0.2.8 - '@floating-ui/react-dom@2.1.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@floating-ui/dom': 1.6.10 - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - '@floating-ui/react-dom@2.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@floating-ui/dom': 1.6.12 react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - '@floating-ui/utils@0.2.7': {} - '@floating-ui/utils@0.2.8': {} '@fontsource-variable/inter@5.0.15': {} @@ -7853,7 +7849,7 @@ snapshots: '@radix-ui/react-popper@1.2.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: - '@floating-ui/react-dom': 2.1.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@floating-ui/react-dom': 2.1.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@radix-ui/react-arrow': 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@radix-ui/react-compose-refs': 1.1.1(@types/react@18.3.12)(react@18.3.1) '@radix-ui/react-context': 1.1.1(@types/react@18.3.12)(react@18.3.1) @@ -7955,6 +7951,35 @@ snapshots: '@types/react': 18.3.12 '@types/react-dom': 18.3.1 + '@radix-ui/react-select@2.1.4(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/number': 1.1.0 + '@radix-ui/primitive': 1.1.1 + '@radix-ui/react-collection': 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-compose-refs': 1.1.1(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-context': 1.1.1(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-direction': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-dismissable-layer': 1.1.3(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-focus-guards': 1.1.1(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-focus-scope': 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-id': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-popper': 1.2.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-portal': 1.1.3(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@radix-ui/react-slot': 1.1.1(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-use-callback-ref': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-use-controllable-state': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-use-layout-effect': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-use-previous': 1.1.0(@types/react@18.3.12)(react@18.3.1) + '@radix-ui/react-visually-hidden': 1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + aria-hidden: 1.2.4 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-remove-scroll: 
2.6.2(@types/react@18.3.12)(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.12 + '@types/react-dom': 18.3.1 + '@radix-ui/react-slider@1.2.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@radix-ui/number': 1.1.0 @@ -8096,6 +8121,15 @@ snapshots: '@types/react': 18.3.12 '@types/react-dom': 18.3.1 + '@radix-ui/react-visually-hidden@1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@radix-ui/react-primitive': 2.0.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + optionalDependencies: + '@types/react': 18.3.12 + '@types/react-dom': 18.3.1 + '@radix-ui/rect@1.1.0': {} '@remix-run/router@1.19.2': {} diff --git a/site/src/components/Select/Select.stories.tsx b/site/src/components/Select/Select.stories.tsx new file mode 100644 index 0000000000000..f16ff31c4b023 --- /dev/null +++ b/site/src/components/Select/Select.stories.tsx @@ -0,0 +1,59 @@ +import type { Meta, StoryObj } from "@storybook/react"; +import { userEvent } from "@storybook/test"; +import { + Select, + SelectContent, + SelectGroup, + SelectItem, + SelectLabel, + SelectTrigger, + SelectValue, +} from "./Select"; + +const meta: Meta = { + title: "components/Select", + component: Select, + args: { + children: ( + <> + + + + + + Fruits + Apple + Banana + Blueberry + Grapes + Pineapple + + + + ), + }, +}; + +export default meta; +type Story = StoryObj; + +export const Close: Story = {}; + +export const Open: Story = { + args: { + open: true, + }, +}; + +export const SelectedClose: Story = { + args: { + value: "apple", + }, +}; + +export const SelectedOpen: Story = { + args: { + value: "apple", + open: true, + }, +}; diff --git a/site/src/components/Select/Select.tsx b/site/src/components/Select/Select.tsx new file mode 100644 index 0000000000000..a0da638c907a2 --- /dev/null +++ b/site/src/components/Select/Select.tsx @@ -0,0 +1,158 @@ +/** + * Copied from shadc/ui on 13/01/2025 + * @see {@link https://ui.shadcn.com/docs/components/select} + */ +import * as SelectPrimitive from "@radix-ui/react-select"; +import { Check, ChevronDown, ChevronUp } from "lucide-react"; +import * as React from "react"; +import { cn } from "utils/cn"; + +export const Select = SelectPrimitive.Root; + +export const SelectGroup = SelectPrimitive.Group; + +export const SelectValue = SelectPrimitive.Value; + +export const SelectTrigger = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + span]:line-clamp-1", + className, + )} + {...props} + > + {children} + + + + +)); +SelectTrigger.displayName = SelectPrimitive.Trigger.displayName; + +export const SelectScrollUpButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollUpButton.displayName = SelectPrimitive.ScrollUpButton.displayName; + +export const SelectScrollDownButton = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + + + +)); +SelectScrollDownButton.displayName = + SelectPrimitive.ScrollDownButton.displayName; + +export const SelectContent = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, position = "popper", ...props }, ref) => ( + + + + + {children} + + + + +)); +SelectContent.displayName = 
SelectPrimitive.Content.displayName; + +export const SelectLabel = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectLabel.displayName = SelectPrimitive.Label.displayName; + +export const SelectItem = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, children, ...props }, ref) => ( + + + + + + + {children} + +)); +SelectItem.displayName = SelectPrimitive.Item.displayName; + +export const SelectSeparator = React.forwardRef< + React.ElementRef, + React.ComponentPropsWithoutRef +>(({ className, ...props }, ref) => ( + +)); +SelectSeparator.displayName = SelectPrimitive.Separator.displayName; From 838ee3b244ad7b3ff7ee5b8631a8387233726913 Mon Sep 17 00:00:00 2001 From: Aaron Lehmann Date: Mon, 13 Jan 2025 16:29:31 -0800 Subject: [PATCH 0021/1096] feat: add --network-info-dir and --network-info-interval flags to coder ssh (#16078) This is the first in a series of PRs to enable `coder ssh` to replace `coder vscodessh`. This change adds `--network-info-dir` and `--network-info-interval` flags to the `ssh` subcommand. These were formerly only available with the `vscodessh` subcommand. Subsequent PRs will add a `--ssh-host-prefix` flag to the ssh subcommand, and adjust the log file naming to contain the parent PID. --- cli/ssh.go | 224 +++++++++++++++++++++++++-- cli/ssh_test.go | 73 +++++++++ cli/testdata/coder_ssh_--help.golden | 6 + cli/vscodessh.go | 152 +----------------- docs/reference/cli/ssh.md | 17 ++ 5 files changed, 309 insertions(+), 163 deletions(-) diff --git a/cli/ssh.go b/cli/ssh.go index 7a1d5940bfd01..ea03916e3c293 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -3,6 +3,7 @@ package cli import ( "bytes" "context" + "encoding/json" "errors" "fmt" "io" @@ -13,6 +14,7 @@ import ( "os/exec" "path/filepath" "slices" + "strconv" "strings" "sync" "time" @@ -21,11 +23,14 @@ import ( "github.com/gofrs/flock" "github.com/google/uuid" "github.com/mattn/go-isatty" + "github.com/spf13/afero" gossh "golang.org/x/crypto/ssh" gosshagent "golang.org/x/crypto/ssh/agent" "golang.org/x/term" "golang.org/x/xerrors" "gvisor.dev/gvisor/pkg/tcpip/adapters/gonet" + "tailscale.com/tailcfg" + "tailscale.com/types/netlogtype" "cdr.dev/slog" "cdr.dev/slog/sloggers/sloghuman" @@ -55,19 +60,21 @@ var ( func (r *RootCmd) ssh() *serpent.Command { var ( - stdio bool - forwardAgent bool - forwardGPG bool - identityAgent string - wsPollInterval time.Duration - waitEnum string - noWait bool - logDirPath string - remoteForwards []string - env []string - usageApp string - disableAutostart bool - appearanceConfig codersdk.AppearanceConfig + stdio bool + forwardAgent bool + forwardGPG bool + identityAgent string + wsPollInterval time.Duration + waitEnum string + noWait bool + logDirPath string + remoteForwards []string + env []string + usageApp string + disableAutostart bool + appearanceConfig codersdk.AppearanceConfig + networkInfoDir string + networkInfoInterval time.Duration ) client := new(codersdk.Client) cmd := &serpent.Command{ @@ -284,13 +291,21 @@ func (r *RootCmd) ssh() *serpent.Command { return err } + var errCh <-chan error + if networkInfoDir != "" { + errCh, err = setStatsCallback(ctx, conn, logger, networkInfoDir, networkInfoInterval) + if err != nil { + return err + } + } + wg.Add(1) go func() { defer wg.Done() watchAndClose(ctx, func() error { stack.close(xerrors.New("watchAndClose")) return nil - }, logger, client, workspace) + }, logger, client, workspace, errCh) }() copier.copy(&wg) return nil @@ 
-312,6 +327,14 @@ func (r *RootCmd) ssh() *serpent.Command { return err } + var errCh <-chan error + if networkInfoDir != "" { + errCh, err = setStatsCallback(ctx, conn, logger, networkInfoDir, networkInfoInterval) + if err != nil { + return err + } + } + wg.Add(1) go func() { defer wg.Done() @@ -324,6 +347,7 @@ func (r *RootCmd) ssh() *serpent.Command { logger, client, workspace, + errCh, ) }() @@ -540,6 +564,17 @@ func (r *RootCmd) ssh() *serpent.Command { Value: serpent.StringOf(&usageApp), Hidden: true, }, + { + Flag: "network-info-dir", + Description: "Specifies a directory to write network information periodically.", + Value: serpent.StringOf(&networkInfoDir), + }, + { + Flag: "network-info-interval", + Description: "Specifies the interval to update network information.", + Default: "5s", + Value: serpent.DurationOf(&networkInfoInterval), + }, sshDisableAutostartOption(serpent.BoolOf(&disableAutostart)), } return cmd @@ -555,7 +590,7 @@ func (r *RootCmd) ssh() *serpent.Command { // will usually not propagate. // // See: https://github.com/coder/coder/issues/6180 -func watchAndClose(ctx context.Context, closer func() error, logger slog.Logger, client *codersdk.Client, workspace codersdk.Workspace) { +func watchAndClose(ctx context.Context, closer func() error, logger slog.Logger, client *codersdk.Client, workspace codersdk.Workspace, errCh <-chan error) { // Ensure session is ended on both context cancellation // and workspace stop. defer func() { @@ -606,6 +641,9 @@ startWatchLoop: logger.Info(ctx, "workspace stopped") return } + case err := <-errCh: + logger.Error(ctx, "failed to collect network stats", slog.Error(err)) + return } } } @@ -1144,3 +1182,159 @@ func getUsageAppName(usageApp string) codersdk.UsageAppName { return codersdk.UsageAppNameSSH } + +func setStatsCallback( + ctx context.Context, + agentConn *workspacesdk.AgentConn, + logger slog.Logger, + networkInfoDir string, + networkInfoInterval time.Duration, +) (<-chan error, error) { + fs, ok := ctx.Value("fs").(afero.Fs) + if !ok { + fs = afero.NewOsFs() + } + if err := fs.MkdirAll(networkInfoDir, 0o700); err != nil { + return nil, xerrors.Errorf("mkdir: %w", err) + } + + // The VS Code extension obtains the PID of the SSH process to + // read files to display logs and network info. + // + // We get the parent PID because it's assumed `ssh` is calling this + // command via the ProxyCommand SSH option. + pid := os.Getppid() + + // The VS Code extension obtains the PID of the SSH process to + // read the file below which contains network information to display. + // + // We get the parent PID because it's assumed `ssh` is calling this + // command via the ProxyCommand SSH option. + networkInfoFilePath := filepath.Join(networkInfoDir, fmt.Sprintf("%d.json", pid)) + + var ( + firstErrTime time.Time + errCh = make(chan error, 1) + ) + cb := func(start, end time.Time, virtual, _ map[netlogtype.Connection]netlogtype.Counts) { + sendErr := func(tolerate bool, err error) { + logger.Error(ctx, "collect network stats", slog.Error(err)) + // Tolerate up to 1 minute of errors. 
+ if tolerate { + if firstErrTime.IsZero() { + logger.Info(ctx, "tolerating network stats errors for up to 1 minute") + firstErrTime = time.Now() + } + if time.Since(firstErrTime) < time.Minute { + return + } + } + + select { + case errCh <- err: + default: + } + } + + stats, err := collectNetworkStats(ctx, agentConn, start, end, virtual) + if err != nil { + sendErr(true, err) + return + } + + rawStats, err := json.Marshal(stats) + if err != nil { + sendErr(false, err) + return + } + err = afero.WriteFile(fs, networkInfoFilePath, rawStats, 0o600) + if err != nil { + sendErr(false, err) + return + } + + firstErrTime = time.Time{} + } + + now := time.Now() + cb(now, now.Add(time.Nanosecond), map[netlogtype.Connection]netlogtype.Counts{}, map[netlogtype.Connection]netlogtype.Counts{}) + agentConn.SetConnStatsCallback(networkInfoInterval, 2048, cb) + return errCh, nil +} + +type sshNetworkStats struct { + P2P bool `json:"p2p"` + Latency float64 `json:"latency"` + PreferredDERP string `json:"preferred_derp"` + DERPLatency map[string]float64 `json:"derp_latency"` + UploadBytesSec int64 `json:"upload_bytes_sec"` + DownloadBytesSec int64 `json:"download_bytes_sec"` +} + +func collectNetworkStats(ctx context.Context, agentConn *workspacesdk.AgentConn, start, end time.Time, counts map[netlogtype.Connection]netlogtype.Counts) (*sshNetworkStats, error) { + latency, p2p, pingResult, err := agentConn.Ping(ctx) + if err != nil { + return nil, err + } + node := agentConn.Node() + derpMap := agentConn.DERPMap() + derpLatency := map[string]float64{} + + // Convert DERP region IDs to friendly names for display in the UI. + for rawRegion, latency := range node.DERPLatency { + regionParts := strings.SplitN(rawRegion, "-", 2) + regionID, err := strconv.Atoi(regionParts[0]) + if err != nil { + continue + } + region, found := derpMap.Regions[regionID] + if !found { + // It's possible that a workspace agent is using an old DERPMap + // and reports regions that do not exist. If that's the case, + // report the region as unknown! + region = &tailcfg.DERPRegion{ + RegionID: regionID, + RegionName: fmt.Sprintf("Unnamed %d", regionID), + } + } + // Convert the microseconds to milliseconds. + derpLatency[region.RegionName] = latency * 1000 + } + + totalRx := uint64(0) + totalTx := uint64(0) + for _, stat := range counts { + totalRx += stat.RxBytes + totalTx += stat.TxBytes + } + // Tracking the time since last request is required because + // ExtractTrafficStats() resets its counters after each call. + dur := end.Sub(start) + uploadSecs := float64(totalTx) / dur.Seconds() + downloadSecs := float64(totalRx) / dur.Seconds() + + // Sometimes the preferred DERP doesn't match the one we're actually + // connected with. Perhaps because the agent prefers a different DERP and + // we're using that server instead. 
+ preferredDerpID := node.PreferredDERP + if pingResult.DERPRegionID != 0 { + preferredDerpID = pingResult.DERPRegionID + } + preferredDerp, ok := derpMap.Regions[preferredDerpID] + preferredDerpName := fmt.Sprintf("Unnamed %d", preferredDerpID) + if ok { + preferredDerpName = preferredDerp.RegionName + } + if _, ok := derpLatency[preferredDerpName]; !ok { + derpLatency[preferredDerpName] = 0 + } + + return &sshNetworkStats{ + P2P: p2p, + Latency: float64(latency.Microseconds()) / 1000, + PreferredDERP: preferredDerpName, + DERPLatency: derpLatency, + UploadBytesSec: int64(uploadSecs), + DownloadBytesSec: int64(downloadSecs), + }, nil +} diff --git a/cli/ssh_test.go b/cli/ssh_test.go index 8006297f0c3e1..fa6ab32b59035 100644 --- a/cli/ssh_test.go +++ b/cli/ssh_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/google/uuid" + "github.com/spf13/afero" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "golang.org/x/crypto/ssh" @@ -452,6 +453,78 @@ func TestSSH(t *testing.T) { <-cmdDone }) + t.Run("NetworkInfo", func(t *testing.T) { + t.Parallel() + client, workspace, agentToken := setupWorkspaceForAgent(t) + _, _ = tGoContext(t, func(ctx context.Context) { + // Run this async so the SSH command has to wait for + // the build and agent to connect! + _ = agenttest.New(t, client.URL, agentToken) + <-ctx.Done() + }) + + clientOutput, clientInput := io.Pipe() + serverOutput, serverInput := io.Pipe() + defer func() { + for _, c := range []io.Closer{clientOutput, clientInput, serverOutput, serverInput} { + _ = c.Close() + } + }() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + fs := afero.NewMemMapFs() + //nolint:revive,staticcheck + ctx = context.WithValue(ctx, "fs", fs) + + inv, root := clitest.New(t, "ssh", "--stdio", workspace.Name, "--network-info-dir", "/net", "--network-info-interval", "25ms") + clitest.SetupConfig(t, client, root) + inv.Stdin = clientOutput + inv.Stdout = serverInput + inv.Stderr = io.Discard + + cmdDone := tGo(t, func() { + err := inv.WithContext(ctx).Run() + assert.NoError(t, err) + }) + + conn, channels, requests, err := ssh.NewClientConn(&stdioConn{ + Reader: serverOutput, + Writer: clientInput, + }, "", &ssh.ClientConfig{ + // #nosec + HostKeyCallback: ssh.InsecureIgnoreHostKey(), + }) + require.NoError(t, err) + defer conn.Close() + + sshClient := ssh.NewClient(conn, channels, requests) + session, err := sshClient.NewSession() + require.NoError(t, err) + defer session.Close() + + command := "sh -c exit" + if runtime.GOOS == "windows" { + command = "cmd.exe /c exit" + } + err = session.Run(command) + require.NoError(t, err) + err = sshClient.Close() + require.NoError(t, err) + _ = clientOutput.Close() + + assert.Eventually(t, func() bool { + entries, err := afero.ReadDir(fs, "/net") + if err != nil { + return false + } + return len(entries) > 0 + }, testutil.WaitLong, testutil.IntervalFast) + + <-cmdDone + }) + t.Run("Stdio_StartStoppedWorkspace_CleanStdout", func(t *testing.T) { t.Parallel() diff --git a/cli/testdata/coder_ssh_--help.golden b/cli/testdata/coder_ssh_--help.golden index 80aaa3c204fda..d847e9d7abb03 100644 --- a/cli/testdata/coder_ssh_--help.golden +++ b/cli/testdata/coder_ssh_--help.golden @@ -30,6 +30,12 @@ OPTIONS: -l, --log-dir string, $CODER_SSH_LOG_DIR Specify the directory containing SSH diagnostic log files. + --network-info-dir string + Specifies a directory to write network information periodically. 
+ + --network-info-interval duration (default: 5s) + Specifies the interval to update network information. + --no-wait bool, $CODER_SSH_NO_WAIT Enter workspace immediately after the agent has connected. This is the default if the template has configured the agent startup script diff --git a/cli/vscodessh.go b/cli/vscodessh.go index d64e49c674a01..630c405241d17 100644 --- a/cli/vscodessh.go +++ b/cli/vscodessh.go @@ -2,21 +2,17 @@ package cli import ( "context" - "encoding/json" "fmt" "io" "net/http" "net/url" "os" "path/filepath" - "strconv" "strings" "time" "github.com/spf13/afero" "golang.org/x/xerrors" - "tailscale.com/tailcfg" - "tailscale.com/types/netlogtype" "cdr.dev/slog" "cdr.dev/slog/sloggers/sloghuman" @@ -83,11 +79,6 @@ func (r *RootCmd) vscodeSSH() *serpent.Command { ctx, cancel := context.WithCancel(inv.Context()) defer cancel() - err = fs.MkdirAll(networkInfoDir, 0o700) - if err != nil { - return xerrors.Errorf("mkdir: %w", err) - } - client := codersdk.New(serverURL) client.SetSessionToken(string(sessionToken)) @@ -155,20 +146,13 @@ func (r *RootCmd) vscodeSSH() *serpent.Command { } } - // The VS Code extension obtains the PID of the SSH process to - // read files to display logs and network info. - // - // We get the parent PID because it's assumed `ssh` is calling this - // command via the ProxyCommand SSH option. - pid := os.Getppid() - // Use a stripped down writer that doesn't sync, otherwise you get // "failed to sync sloghuman: sync /dev/stderr: The handle is // invalid" on Windows. Syncing isn't required for stdout/stderr // anyways. logger := inv.Logger.AppendSinks(sloghuman.Sink(slogWriter{w: inv.Stderr})).Leveled(slog.LevelDebug) if logDir != "" { - logFilePath := filepath.Join(logDir, fmt.Sprintf("%d.log", pid)) + logFilePath := filepath.Join(logDir, fmt.Sprintf("%d.log", os.Getppid())) logFile, err := fs.OpenFile(logFilePath, os.O_CREATE|os.O_WRONLY, 0o600) if err != nil { return xerrors.Errorf("open log file %q: %w", logFilePath, err) @@ -212,61 +196,10 @@ func (r *RootCmd) vscodeSSH() *serpent.Command { _, _ = io.Copy(rawSSH, inv.Stdin) }() - // The VS Code extension obtains the PID of the SSH process to - // read the file below which contains network information to display. - // - // We get the parent PID because it's assumed `ssh` is calling this - // command via the ProxyCommand SSH option. - networkInfoFilePath := filepath.Join(networkInfoDir, fmt.Sprintf("%d.json", pid)) - - var ( - firstErrTime time.Time - errCh = make(chan error, 1) - ) - cb := func(start, end time.Time, virtual, _ map[netlogtype.Connection]netlogtype.Counts) { - sendErr := func(tolerate bool, err error) { - logger.Error(ctx, "collect network stats", slog.Error(err)) - // Tolerate up to 1 minute of errors. 
- if tolerate { - if firstErrTime.IsZero() { - logger.Info(ctx, "tolerating network stats errors for up to 1 minute") - firstErrTime = time.Now() - } - if time.Since(firstErrTime) < time.Minute { - return - } - } - - select { - case errCh <- err: - default: - } - } - - stats, err := collectNetworkStats(ctx, agentConn, start, end, virtual) - if err != nil { - sendErr(true, err) - return - } - - rawStats, err := json.Marshal(stats) - if err != nil { - sendErr(false, err) - return - } - err = afero.WriteFile(fs, networkInfoFilePath, rawStats, 0o600) - if err != nil { - sendErr(false, err) - return - } - - firstErrTime = time.Time{} + errCh, err := setStatsCallback(ctx, agentConn, logger, networkInfoDir, networkInfoInterval) + if err != nil { + return err } - - now := time.Now() - cb(now, now.Add(time.Nanosecond), map[netlogtype.Connection]netlogtype.Counts{}, map[netlogtype.Connection]netlogtype.Counts{}) - agentConn.SetConnStatsCallback(networkInfoInterval, 2048, cb) - select { case <-ctx.Done(): return nil @@ -323,80 +256,3 @@ var _ io.Writer = slogWriter{} func (s slogWriter) Write(p []byte) (n int, err error) { return s.w.Write(p) } - -type sshNetworkStats struct { - P2P bool `json:"p2p"` - Latency float64 `json:"latency"` - PreferredDERP string `json:"preferred_derp"` - DERPLatency map[string]float64 `json:"derp_latency"` - UploadBytesSec int64 `json:"upload_bytes_sec"` - DownloadBytesSec int64 `json:"download_bytes_sec"` -} - -func collectNetworkStats(ctx context.Context, agentConn *workspacesdk.AgentConn, start, end time.Time, counts map[netlogtype.Connection]netlogtype.Counts) (*sshNetworkStats, error) { - latency, p2p, pingResult, err := agentConn.Ping(ctx) - if err != nil { - return nil, err - } - node := agentConn.Node() - derpMap := agentConn.DERPMap() - derpLatency := map[string]float64{} - - // Convert DERP region IDs to friendly names for display in the UI. - for rawRegion, latency := range node.DERPLatency { - regionParts := strings.SplitN(rawRegion, "-", 2) - regionID, err := strconv.Atoi(regionParts[0]) - if err != nil { - continue - } - region, found := derpMap.Regions[regionID] - if !found { - // It's possible that a workspace agent is using an old DERPMap - // and reports regions that do not exist. If that's the case, - // report the region as unknown! - region = &tailcfg.DERPRegion{ - RegionID: regionID, - RegionName: fmt.Sprintf("Unnamed %d", regionID), - } - } - // Convert the microseconds to milliseconds. - derpLatency[region.RegionName] = latency * 1000 - } - - totalRx := uint64(0) - totalTx := uint64(0) - for _, stat := range counts { - totalRx += stat.RxBytes - totalTx += stat.TxBytes - } - // Tracking the time since last request is required because - // ExtractTrafficStats() resets its counters after each call. - dur := end.Sub(start) - uploadSecs := float64(totalTx) / dur.Seconds() - downloadSecs := float64(totalRx) / dur.Seconds() - - // Sometimes the preferred DERP doesn't match the one we're actually - // connected with. Perhaps because the agent prefers a different DERP and - // we're using that server instead. 
- preferredDerpID := node.PreferredDERP - if pingResult.DERPRegionID != 0 { - preferredDerpID = pingResult.DERPRegionID - } - preferredDerp, ok := derpMap.Regions[preferredDerpID] - preferredDerpName := fmt.Sprintf("Unnamed %d", preferredDerpID) - if ok { - preferredDerpName = preferredDerp.RegionName - } - if _, ok := derpLatency[preferredDerpName]; !ok { - derpLatency[preferredDerpName] = 0 - } - - return &sshNetworkStats{ - P2P: p2p, - Latency: float64(latency.Microseconds()) / 1000, - PreferredDERP: preferredDerpName, - DERPLatency: derpLatency, - UploadBytesSec: int64(uploadSecs), - DownloadBytesSec: int64(downloadSecs), - }, nil -} diff --git a/docs/reference/cli/ssh.md b/docs/reference/cli/ssh.md index 72513e0c9ecdc..74e28837ad7e4 100644 --- a/docs/reference/cli/ssh.md +++ b/docs/reference/cli/ssh.md @@ -103,6 +103,23 @@ Enable remote port forwarding (remote_port:local_address:local_port). Set environment variable(s) for session (key1=value1,key2=value2,...). +### --network-info-dir + +| | | +|------|---------------------| +| Type | string | + +Specifies a directory to write network information periodically. + +### --network-info-interval + +| | | +|---------|-----------------------| +| Type | duration | +| Default | 5s | + +Specifies the interval to update network information. + ### --disable-autostart | | | From ec6645b832a7627e41ca451d0d30bf321f07fd54 Mon Sep 17 00:00:00 2001 From: Aaron Lehmann Date: Mon, 13 Jan 2025 16:30:02 -0800 Subject: [PATCH 0022/1096] chore: add parent PID to coder ssh log file name (#16080) Part of bringing `coder ssh` to parity with `coder vscodessh` is associating the log files with a particular parent process (in this case, the ssh process that spawned the coder CLI via `ProxyCommand`). `coder vscodessh` named log files using the parent PID, but coder ssh is missing this. Add the parent PID to the log file name when used in stdio mode so that the VS Code extension will be able to identify the correct log file. See also #16078. --- cli/ssh.go | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/cli/ssh.go b/cli/ssh.go index ea03916e3c293..4fa836e44e389 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -131,18 +131,26 @@ func (r *RootCmd) ssh() *serpent.Command { if err != nil { return xerrors.Errorf("generate nonce: %w", err) } - logFilePath := filepath.Join( - logDirPath, - fmt.Sprintf( - "coder-ssh-%s-%s.log", - // The time portion makes it easier to find the right - // log file. - time.Now().Format("20060102-150405"), - // The nonce prevents collisions, as SSH invocations - // frequently happen in parallel. - nonce, - ), + logFileBaseName := fmt.Sprintf( + "coder-ssh-%s-%s", + // The time portion makes it easier to find the right + // log file. + time.Now().Format("20060102-150405"), + // The nonce prevents collisions, as SSH invocations + // frequently happen in parallel. + nonce, ) + if stdio { + // The VS Code extension obtains the PID of the SSH process to + // find the log file associated with a SSH session. + // + // We get the parent PID because it's assumed `ssh` is calling this + // command via the ProxyCommand SSH option. 
+ logFileBaseName += fmt.Sprintf("-%d", os.Getppid()) + } + logFileBaseName += ".log" + + logFilePath := filepath.Join(logDirPath, logFileBaseName) logFile, err := os.OpenFile( logFilePath, os.O_CREATE|os.O_APPEND|os.O_WRONLY|os.O_EXCL, From 1aa9e32a2ba9e7b623516ee033bf20de764b2afd Mon Sep 17 00:00:00 2001 From: Aaron Lehmann Date: Mon, 13 Jan 2025 17:07:21 -0800 Subject: [PATCH 0023/1096] feat: add --ssh-host-prefix flag for "coder ssh" (#16088) This adds a flag matching `--ssh-host-prefix` from `coder config-ssh` to `coder ssh`. By trimming a custom prefix from the argument, we can set up wildcard-based `Host` entries in SSH config for the IDE plugins (and eventually `coder config-ssh`). We also replace `--` in the argument with `/`, so ownership can be specified in wildcard-based SSH hosts like `--`. Replaces #16087. Part of https://github.com/coder/coder/issues/14986. Related to https://github.com/coder/coder/pull/16078 and https://github.com/coder/coder/pull/16080. --- cli/ssh.go | 13 +++++- cli/ssh_test.go | 63 ++++++++++++++++++++++++++++ cli/testdata/coder_ssh_--help.golden | 5 +++ docs/reference/cli/ssh.md | 9 ++++ 4 files changed, 89 insertions(+), 1 deletion(-) diff --git a/cli/ssh.go b/cli/ssh.go index 4fa836e44e389..884c5500d703c 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -61,6 +61,7 @@ var ( func (r *RootCmd) ssh() *serpent.Command { var ( stdio bool + hostPrefix string forwardAgent bool forwardGPG bool identityAgent string @@ -195,7 +196,11 @@ func (r *RootCmd) ssh() *serpent.Command { parsedEnv = append(parsedEnv, [2]string{k, v}) } - workspace, workspaceAgent, err := getWorkspaceAndAgent(ctx, inv, client, !disableAutostart, inv.Args[0]) + namedWorkspace := strings.TrimPrefix(inv.Args[0], hostPrefix) + // Support "--" as a delimiter between owner and workspace name + namedWorkspace = strings.Replace(namedWorkspace, "--", "/", 1) + + workspace, workspaceAgent, err := getWorkspaceAndAgent(ctx, inv, client, !disableAutostart, namedWorkspace) if err != nil { return err } @@ -509,6 +514,12 @@ func (r *RootCmd) ssh() *serpent.Command { Description: "Specifies whether to emit SSH output over stdin/stdout.", Value: serpent.BoolOf(&stdio), }, + { + Flag: "ssh-host-prefix", + Env: "CODER_SSH_SSH_HOST_PREFIX", + Description: "Strip this prefix from the provided hostname to determine the workspace name. This is useful when used as part of an OpenSSH proxy command.", + Value: serpent.StringOf(&hostPrefix), + }, { Flag: "forward-agent", FlagShorthand: "A", diff --git a/cli/ssh_test.go b/cli/ssh_test.go index fa6ab32b59035..23c7a01898cd1 100644 --- a/cli/ssh_test.go +++ b/cli/ssh_test.go @@ -1568,6 +1568,69 @@ func TestSSH(t *testing.T) { }) } }) + + t.Run("SSHHostPrefix", func(t *testing.T) { + t.Parallel() + client, workspace, agentToken := setupWorkspaceForAgent(t) + _, _ = tGoContext(t, func(ctx context.Context) { + // Run this async so the SSH command has to wait for + // the build and agent to connect! 
+ _ = agenttest.New(t, client.URL, agentToken) + <-ctx.Done() + }) + + clientOutput, clientInput := io.Pipe() + serverOutput, serverInput := io.Pipe() + defer func() { + for _, c := range []io.Closer{clientOutput, clientInput, serverOutput, serverInput} { + _ = c.Close() + } + }() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + user, err := client.User(ctx, codersdk.Me) + require.NoError(t, err) + + inv, root := clitest.New(t, "ssh", "--stdio", "--ssh-host-prefix", "coder.dummy.com--", fmt.Sprintf("coder.dummy.com--%s--%s", user.Username, workspace.Name)) + clitest.SetupConfig(t, client, root) + inv.Stdin = clientOutput + inv.Stdout = serverInput + inv.Stderr = io.Discard + + cmdDone := tGo(t, func() { + err := inv.WithContext(ctx).Run() + assert.NoError(t, err) + }) + + conn, channels, requests, err := ssh.NewClientConn(&stdioConn{ + Reader: serverOutput, + Writer: clientInput, + }, "", &ssh.ClientConfig{ + // #nosec + HostKeyCallback: ssh.InsecureIgnoreHostKey(), + }) + require.NoError(t, err) + defer conn.Close() + + sshClient := ssh.NewClient(conn, channels, requests) + session, err := sshClient.NewSession() + require.NoError(t, err) + defer session.Close() + + command := "sh -c exit" + if runtime.GOOS == "windows" { + command = "cmd.exe /c exit" + } + err = session.Run(command) + require.NoError(t, err) + err = sshClient.Close() + require.NoError(t, err) + _ = clientOutput.Close() + + <-cmdDone + }) } //nolint:paralleltest // This test uses t.Setenv, parent test MUST NOT be parallel. diff --git a/cli/testdata/coder_ssh_--help.golden b/cli/testdata/coder_ssh_--help.golden index d847e9d7abb03..3d2f584727cd9 100644 --- a/cli/testdata/coder_ssh_--help.golden +++ b/cli/testdata/coder_ssh_--help.golden @@ -45,6 +45,11 @@ OPTIONS: -R, --remote-forward string-array, $CODER_SSH_REMOTE_FORWARD Enable remote port forwarding (remote_port:local_address:local_port). + --ssh-host-prefix string, $CODER_SSH_SSH_HOST_PREFIX + Strip this prefix from the provided hostname to determine the + workspace name. This is useful when used as part of an OpenSSH proxy + command. + --stdio bool, $CODER_SSH_STDIO Specifies whether to emit SSH output over stdin/stdout. diff --git a/docs/reference/cli/ssh.md b/docs/reference/cli/ssh.md index 74e28837ad7e4..72d63a1f003af 100644 --- a/docs/reference/cli/ssh.md +++ b/docs/reference/cli/ssh.md @@ -20,6 +20,15 @@ coder ssh [flags] Specifies whether to emit SSH output over stdin/stdout. +### --ssh-host-prefix + +| | | +|-------------|-----------------------------------------| +| Type | string | +| Environment | $CODER_SSH_SSH_HOST_PREFIX | + +Strip this prefix from the provided hostname to determine the workspace name. This is useful when used as part of an OpenSSH proxy command. + ### -A, --forward-agent | | | From 8f02e633bf1a4a069b28e98719ee70c7a555c2dc Mon Sep 17 00:00:00 2001 From: Aaron Lehmann Date: Mon, 13 Jan 2025 17:07:42 -0800 Subject: [PATCH 0024/1096] feat: use wildcard Host entry in config-ssh (#16096) Rather than create a separate `Host` entry for every workspace, configure a wildcard such as `coder.*` which can accomodate all of a user's workspaces. Depends on #16088. 
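To make the wildcard behaviour concrete, here is a small standalone sketch of the hostname handling that #16088 added to `coder ssh` and that the wildcard `Host` entry relies on; the `coder.` prefix and hostnames are just examples.

```go
package main

import (
	"fmt"
	"strings"
)

// resolveWorkspace mirrors the hostname handling in `coder ssh`:
// strip the configured --ssh-host-prefix, then treat the first "--"
// as the owner/workspace separator.
func resolveWorkspace(hostname, hostPrefix string) string {
	name := strings.TrimPrefix(hostname, hostPrefix)
	// Support "--" as a delimiter between owner and workspace name.
	return strings.Replace(name, "--", "/", 1)
}

func main() {
	fmt.Println(resolveWorkspace("coder.my-workspace", "coder.")) // my-workspace
	fmt.Println(resolveWorkspace("coder.alice--dev", "coder."))   // alice/dev
}
```

With that in place, a single generated block along the lines of `Host coder.*` plus `ProxyCommand coder ssh --stdio --ssh-host-prefix coder. %h` covers every workspace, instead of one `Host` entry per workspace.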
--- cli/configssh.go | 233 ++++++++----------------- cli/configssh_test.go | 390 +++++++++++++----------------------------- 2 files changed, 192 insertions(+), 431 deletions(-) diff --git a/cli/configssh.go b/cli/configssh.go index cdaf404ab50df..a7aed33eba1df 100644 --- a/cli/configssh.go +++ b/cli/configssh.go @@ -3,7 +3,6 @@ package cli import ( "bufio" "bytes" - "context" "errors" "fmt" "io" @@ -12,7 +11,6 @@ import ( "os" "path/filepath" "runtime" - "sort" "strconv" "strings" @@ -22,11 +20,9 @@ import ( "github.com/pkg/diff/write" "golang.org/x/exp/constraints" "golang.org/x/exp/slices" - "golang.org/x/sync/errgroup" "golang.org/x/xerrors" "github.com/coder/coder/v2/cli/cliui" - "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" "github.com/coder/serpent" ) @@ -139,74 +135,6 @@ func (o sshConfigOptions) asList() (list []string) { return list } -type sshWorkspaceConfig struct { - Name string - Hosts []string -} - -func sshFetchWorkspaceConfigs(ctx context.Context, client *codersdk.Client) ([]sshWorkspaceConfig, error) { - res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ - Owner: codersdk.Me, - }) - if err != nil { - return nil, err - } - - var errGroup errgroup.Group - workspaceConfigs := make([]sshWorkspaceConfig, len(res.Workspaces)) - for i, workspace := range res.Workspaces { - i := i - workspace := workspace - errGroup.Go(func() error { - resources, err := client.TemplateVersionResources(ctx, workspace.LatestBuild.TemplateVersionID) - if err != nil { - return err - } - - wc := sshWorkspaceConfig{Name: workspace.Name} - var agents []codersdk.WorkspaceAgent - for _, resource := range resources { - if resource.Transition != codersdk.WorkspaceTransitionStart { - continue - } - agents = append(agents, resource.Agents...) - } - - // handle both WORKSPACE and WORKSPACE.AGENT syntax - if len(agents) == 1 { - wc.Hosts = append(wc.Hosts, workspace.Name) - } - for _, agent := range agents { - hostname := workspace.Name + "." + agent.Name - wc.Hosts = append(wc.Hosts, hostname) - } - - workspaceConfigs[i] = wc - - return nil - }) - } - err = errGroup.Wait() - if err != nil { - return nil, err - } - - return workspaceConfigs, nil -} - -func sshPrepareWorkspaceConfigs(ctx context.Context, client *codersdk.Client) (receive func() ([]sshWorkspaceConfig, error)) { - wcC := make(chan []sshWorkspaceConfig, 1) - errC := make(chan error, 1) - go func() { - wc, err := sshFetchWorkspaceConfigs(ctx, client) - wcC <- wc - errC <- err - }() - return func() ([]sshWorkspaceConfig, error) { - return <-wcC, <-errC - } -} - func (r *RootCmd) configSSH() *serpent.Command { var ( sshConfigFile string @@ -254,8 +182,6 @@ func (r *RootCmd) configSSH() *serpent.Command { // warning at any time. _, _ = client.BuildInfo(ctx) - recvWorkspaceConfigs := sshPrepareWorkspaceConfigs(ctx, client) - out := inv.Stdout if dryRun { // Print everything except diff to stderr so @@ -371,11 +297,6 @@ func (r *RootCmd) configSSH() *serpent.Command { newline := len(before) > 0 sshConfigWriteSectionHeader(buf, newline, sshConfigOpts) - workspaceConfigs, err := recvWorkspaceConfigs() - if err != nil { - return xerrors.Errorf("fetch workspace configs failed: %w", err) - } - coderdConfig, err := client.SSHConfiguration(ctx) if err != nil { // If the error is 404, this deployment does not support @@ -394,91 +315,79 @@ func (r *RootCmd) configSSH() *serpent.Command { coderdConfig.HostnamePrefix = sshConfigOpts.userHostPrefix } - // Ensure stable sorting of output. 
- slices.SortFunc(workspaceConfigs, func(a, b sshWorkspaceConfig) int { - return slice.Ascending(a.Name, b.Name) - }) - for _, wc := range workspaceConfigs { - sort.Strings(wc.Hosts) - // Write agent configuration. - for _, workspaceHostname := range wc.Hosts { - sshHostname := fmt.Sprintf("%s%s", coderdConfig.HostnamePrefix, workspaceHostname) - defaultOptions := []string{ - "HostName " + sshHostname, - "ConnectTimeout=0", - "StrictHostKeyChecking=no", - // Without this, the "REMOTE HOST IDENTITY CHANGED" - // message will appear. - "UserKnownHostsFile=/dev/null", - // This disables the "Warning: Permanently added 'hostname' (RSA) to the list of known hosts." - // message from appearing on every SSH. This happens because we ignore the known hosts. - "LogLevel ERROR", - } - - if !skipProxyCommand { - rootFlags := fmt.Sprintf("--global-config %s", escapedGlobalConfig) - for _, h := range sshConfigOpts.header { - rootFlags += fmt.Sprintf(" --header %q", h) - } - if sshConfigOpts.headerCommand != "" { - rootFlags += fmt.Sprintf(" --header-command %q", sshConfigOpts.headerCommand) - } - - flags := "" - if sshConfigOpts.waitEnum != "auto" { - flags += " --wait=" + sshConfigOpts.waitEnum - } - if sshConfigOpts.disableAutostart { - flags += " --disable-autostart=true" - } - defaultOptions = append(defaultOptions, fmt.Sprintf( - "ProxyCommand %s %s ssh --stdio%s %s", - escapedCoderBinary, rootFlags, flags, workspaceHostname, - )) - } + // Write agent configuration. + defaultOptions := []string{ + "ConnectTimeout=0", + "StrictHostKeyChecking=no", + // Without this, the "REMOTE HOST IDENTITY CHANGED" + // message will appear. + "UserKnownHostsFile=/dev/null", + // This disables the "Warning: Permanently added 'hostname' (RSA) to the list of known hosts." + // message from appearing on every SSH. This happens because we ignore the known hosts. + "LogLevel ERROR", + } - // Create a copy of the options so we can modify them. - configOptions := sshConfigOpts - configOptions.sshOptions = nil - - // User options first (SSH only uses the first - // option unless it can be given multiple times) - for _, opt := range sshConfigOpts.sshOptions { - err := configOptions.addOptions(opt) - if err != nil { - return xerrors.Errorf("add flag config option %q: %w", opt, err) - } - } + if !skipProxyCommand { + rootFlags := fmt.Sprintf("--global-config %s", escapedGlobalConfig) + for _, h := range sshConfigOpts.header { + rootFlags += fmt.Sprintf(" --header %q", h) + } + if sshConfigOpts.headerCommand != "" { + rootFlags += fmt.Sprintf(" --header-command %q", sshConfigOpts.headerCommand) + } - // Deployment options second, allow them to - // override standard options. - for k, v := range coderdConfig.SSHConfigOptions { - opt := fmt.Sprintf("%s %s", k, v) - err := configOptions.addOptions(opt) - if err != nil { - return xerrors.Errorf("add coderd config option %q: %w", opt, err) - } - } + flags := "" + if sshConfigOpts.waitEnum != "auto" { + flags += " --wait=" + sshConfigOpts.waitEnum + } + if sshConfigOpts.disableAutostart { + flags += " --disable-autostart=true" + } + defaultOptions = append(defaultOptions, fmt.Sprintf( + "ProxyCommand %s %s ssh --stdio%s --ssh-host-prefix %s %%h", + escapedCoderBinary, rootFlags, flags, coderdConfig.HostnamePrefix, + )) + } - // Finally, add the standard options. - err := configOptions.addOptions(defaultOptions...) - if err != nil { - return err - } + // Create a copy of the options so we can modify them. 
+ configOptions := sshConfigOpts + configOptions.sshOptions = nil - hostBlock := []string{ - "Host " + sshHostname, - } - // Prefix with '\t' - for _, v := range configOptions.sshOptions { - hostBlock = append(hostBlock, "\t"+v) - } + // User options first (SSH only uses the first + // option unless it can be given multiple times) + for _, opt := range sshConfigOpts.sshOptions { + err := configOptions.addOptions(opt) + if err != nil { + return xerrors.Errorf("add flag config option %q: %w", opt, err) + } + } - _, _ = buf.WriteString(strings.Join(hostBlock, "\n")) - _ = buf.WriteByte('\n') + // Deployment options second, allow them to + // override standard options. + for k, v := range coderdConfig.SSHConfigOptions { + opt := fmt.Sprintf("%s %s", k, v) + err := configOptions.addOptions(opt) + if err != nil { + return xerrors.Errorf("add coderd config option %q: %w", opt, err) } } + // Finally, add the standard options. + if err := configOptions.addOptions(defaultOptions...); err != nil { + return err + } + + hostBlock := []string{ + "Host " + coderdConfig.HostnamePrefix + "*", + } + // Prefix with '\t' + for _, v := range configOptions.sshOptions { + hostBlock = append(hostBlock, "\t"+v) + } + + _, _ = buf.WriteString(strings.Join(hostBlock, "\n")) + _ = buf.WriteByte('\n') + sshConfigWriteSectionEnd(buf) // Write the remainder of the users config file to buf. @@ -532,9 +441,17 @@ func (r *RootCmd) configSSH() *serpent.Command { _, _ = fmt.Fprintf(out, "Updated %q\n", sshConfigFile) } - if len(workspaceConfigs) > 0 { + res, err := client.Workspaces(ctx, codersdk.WorkspaceFilter{ + Owner: codersdk.Me, + Limit: 1, + }) + if err != nil { + return xerrors.Errorf("fetch workspaces failed: %w", err) + } + + if len(res.Workspaces) > 0 { _, _ = fmt.Fprintln(out, "You should now be able to ssh into your workspace.") - _, _ = fmt.Fprintf(out, "For example, try running:\n\n\t$ ssh %s%s\n", coderdConfig.HostnamePrefix, workspaceConfigs[0].Name) + _, _ = fmt.Fprintf(out, "For example, try running:\n\n\t$ ssh %s%s\n", coderdConfig.HostnamePrefix, res.Workspaces[0].Name) } else { _, _ = fmt.Fprint(out, "You don't have any workspaces yet, try creating one with:\n\n\t$ coder create \n") } diff --git a/cli/configssh_test.go b/cli/configssh_test.go index 5bedd18cb27dc..3b88ab1e54db7 100644 --- a/cli/configssh_test.go +++ b/cli/configssh_test.go @@ -1,8 +1,6 @@ package cli_test import ( - "bufio" - "bytes" "context" "fmt" "io" @@ -16,7 +14,6 @@ import ( "sync" "testing" - "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -27,7 +24,6 @@ import ( "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/workspacesdk" - "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/pty/ptytest" "github.com/coder/coder/v2/testutil" ) @@ -194,7 +190,7 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { ssh string } type wantConfig struct { - ssh string + ssh []string regexMatch string } type match struct { @@ -215,10 +211,10 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { {match: "Continue?", write: "yes"}, }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - baseHeader, - "", - }, "\n"), + ssh: []string{ + headerStart, + headerEnd, + }, }, }, { @@ -230,44 +226,19 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - "Host myhost", - " HostName myhost", - baseHeader, - "", - }, 
"\n"), + ssh: []string{ + strings.Join([]string{ + "Host myhost", + " HostName myhost", + }, "\n"), + headerStart, + headerEnd, + }, }, matches: []match{ {match: "Continue?", write: "yes"}, }, }, - { - name: "Section is not moved on re-run", - writeConfig: writeConfig{ - ssh: strings.Join([]string{ - "Host myhost", - " HostName myhost", - "", - baseHeader, - "", - "Host otherhost", - " HostName otherhost", - "", - }, "\n"), - }, - wantConfig: wantConfig{ - ssh: strings.Join([]string{ - "Host myhost", - " HostName myhost", - "", - baseHeader, - "", - "Host otherhost", - " HostName otherhost", - "", - }, "\n"), - }, - }, { name: "Section is not moved on re-run with new options", writeConfig: writeConfig{ @@ -283,20 +254,24 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - "Host myhost", - " HostName myhost", - "", - headerStart, - "# Last config-ssh options:", - "# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - "Host otherhost", - " HostName otherhost", - "", - }, "\n"), + ssh: []string{ + strings.Join([]string{ + "Host myhost", + " HostName myhost", + "", + headerStart, + "# Last config-ssh options:", + "# :ssh-option=ForwardAgent=yes", + "#", + }, "\n"), + strings.Join([]string{ + headerEnd, + "", + "Host otherhost", + " HostName otherhost", + "", + }, "\n"), + }, }, args: []string{ "--ssh-option", "ForwardAgent=yes", @@ -314,10 +289,13 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - baseHeader, - "", - }, "\n"), + ssh: []string{ + headerStart, + strings.Join([]string{ + headerEnd, + "", + }, "\n"), + }, }, matches: []match{ {match: "Continue?", write: "yes"}, @@ -329,14 +307,17 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { ssh: "", }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - "# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - }, "\n"), + ssh: []string{ + strings.Join([]string{ + headerStart, + "# Last config-ssh options:", + "# :ssh-option=ForwardAgent=yes", + "#", + }, "\n"), + strings.Join([]string{ + headerEnd, + "", + }, "\n")}, }, args: []string{"--ssh-option", "ForwardAgent=yes"}, matches: []match{ @@ -351,14 +332,17 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - "# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - }, "\n"), + ssh: []string{ + strings.Join([]string{ + headerStart, + "# Last config-ssh options:", + "# :ssh-option=ForwardAgent=yes", + "#", + }, "\n"), + strings.Join([]string{ + headerEnd, + "", + }, "\n")}, }, args: []string{"--ssh-option", "ForwardAgent=yes"}, matches: []match{ @@ -378,40 +362,19 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - baseHeader, - "", - }, "\n"), + ssh: []string{ + headerStart, + strings.Join([]string{ + headerEnd, + "", + }, "\n"), + }, }, matches: []match{ {match: "Use new options?", write: "yes"}, {match: "Continue?", write: "yes"}, }, }, - { - name: "No prompt on no changes", - writeConfig: writeConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - "# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - }, "\n"), - }, - wantConfig: wantConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - 
"# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - }, "\n"), - }, - args: []string{"--ssh-option", "ForwardAgent=yes"}, - }, { name: "No changes when continue = no", writeConfig: writeConfig{ @@ -425,14 +388,14 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ + ssh: []string{strings.Join([]string{ headerStart, "# Last config-ssh options:", "# :ssh-option=ForwardAgent=yes", "#", headerEnd, "", - }, "\n"), + }, "\n")}, }, args: []string{"--ssh-option", "ForwardAgent=no"}, matches: []match{ @@ -453,29 +416,32 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - // Last options overwritten. - baseHeader, - "", - }, "\n"), + ssh: []string{ + headerStart, + headerEnd, + }, }, args: []string{"--yes"}, }, { name: "Serialize supported flags", wantConfig: wantConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - "# :wait=yes", - "# :ssh-host-prefix=coder-test.", - "# :header=X-Test-Header=foo", - "# :header=X-Test-Header2=bar", - "# :header-command=printf h1=v1 h2=\"v2\" h3='v3'", - "#", - headerEnd, - "", - }, "\n"), + ssh: []string{ + strings.Join([]string{ + headerStart, + "# Last config-ssh options:", + "# :wait=yes", + "# :ssh-host-prefix=coder-test.", + "# :header=X-Test-Header=foo", + "# :header=X-Test-Header2=bar", + "# :header-command=printf h1=v1 h2=\"v2\" h3='v3'", + "#", + }, "\n"), + strings.Join([]string{ + headerEnd, + "", + }, "\n"), + }, }, args: []string{ "--yes", @@ -500,15 +466,20 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ - headerStart, - "# Last config-ssh options:", - "# :wait=no", - "# :ssh-option=ForwardAgent=yes", - "#", - headerEnd, - "", - }, "\n"), + ssh: []string{ + strings.Join( + []string{ + headerStart, + "# Last config-ssh options:", + "# :wait=no", + "# :ssh-option=ForwardAgent=yes", + "#", + }, "\n"), + strings.Join([]string{ + headerEnd, + "", + }, "\n"), + }, }, args: []string{ "--use-previous-options", @@ -524,10 +495,10 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }, "\n"), }, wantConfig: wantConfig{ - ssh: strings.Join([]string{ + ssh: []string{strings.Join([]string{ baseHeader, "", - }, "\n"), + }, "\n")}, }, args: []string{ "--ssh-option", "ForwardAgent=yes", @@ -586,7 +557,7 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { wantErr: false, hasAgent: true, wantConfig: wantConfig{ - regexMatch: `ProxyCommand .* --header "X-Test-Header=foo" --header "X-Test-Header2=bar" ssh`, + regexMatch: `ProxyCommand .* --header "X-Test-Header=foo" --header "X-Test-Header2=bar" ssh .* --ssh-host-prefix coder. %h`, }, }, { @@ -598,7 +569,7 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { wantErr: false, hasAgent: true, wantConfig: wantConfig{ - regexMatch: `ProxyCommand .* --header-command "printf h1=v1" ssh`, + regexMatch: `ProxyCommand .* --header-command "printf h1=v1" ssh .* --ssh-host-prefix coder. %h`, }, }, { @@ -610,7 +581,7 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { wantErr: false, hasAgent: true, wantConfig: wantConfig{ - regexMatch: `ProxyCommand .* --header-command "printf h1=v1 h2=\\\"v2\\\"" ssh`, + regexMatch: `ProxyCommand .* --header-command "printf h1=v1 h2=\\\"v2\\\"" ssh .* --ssh-host-prefix coder. 
%h`, }, }, { @@ -622,7 +593,7 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { wantErr: false, hasAgent: true, wantConfig: wantConfig{ - regexMatch: `ProxyCommand .* --header-command "printf h1=v1 h2='v2'" ssh`, + regexMatch: `ProxyCommand .* --header-command "printf h1=v1 h2='v2'" ssh .* --ssh-host-prefix coder. %h`, }, }, { @@ -686,10 +657,15 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { <-done - if tt.wantConfig.ssh != "" || tt.wantConfig.regexMatch != "" { + if len(tt.wantConfig.ssh) != 0 || tt.wantConfig.regexMatch != "" { got := sshConfigFileRead(t, sshConfigName) - if tt.wantConfig.ssh != "" { - assert.Equal(t, tt.wantConfig.ssh, got) + // Require that the generated config has the expected snippets in order. + for _, want := range tt.wantConfig.ssh { + idx := strings.Index(got, want) + if idx == -1 { + require.Contains(t, got, want) + } + got = got[idx+len(want):] } if tt.wantConfig.regexMatch != "" { assert.Regexp(t, tt.wantConfig.regexMatch, got, "regex match") @@ -698,135 +674,3 @@ func TestConfigSSH_FileWriteAndOptionsFlow(t *testing.T) { }) } } - -func TestConfigSSH_Hostnames(t *testing.T) { - t.Parallel() - - type resourceSpec struct { - name string - agents []string - } - tests := []struct { - name string - resources []resourceSpec - expected []string - }{ - { - name: "one resource with one agent", - resources: []resourceSpec{ - {name: "foo", agents: []string{"agent1"}}, - }, - expected: []string{"coder.@", "coder.@.agent1"}, - }, - { - name: "one resource with two agents", - resources: []resourceSpec{ - {name: "foo", agents: []string{"agent1", "agent2"}}, - }, - expected: []string{"coder.@.agent1", "coder.@.agent2"}, - }, - { - name: "two resources with one agent", - resources: []resourceSpec{ - {name: "foo", agents: []string{"agent1"}}, - {name: "bar"}, - }, - expected: []string{"coder.@", "coder.@.agent1"}, - }, - { - name: "two resources with two agents", - resources: []resourceSpec{ - {name: "foo", agents: []string{"agent1"}}, - {name: "bar", agents: []string{"agent2"}}, - }, - expected: []string{"coder.@.agent1", "coder.@.agent2"}, - }, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - var resources []*proto.Resource - for _, resourceSpec := range tt.resources { - resource := &proto.Resource{ - Name: resourceSpec.name, - Type: "aws_instance", - } - for _, agentName := range resourceSpec.agents { - resource.Agents = append(resource.Agents, &proto.Agent{ - Id: uuid.NewString(), - Name: agentName, - }) - } - resources = append(resources, resource) - } - - client, db := coderdtest.NewWithDatabase(t, nil) - owner := coderdtest.CreateFirstUser(t, client) - member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) - - r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ - OrganizationID: owner.OrganizationID, - OwnerID: memberUser.ID, - }).Resource(resources...).Do() - sshConfigFile := sshConfigFileName(t) - - inv, root := clitest.New(t, "config-ssh", "--ssh-config-file", sshConfigFile) - clitest.SetupConfig(t, member, root) - - pty := ptytest.New(t) - inv.Stdin = pty.Input() - inv.Stdout = pty.Output() - clitest.Start(t, inv) - - matches := []struct { - match, write string - }{ - {match: "Continue?", write: "yes"}, - } - for _, m := range matches { - pty.ExpectMatch(m.match) - pty.WriteLine(m.write) - } - - pty.ExpectMatch("Updated") - - var expectedHosts []string - for _, hostnamePattern := range tt.expected { - hostname := strings.ReplaceAll(hostnamePattern, 
"@", r.Workspace.Name) - expectedHosts = append(expectedHosts, hostname) - } - - hosts := sshConfigFileParseHosts(t, sshConfigFile) - require.ElementsMatch(t, expectedHosts, hosts) - }) - } -} - -// sshConfigFileParseHosts reads a file in the format of .ssh/config and extracts -// the hostnames that are listed in "Host" directives. -func sshConfigFileParseHosts(t *testing.T, name string) []string { - t.Helper() - b, err := os.ReadFile(name) - require.NoError(t, err) - - var result []string - lineScanner := bufio.NewScanner(bytes.NewBuffer(b)) - for lineScanner.Scan() { - line := lineScanner.Text() - line = strings.TrimSpace(line) - - tokenScanner := bufio.NewScanner(bytes.NewBufferString(line)) - tokenScanner.Split(bufio.ScanWords) - ok := tokenScanner.Scan() - if ok && tokenScanner.Text() == "Host" { - for tokenScanner.Scan() { - result = append(result, tokenScanner.Text()) - } - } - } - - return result -} From 473fcc33a54bdba0c4b4c1b9628f9d62fafec0ad Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 14 Jan 2025 09:04:14 -0300 Subject: [PATCH 0025/1096] chore: add Chart component (#16118) Related to https://github.com/coder/coder/issues/15297 and based on [this design](https://www.figma.com/design/gtVchocIWPGYjzaHD2OIY7/Setting-page?node-id=16-1848&m=dev). --- site/package.json | 1 + site/pnpm-lock.yaml | 219 +++++++++++ site/src/components/Chart/Chart.stories.tsx | 75 ++++ site/src/components/Chart/Chart.tsx | 372 ++++++++++++++++++ site/src/index.css | 77 ++-- .../modules/dashboard/Navbar/MobileMenu.tsx | 2 +- 6 files changed, 709 insertions(+), 37 deletions(-) create mode 100644 site/src/components/Chart/Chart.stories.tsx create mode 100644 site/src/components/Chart/Chart.tsx diff --git a/site/package.json b/site/package.json index 5c1445cc0a51a..39c52f69d6a41 100644 --- a/site/package.json +++ b/site/package.json @@ -106,6 +106,7 @@ "react-syntax-highlighter": "15.6.1", "react-virtualized-auto-sizer": "1.0.24", "react-window": "1.8.10", + "recharts": "2.15.0", "remark-gfm": "4.0.0", "resize-observer-polyfill": "1.5.1", "rollup-plugin-visualizer": "5.12.0", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index e52fdd48f9e54..40d50f146e1c8 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -228,6 +228,9 @@ importers: react-window: specifier: 1.8.10 version: 1.8.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + recharts: + specifier: 2.15.0 + version: 2.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1) remark-gfm: specifier: 4.0.0 version: 4.0.0 @@ -2720,6 +2723,33 @@ packages: '@types/cookie@0.6.0': resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} + '@types/d3-array@3.2.1': + resolution: {integrity: sha512-Y2Jn2idRrLzUfAKV2LyRImR+y4oa2AntrgID95SHJxuMUrkNXmanDSed71sRNZysveJVt1hLLemQZIady0FpEg==} + + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-ease@3.0.2': + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-path@3.1.0': + resolution: {integrity: sha512-P2dlU/q51fkOc/Gfl3Ul9kicV7l+ra934qBFXCFhrZMOL6du1TM0pm1ThYvENukyOn5h9v+yMJ9Fn5JK4QozrQ==} + + '@types/d3-scale@4.0.8': + resolution: {integrity: 
sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==} + + '@types/d3-shape@3.1.7': + resolution: {integrity: sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==} + + '@types/d3-time@3.0.4': + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + + '@types/d3-timer@3.0.2': + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} @@ -3488,6 +3518,50 @@ packages: csstype@3.1.3: resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-format@3.1.0: + resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + data-urls@3.0.2: resolution: {integrity: sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==} engines: {node: '>=12'} @@ -3516,6 +3590,9 @@ packages: supports-color: optional: true + decimal.js-light@2.5.1: + resolution: {integrity: sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==} + decimal.js@10.4.3: resolution: {integrity: sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==} @@ -3801,6 +3878,9 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} + eventemitter3@4.0.7: + resolution: {integrity: 
sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + eventsourcemock@2.0.0: resolution: {integrity: sha512-tSmJnuE+h6A8/hLRg0usf1yL+Q8w01RQtmg0Uzgoxk/HIPZrIUeAr/A4es/8h1wNsoG8RdiESNQLTKiNwbSC3Q==} @@ -3830,6 +3910,10 @@ packages: fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + fast-equals@5.2.2: + resolution: {integrity: sha512-V7/RktU11J3I36Nwq2JnZEM7tNm17eBJz+u25qdxBZeCKiX6BkVSZQjwWIr+IobgnZy+ag73tTZgZi7tr0LrBw==} + engines: {node: '>=6.0.0'} + fast-glob@3.3.2: resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} engines: {node: '>=8.6.0'} @@ -4163,6 +4247,10 @@ packages: resolution: {integrity: sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==} engines: {node: '>= 0.4'} + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + invariant@2.2.4: resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} @@ -5418,6 +5506,12 @@ packages: peerDependencies: react: '>=16.8' + react-smooth@4.0.4: + resolution: {integrity: sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==} + peerDependencies: + react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-style-singleton@2.2.1: resolution: {integrity: sha512-ZWj0fHEMyWkHzKYUr2Bs/4zU6XLmq9HsgBURm7g5pAVfyn49DgUiNgY2d4lXRlYSiCif9YBGpQleewkcqddc7g==} engines: {node: '>=10'} @@ -5489,6 +5583,16 @@ packages: resolution: {integrity: sha512-9FHoNjX1yjuesMwuthAmPKabxYQdOgihFYmT5ebXfYGBcnqXZf3WOVz+5foEZ8Y83P4ZY6yQD5GMmtV+pgCCAQ==} engines: {node: '>= 4'} + recharts-scale@0.4.5: + resolution: {integrity: sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==} + + recharts@2.15.0: + resolution: {integrity: sha512-cIvMxDfpAmqAmVgc4yb7pgm/O1tmmkl/CjrvXuW+62/+7jj/iF9Ykm+hb/UJt42TREHMyd3gb+pkgoa2MxgDIw==} + engines: {node: '>=14'} + peerDependencies: + react: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + redent@3.0.0: resolution: {integrity: sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==} engines: {node: '>=8'} @@ -6163,6 +6267,9 @@ packages: vfile@6.0.1: resolution: {integrity: sha512-1bYqc7pt6NIADBJ98UiG0Bn/CHIVOoZ/IyEkqIruLg0mE1BKzkOXY2D6CSqQIcKqgadppE5lrxgWXJmXd7zZJw==} + victory-vendor@36.9.2: + resolution: {integrity: sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==} + vite-plugin-checker@0.8.0: resolution: {integrity: sha512-UA5uzOGm97UvZRTdZHiQVYFnd86AVn8EVaD4L3PoVzxH+IZSfaAw14WGFwX9QS23UW3lV/5bVKZn6l0w+q9P0g==} engines: {node: '>=14.16'} @@ -8679,6 +8786,30 @@ snapshots: '@types/cookie@0.6.0': {} + '@types/d3-array@3.2.1': {} + + '@types/d3-color@3.1.3': {} + + '@types/d3-ease@3.0.2': {} + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-path@3.1.0': {} + + '@types/d3-scale@4.0.8': + dependencies: + '@types/d3-time': 3.0.4 + + '@types/d3-shape@3.1.7': + dependencies: + '@types/d3-path': 3.1.0 + + '@types/d3-time@3.0.4': {} + + '@types/d3-timer@3.0.2': {} + '@types/debug@4.1.12': dependencies: '@types/ms': 
0.7.34 @@ -9482,6 +9613,44 @@ snapshots: csstype@3.1.3: {} + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + + d3-color@3.1.0: {} + + d3-ease@3.0.1: {} + + d3-format@3.1.0: {} + + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-path@3.1.0: {} + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.0 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + data-urls@3.0.2: dependencies: abab: 2.0.6 @@ -9502,6 +9671,8 @@ snapshots: dependencies: ms: 2.1.3 + decimal.js-light@2.5.1: {} + decimal.js@10.4.3: {} decode-named-character-reference@1.0.2: @@ -9840,6 +10011,8 @@ snapshots: etag@1.8.1: {} + eventemitter3@4.0.7: {} + eventsourcemock@2.0.0: {} execa@5.1.1: @@ -9907,6 +10080,8 @@ snapshots: fast-deep-equal@3.1.3: optional: true + fast-equals@5.2.2: {} + fast-glob@3.3.2: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -10252,6 +10427,8 @@ snapshots: hasown: 2.0.2 side-channel: 1.0.6 + internmap@2.0.3: {} + invariant@2.2.4: dependencies: loose-envify: 1.4.0 @@ -11890,6 +12067,14 @@ snapshots: '@remix-run/router': 1.19.2 react: 18.3.1 + react-smooth@4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + fast-equals: 5.2.2 + prop-types: 15.8.1 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-transition-group: 4.4.5(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react-style-singleton@2.2.1(@types/react@18.3.12)(react@18.3.1): dependencies: get-nonce: 1.0.1 @@ -11979,6 +12164,23 @@ snapshots: tiny-invariant: 1.3.3 tslib: 2.6.2 + recharts-scale@0.4.5: + dependencies: + decimal.js-light: 2.5.1 + + recharts@2.15.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1): + dependencies: + clsx: 2.1.1 + eventemitter3: 4.0.7 + lodash: 4.17.21 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-is: 18.3.1 + react-smooth: 4.0.4(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + recharts-scale: 0.4.5 + tiny-invariant: 1.3.3 + victory-vendor: 36.9.2 + redent@3.0.0: dependencies: indent-string: 4.0.0 @@ -12736,6 +12938,23 @@ snapshots: unist-util-stringify-position: 4.0.0 vfile-message: 4.0.2 + victory-vendor@36.9.2: + dependencies: + '@types/d3-array': 3.2.1 + '@types/d3-ease': 3.0.2 + '@types/d3-interpolate': 3.0.4 + '@types/d3-scale': 4.0.8 + '@types/d3-shape': 3.1.7 + '@types/d3-time': 3.0.4 + '@types/d3-timer': 3.0.2 + d3-array: 3.2.4 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-scale: 4.0.2 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-timer: 3.0.1 + vite-plugin-checker@0.8.0(@biomejs/biome@1.9.4)(eslint@8.52.0)(optionator@0.9.3)(typescript@5.6.3)(vite@5.4.11(@types/node@20.17.11)): dependencies: '@babel/code-frame': 7.25.7 diff --git a/site/src/components/Chart/Chart.stories.tsx b/site/src/components/Chart/Chart.stories.tsx new file mode 100644 index 0000000000000..df863d4abee0e --- /dev/null +++ b/site/src/components/Chart/Chart.stories.tsx @@ -0,0 +1,75 @@ +import type { Meta, StoryObj } from "@storybook/react"; +import { Area, AreaChart, CartesianGrid, XAxis, YAxis } from "recharts"; +import { + type ChartConfig, + ChartContainer, + ChartTooltip, + ChartTooltipContent, +} from "./Chart"; + +const chartData = [ + { month: "January", users: 186 }, + { month: "February", users: 305 }, + { month: "March", users: 237 }, + { month: "April", users: 73 }, + { month: "May", users: 209 }, + { month: "June", users: 214 }, +]; + +const chartConfig = { + users: { + 
label: "Users", + color: "hsl(var(--chart-1))", + }, +} satisfies ChartConfig; + +const meta: Meta = { + title: "components/Chart", + render: () => { + return ( + + + + value.slice(0, 3)} + /> + value.toLocaleString()} + /> + } + /> + + + + ); + }, +}; + +export default meta; +type Story = StoryObj; + +export const Default: Story = {}; diff --git a/site/src/components/Chart/Chart.tsx b/site/src/components/Chart/Chart.tsx new file mode 100644 index 0000000000000..ba5ff7e7ed43d --- /dev/null +++ b/site/src/components/Chart/Chart.tsx @@ -0,0 +1,372 @@ +/** + * Copied from shadc/ui on 01/13/2025 + * @see {@link https://ui.shadcn.com/docs/components/chart} + */ +import * as React from "react"; +import * as RechartsPrimitive from "recharts"; +import { cn } from "utils/cn"; + +// Format: { THEME_NAME: CSS_SELECTOR } +const THEMES = { light: "", dark: ".dark" } as const; + +export type ChartConfig = { + [k in string]: { + label?: React.ReactNode; + icon?: React.ComponentType; + } & ( + | { color?: string; theme?: never } + | { color?: never; theme: Record } + ); +}; + +type ChartContextProps = { + config: ChartConfig; +}; + +export const ChartContext = React.createContext(null); + +function useChart() { + const context = React.useContext(ChartContext); + + if (!context) { + throw new Error("useChart must be used within a "); + } + + return context; +} + +export const ChartContainer = React.forwardRef< + HTMLDivElement, + React.ComponentProps<"div"> & { + config: ChartConfig; + children: React.ComponentProps< + typeof RechartsPrimitive.ResponsiveContainer + >["children"]; + } +>(({ id, className, children, config, ...props }, ref) => { + const uniqueId = React.useId(); + const chartId = `chart-${id || uniqueId.replace(/:/g, "")}`; + + return ( + +
+ + + {children} + +
+
+ ); +}); +ChartContainer.displayName = "Chart"; + +export const ChartStyle = ({ + id, + config, +}: { id: string; config: ChartConfig }) => { + const colorConfig = Object.entries(config).filter( + ([, config]) => config.theme || config.color, + ); + + if (!colorConfig.length) { + return null; + } + + return ( + \ No newline at end of file diff --git a/docs/manifest.json b/docs/manifest.json index ea1d19561593f..8692336d089ea 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -684,7 +684,7 @@ "description": "Learn how to run and integrate AI coding agents like GPT-Code, OpenDevin, or SWE-Agent in Coder workspaces to boost developer productivity.", "path": "./ai-coder/index.md", "icon_path": "./images/icons/wand.svg", - "state": ["early access"], + "state": ["beta"], "children": [ { "title": "Learn about coding agents", @@ -695,37 +695,37 @@ "title": "Create a Coder template for agents", "description": "Create a purpose-built template for your AI agents", "path": "./ai-coder/create-template.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Integrate with your issue tracker", "description": "Assign tickets to AI agents and interact via code reviews", "path": "./ai-coder/issue-tracker.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Model Context Protocols (MCP) and adding AI tools", "description": "Improve results by adding tools to your AI agents", "path": "./ai-coder/best-practices.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Supervise agents via Coder UI", "description": "Interact with agents via the Coder UI", "path": "./ai-coder/coder-dashboard.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Supervise agents via the IDE", "description": "Interact with agents via VS Code or Cursor", "path": "./ai-coder/ide-integration.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Programmatically manage agents", "description": "Manage agents via MCP, the Coder CLI, and/or REST API", "path": "./ai-coder/headless.md", - "state": ["early access"] + "state": ["beta"] }, { "title": "Securing agents in Coder", @@ -737,7 +737,7 @@ "title": "Custom agents", "description": "Learn how to use custom agents with Coder", "path": "./ai-coder/custom-agents.md", - "state": ["early access"] + "state": ["beta"] } ] }, From 205076e6e7f80b731aeaad523dcca56ee6d334b3 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Wed, 30 Apr 2025 13:58:12 -0300 Subject: [PATCH 0933/1096] refactor: change how timings are formatted (#17623) --- .../workspaces/WorkspaceTiming/Chart/utils.ts | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/site/src/modules/workspaces/WorkspaceTiming/Chart/utils.ts b/site/src/modules/workspaces/WorkspaceTiming/Chart/utils.ts index 55df5b9ffad48..2790701db5265 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/Chart/utils.ts +++ b/site/src/modules/workspaces/WorkspaceTiming/Chart/utils.ts @@ -61,29 +61,29 @@ export const makeTicks = (time: number) => { }; export const formatTime = (time: number): string => { - const seconds = Math.floor(time / 1000); - const minutes = Math.floor(seconds / 60); - const hours = Math.floor(minutes / 60); - const days = Math.floor(hours / 24); + const absTime = Math.abs(time); + let unit = ""; + let value = 0; - const parts: string[] = []; - if (days > 0) { - parts.push(`${days}d`); + if (absTime < second) { + value = time; + unit = "ms"; + } else if (absTime < minute) { + value = time / second; + unit = "s"; + } else if 
(absTime < hour) { + value = time / minute; + unit = "m"; + } else if (absTime < day) { + value = time / hour; + unit = "h"; + } else { + value = time / day; + unit = "d"; } - if (hours > 0) { - parts.push(`${hours % 24}h`); - } - if (minutes > 0) { - parts.push(`${minutes % 60}m`); - } - if (seconds > 0) { - parts.push(`${seconds % 60}s`); - } - if (time % 1000 > 0) { - parts.push(`${time % 1000}ms`); - } - - return parts.join(" "); + return `${value.toLocaleString(undefined, { + maximumFractionDigits: 2, + })}${unit}`; }; export const calcOffset = (range: TimeRange, baseRange: TimeRange): number => { From f108f9d71ffc4f49030cc6fa2ae35063aa2d3c25 Mon Sep 17 00:00:00 2001 From: brettkolodny Date: Wed, 30 Apr 2025 15:08:25 -0400 Subject: [PATCH 0934/1096] chore: setup knip and remove unused exports, files, and dependencies (#17608) Closes [coder/interal#600](https://github.com/coder/internal/issues/600) --- site/.knip.jsonc | 17 + site/biome.jsonc | 3 + site/e2e/constants.ts | 8 - site/e2e/helpers.ts | 4 +- site/e2e/tests/deployment/idpOrgSync.spec.ts | 1 - .../e2e/tests/organizations/auditLogs.spec.ts | 2 +- site/package.json | 14 +- site/pnpm-lock.yaml | 440 +++++----------- site/src/__mocks__/react-markdown.tsx | 7 - site/src/api/api.ts | 2 +- site/src/api/errors.ts | 2 +- site/src/api/queries/authCheck.ts | 2 +- site/src/api/queries/groups.ts | 8 +- site/src/api/queries/idpsync.ts | 4 +- site/src/api/queries/organizations.ts | 11 +- site/src/api/queries/settings.ts | 2 +- site/src/api/queries/templates.ts | 6 +- site/src/api/queries/workspaceBuilds.ts | 2 +- site/src/api/queries/workspaces.ts | 2 +- .../components/Avatar/AvatarDataSkeleton.tsx | 1 - site/src/components/Badge/Badge.tsx | 2 +- site/src/components/Button/Button.tsx | 2 +- site/src/components/Chart/Chart.tsx | 11 +- site/src/components/Command/Command.tsx | 4 +- site/src/components/CopyButton/CopyButton.tsx | 2 +- site/src/components/Dialog/Dialog.tsx | 6 +- .../components/DropdownMenu/DropdownMenu.tsx | 20 +- site/src/components/Icons/GitlabIcon.tsx | 29 -- site/src/components/Icons/MarkdownIcon.tsx | 21 - site/src/components/Icons/TerraformIcon.tsx | 22 - site/src/components/Link/Link.tsx | 2 +- site/src/components/Logs/LogLine.tsx | 2 +- .../PageHeader/FullWidthPageHeader.tsx | 2 +- site/src/components/ScrollArea/ScrollArea.tsx | 2 +- site/src/components/Select/Select.tsx | 6 +- site/src/components/Table/Table.tsx | 4 +- site/src/contexts/ProxyContext.tsx | 4 +- .../DeploymentBanner/DeploymentBannerView.tsx | 2 +- .../LicenseBanner/LicenseBannerView.tsx | 2 +- .../modules/dashboard/Navbar/NavbarView.tsx | 3 - .../UserDropdown/UserDropdownContent.tsx | 2 +- .../management/DeploymentSidebarView.tsx | 1 - site/src/modules/navigation.ts | 4 +- .../InboxPopover.stories.tsx | 2 +- .../JobStatusIndicator.stories.tsx | 1 - .../modules/provisioners/ProvisionerGroup.tsx | 487 ------------------ .../modules/provisioners/ProvisionerTag.tsx | 2 +- .../resources/AgentDevcontainerCard.tsx | 2 +- site/src/modules/resources/AgentMetadata.tsx | 2 +- .../src/modules/resources/AppLink/AppLink.tsx | 1 - .../resources/TerminalLink/TerminalLink.tsx | 2 +- .../WorkspaceBuildLogs/WorkspaceBuildLogs.tsx | 2 +- .../WorkspaceOutdatedTooltip.tsx | 4 +- .../workspaces/WorkspaceTiming/Chart/Bar.tsx | 7 +- .../WorkspaceTiming/Chart/XAxis.tsx | 6 +- site/src/pages/404Page/404Page.tsx | 2 +- site/src/pages/AuditPage/AuditHelpTooltip.tsx | 2 +- site/src/pages/AuditPage/AuditPage.tsx | 1 - site/src/pages/AuditPage/AuditPageView.tsx | 2 +- 
site/src/pages/CliAuthPage/CliAuthPage.tsx | 2 +- .../pages/CliInstallPage/CliInstallPage.tsx | 2 +- .../CreateTokenPage.stories.tsx | 2 +- .../CreateTokenPage/CreateTokenPage.test.tsx | 2 +- .../pages/CreateTokenPage/CreateTokenPage.tsx | 2 +- .../CreateUserPage/CreateUserPage.test.tsx | 2 +- .../pages/CreateUserPage/CreateUserPage.tsx | 4 +- .../CreateWorkspacePage.tsx | 2 +- .../CreateWorkspacePageExperimental.tsx | 2 +- .../CreateWorkspacePageViewExperimental.tsx | 2 +- .../IdpOrgSyncPage/IdpOrgSyncPage.tsx | 4 +- .../IdpOrgSyncPage/IdpOrgSyncPageView.tsx | 4 +- .../NotificationsPage.stories.tsx | 2 +- .../NotificationsPage/NotificationsPage.tsx | 3 +- .../NotificationsPage/storybookUtils.ts | 4 +- .../OverviewPage/ChartSection.tsx | 58 --- site/src/pages/GroupsPage/CreateGroupPage.tsx | 4 +- .../pages/GroupsPage/CreateGroupPageView.tsx | 1 - site/src/pages/GroupsPage/GroupPage.tsx | 2 +- .../pages/GroupsPage/GroupSettingsPage.tsx | 2 +- site/src/pages/GroupsPage/GroupsPage.tsx | 4 +- .../pages/GroupsPage/GroupsPageProvider.tsx | 6 +- site/src/pages/GroupsPage/GroupsPageView.tsx | 2 - .../HealthPage/AccessURLPage.stories.tsx | 2 +- site/src/pages/HealthPage/AccessURLPage.tsx | 2 +- .../src/pages/HealthPage/DERPPage.stories.tsx | 2 +- site/src/pages/HealthPage/DERPPage.tsx | 2 +- .../HealthPage/DERPRegionPage.stories.tsx | 2 +- site/src/pages/HealthPage/DERPRegionPage.tsx | 2 +- .../pages/HealthPage/DatabasePage.stories.tsx | 2 +- site/src/pages/HealthPage/DatabasePage.tsx | 2 +- .../ProvisionerDaemonsPage.stories.tsx | 2 +- .../HealthPage/ProvisionerDaemonsPage.tsx | 2 +- .../HealthPage/WebsocketPage.stories.tsx | 2 +- site/src/pages/HealthPage/WebsocketPage.tsx | 2 +- .../HealthPage/WorkspaceProxyPage.stories.tsx | 2 +- .../pages/HealthPage/WorkspaceProxyPage.tsx | 2 +- .../src/pages/IconsPage/IconsPage.stories.tsx | 2 +- site/src/pages/IconsPage/IconsPage.tsx | 2 +- site/src/pages/LoginPage/LoginPage.test.tsx | 2 +- site/src/pages/LoginPage/LoginPage.tsx | 2 +- .../CustomRolesPage/CreateEditRolePage.tsx | 2 +- .../CreateEditRolePageView.stories.tsx | 2 +- .../CreateEditRolePageView.tsx | 2 +- .../CustomRolesPage/CustomRolesPage.tsx | 4 +- .../CustomRolesPage/CustomRolesPageView.tsx | 2 - .../IdpSyncPage/IdpSyncPage.tsx | 2 +- .../IdpSyncPage/IdpSyncPageView.stories.tsx | 2 +- .../IdpSyncPage/IdpSyncPageView.tsx | 2 +- .../OrganizationMembersPageView.tsx | 1 - .../JobRow.stories.tsx | 2 +- .../OrganizationProvisionerJobsPage.tsx | 2 - .../UserTable/TableColumnHelpTooltip.tsx | 2 +- .../TemplateInsightsPage/IntervalMenu.tsx | 2 +- .../src/pages/TemplatePage/TemplateLayout.tsx | 1 - .../TemplateResourcesPage.tsx | 2 +- .../TemplateVersionsPage/VersionsTable.tsx | 2 +- .../TemplateSettingsPage.test.tsx | 2 +- .../TemplateSettingsPage.tsx | 2 +- .../TemplatePermissionsPage.tsx | 2 +- .../TemplateVariablesForm.tsx | 2 +- .../TemplateVariablesPage.tsx | 2 +- .../TemplateVersionEditorPage.tsx | 2 +- .../TemplateVersionStatusBadge.tsx | 2 +- .../TemplateVersionPage.tsx | 4 +- .../TemplateVersionPageView.tsx | 2 - .../src/pages/TemplatesPage/TemplatesPage.tsx | 2 +- .../pages/TemplatesPage/TemplatesPageView.tsx | 2 +- .../src/pages/TerminalPage/TerminalAlerts.tsx | 8 +- .../AccountPage/AccountPage.test.tsx | 2 +- .../AccountPage/AccountPage.tsx | 2 +- .../AppearancePage/AppearanceForm.tsx | 2 +- .../AppearancePage/AppearancePage.test.tsx | 2 +- .../AppearancePage/AppearancePage.tsx | 2 +- .../ExternalAuthPage/ExternalAuthPageView.tsx | 3 - .../NotificationsPage.stories.tsx | 2 +- 
.../NotificationsPage/NotificationsPage.tsx | 2 +- .../SSHKeysPage/SSHKeysPage.test.tsx | 2 +- .../SSHKeysPage/SSHKeysPage.tsx | 2 +- .../SchedulePage/SchedulePage.test.tsx | 2 +- .../SchedulePage/SchedulePage.tsx | 2 +- .../SecurityPage/SecurityPage.test.tsx | 2 +- .../SecurityPage/SecurityPage.tsx | 2 +- site/src/pages/UserSettingsPage/Sidebar.tsx | 1 - .../TokensPage/TokensPage.tsx | 2 +- .../WorkspaceProxyPage/WorkspaceProxyPage.tsx | 2 +- .../pages/UsersPage/ResetPasswordDialog.tsx | 2 +- site/src/pages/UsersPage/UsersPageView.tsx | 1 - .../pages/UsersPage/UsersTable/UsersTable.tsx | 2 +- .../WorkspaceBuildPage.test.tsx | 2 +- .../WorkspaceBuildPage/WorkspaceBuildPage.tsx | 2 +- site/src/pages/WorkspacePage/BuildRow.tsx | 123 ----- .../pages/WorkspacePage/ResourcesSidebar.tsx | 2 +- .../WorkspacePage/ResourcesSidebarContent.tsx | 29 -- .../WorkspaceActions/constants.ts | 2 +- .../WorkspacePage/WorkspacePage.test.tsx | 2 +- .../src/pages/WorkspacePage/WorkspacePage.tsx | 2 +- .../WorkspaceScheduleControls.tsx | 6 +- .../pages/WorkspacePage/WorkspaceTopbar.tsx | 2 +- .../WorkspaceSchedulePage.test.tsx | 2 +- .../WorkspaceSchedulePage.tsx | 2 +- site/src/pages/WorkspacesPage/LastUsed.tsx | 49 -- .../WorkspacesPage/WorkspacesPageView.tsx | 2 +- .../filter/WorkspacesFilter.tsx | 2 +- .../src/pages/WorkspacesPage/filter/menus.tsx | 2 +- site/src/testHelpers/entities.ts | 100 ++-- site/src/testHelpers/localStorage.ts | 2 +- site/src/testHelpers/storybook.tsx | 25 - site/src/theme/dark/branding.ts | 2 +- site/src/theme/externalImages.ts | 2 +- site/src/theme/light/branding.ts | 2 +- site/src/theme/mui.ts | 2 +- site/src/theme/roles.ts | 2 +- site/src/utils/appearance.ts | 15 - site/src/utils/colors.ts | 24 - site/src/utils/schedule.tsx | 7 +- site/src/utils/workspace.tsx | 4 - site/vite.config.mts | 1 - 177 files changed, 388 insertions(+), 1506 deletions(-) create mode 100644 site/.knip.jsonc delete mode 100644 site/src/__mocks__/react-markdown.tsx delete mode 100644 site/src/components/Icons/GitlabIcon.tsx delete mode 100644 site/src/components/Icons/MarkdownIcon.tsx delete mode 100644 site/src/components/Icons/TerraformIcon.tsx delete mode 100644 site/src/modules/provisioners/ProvisionerGroup.tsx delete mode 100644 site/src/pages/DeploymentSettingsPage/OverviewPage/ChartSection.tsx delete mode 100644 site/src/pages/WorkspacePage/BuildRow.tsx delete mode 100644 site/src/pages/WorkspacePage/ResourcesSidebarContent.tsx delete mode 100644 site/src/pages/WorkspacesPage/LastUsed.tsx diff --git a/site/.knip.jsonc b/site/.knip.jsonc new file mode 100644 index 0000000000000..f4c082a76ecbf --- /dev/null +++ b/site/.knip.jsonc @@ -0,0 +1,17 @@ +{ + "$schema": "https://unpkg.com/knip@5/schema.json", + "entry": ["./src/index.tsx", "./src/serviceWorker.ts"], + "project": ["./src/**/*.ts", "./src/**/*.tsx", "./e2e/**/*.ts"], + "ignore": ["**/*Generated.ts"], + "ignoreBinaries": ["protoc"], + "ignoreDependencies": [ + "@types/react-virtualized-auto-sizer", + "jest_workaround", + "ts-proto" + ], + // Don't report unused exports of types as long as they are used within the file. 
+ "ignoreExportsUsedInFile": { + "interface": true, + "type": true + } +} diff --git a/site/biome.jsonc b/site/biome.jsonc index d26636fabef18..bc6fa8de6e946 100644 --- a/site/biome.jsonc +++ b/site/biome.jsonc @@ -16,6 +16,9 @@ "useButtonType": { "level": "off" }, "useSemanticElements": { "level": "off" } }, + "correctness": { + "noUnusedImports": "warn" + }, "style": { "noNonNullAssertion": { "level": "off" }, "noParameterAssign": { "level": "off" }, diff --git a/site/e2e/constants.ts b/site/e2e/constants.ts index 98757064c6f3f..4e95d642eac5e 100644 --- a/site/e2e/constants.ts +++ b/site/e2e/constants.ts @@ -78,14 +78,6 @@ export const premiumTestsRequired = Boolean( export const license = process.env.CODER_E2E_LICENSE ?? ""; -/** - * Certain parts of the UI change when organizations are enabled. Organizations - * are enabled by a license entitlement, and license configuration is guaranteed - * to run before any other tests, so having this as a bit of "global state" is - * fine. - */ -export const organizationsEnabled = Boolean(license); - // Disabling terraform tests is optional for environments without Docker + Terraform. // By default, we opt into these tests. export const requireTerraformTests = !process.env.CODER_E2E_DISABLE_TERRAFORM; diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index f4ad6485b2681..71b1c039c5dfb 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -81,7 +81,7 @@ export async function login(page: Page, options: LoginOptions = users.owner) { (ctx as any)[Symbol.for("currentUser")] = options; } -export function currentUser(page: Page): LoginOptions { +function currentUser(page: Page): LoginOptions { const ctx = page.context(); // biome-ignore lint/suspicious/noExplicitAny: get the current user const user = (ctx as any)[Symbol.for("currentUser")]; @@ -875,7 +875,7 @@ export const echoResponsesWithExternalAuth = ( }; }; -export const fillParameters = async ( +const fillParameters = async ( page: Page, richParameters: RichParameter[] = [], buildParameters: WorkspaceBuildParameter[] = [], diff --git a/site/e2e/tests/deployment/idpOrgSync.spec.ts b/site/e2e/tests/deployment/idpOrgSync.spec.ts index a693e70007d4d..4f175b93183c0 100644 --- a/site/e2e/tests/deployment/idpOrgSync.spec.ts +++ b/site/e2e/tests/deployment/idpOrgSync.spec.ts @@ -5,7 +5,6 @@ import { deleteOrganization, setupApiCalls, } from "../../api"; -import { users } from "../../constants"; import { login, randomName, requiresLicense } from "../../helpers"; import { beforeCoderTest } from "../../hooks"; diff --git a/site/e2e/tests/organizations/auditLogs.spec.ts b/site/e2e/tests/organizations/auditLogs.spec.ts index 3044d9da2d7ca..0cb92c94a5692 100644 --- a/site/e2e/tests/organizations/auditLogs.spec.ts +++ b/site/e2e/tests/organizations/auditLogs.spec.ts @@ -1,4 +1,4 @@ -import { type Page, expect, test } from "@playwright/test"; +import { expect, test } from "@playwright/test"; import { createOrganization, createOrganizationMember, diff --git a/site/package.json b/site/package.json index 8a08e837dc8a5..265756d773594 100644 --- a/site/package.json +++ b/site/package.json @@ -13,10 +13,11 @@ "dev": "vite", "format": "biome format --write .", "format:check": "biome format .", - "lint": "pnpm run lint:check && pnpm run lint:types && pnpm run lint:circular-deps", + "lint": "pnpm run lint:check && pnpm run lint:types && pnpm run lint:circular-deps && knip", "lint:check": " biome lint --error-on-warnings .", "lint:circular-deps": "dpdm --no-tree --no-warning -T ./src/App.tsx", - "lint:fix": " 
biome lint --error-on-warnings --write .", + "lint:knip": "knip", + "lint:fix": " biome lint --error-on-warnings --write . && knip --fix", "lint:types": "tsc -p .", "playwright:install": "playwright install --with-deps chromium", "playwright:test": "playwright test --config=e2e/playwright.config.ts", @@ -29,7 +30,6 @@ "test:ci": "jest --selectProjects test --silent", "test:coverage": "jest --selectProjects test --collectCoverage", "test:watch": "jest --selectProjects test --watch", - "test:storybook": "test-storybook", "stats": "STATS=true pnpm build && npx http-server ./stats -p 8081 -c-1", "deadcode": "ts-prune | grep -v \".stories\\|.config\\|e2e\\|__mocks__\\|used in module\\|testHelpers\\|typesGenerated\" || echo \"No deadcode found.\"", "update-emojis": "cp -rf ./node_modules/emoji-datasource-apple/img/apple/64/* ./static/emojis" @@ -68,7 +68,6 @@ "@radix-ui/react-slot": "1.1.1", "@radix-ui/react-switch": "1.1.1", "@radix-ui/react-tooltip": "1.1.7", - "@radix-ui/react-visually-hidden": "1.1.0", "@tanstack/react-query-devtools": "4.35.3", "@xterm/addon-canvas": "0.7.0", "@xterm/addon-fit": "0.10.0", @@ -78,10 +77,8 @@ "@xterm/xterm": "5.5.0", "ansi-to-html": "0.7.2", "axios": "1.8.2", - "canvas": "3.1.0", "chart.js": "4.4.0", "chartjs-adapter-date-fns": "3.0.0", - "chartjs-plugin-annotation": "3.0.1", "chroma-js": "2.4.2", "class-variance-authority": "0.7.1", "clsx": "2.1.1", @@ -91,7 +88,6 @@ "cronstrue": "2.50.0", "date-fns": "2.30.0", "dayjs": "1.11.13", - "emoji-datasource-apple": "15.1.2", "emoji-mart": "5.6.0", "file-saver": "2.0.5", "formik": "2.4.6", @@ -149,7 +145,6 @@ "@tailwindcss/typography": "0.5.16", "@testing-library/jest-dom": "6.6.3", "@testing-library/react": "14.3.1", - "@testing-library/react-hooks": "8.0.1", "@testing-library/user-event": "14.6.1", "@types/chroma-js": "2.4.0", "@types/color-convert": "2.0.4", @@ -181,6 +176,7 @@ "jest-location-mock": "2.0.0", "jest-websocket-mock": "2.5.0", "jest_workaround": "0.1.14", + "knip": "5.51.0", "msw": "2.4.8", "postcss": "8.5.1", "protobufjs": "7.4.0", @@ -188,9 +184,7 @@ "ssh2": "1.16.0", "storybook": "8.5.3", "storybook-addon-remix-react-router": "3.1.0", - "storybook-react-context": "0.7.0", "tailwindcss": "3.4.17", - "ts-node": "10.9.2", "ts-proto": "1.164.0", "ts-prune": "0.10.3", "typescript": "5.6.3", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 15bc6709ef011..7fea2e807e086 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -115,9 +115,6 @@ importers: '@radix-ui/react-tooltip': specifier: 1.1.7 version: 1.1.7(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@radix-ui/react-visually-hidden': - specifier: 1.1.0 - version: 1.1.0(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@tanstack/react-query-devtools': specifier: 4.35.3 version: 4.35.3(@tanstack/react-query@4.35.3(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -145,18 +142,12 @@ importers: axios: specifier: 1.8.2 version: 1.8.2 - canvas: - specifier: 3.1.0 - version: 3.1.0 chart.js: specifier: 4.4.0 version: 4.4.0 chartjs-adapter-date-fns: specifier: 3.0.0 version: 3.0.0(chart.js@4.4.0)(date-fns@2.30.0) - chartjs-plugin-annotation: - specifier: 3.0.1 - version: 3.0.1(chart.js@4.4.0) chroma-js: specifier: 2.4.2 version: 2.4.2 @@ -184,9 +175,6 @@ importers: dayjs: specifier: 1.11.13 version: 1.11.13 - emoji-datasource-apple: - specifier: 15.1.2 - version: 15.1.2 emoji-mart: specifier: 5.6.0 
version: 5.6.0 @@ -353,9 +341,6 @@ importers: '@testing-library/react': specifier: 14.3.1 version: 14.3.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - '@testing-library/react-hooks': - specifier: 8.0.1 - version: 8.0.1(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) '@testing-library/user-event': specifier: 14.6.1 version: 14.6.1(@testing-library/dom@10.4.0) @@ -436,10 +421,10 @@ importers: version: 2.5.2 jest-environment-jsdom: specifier: 29.5.0 - version: 29.5.0(canvas@3.1.0) + version: 29.5.0 jest-fixed-jsdom: specifier: 0.0.9 - version: 0.0.9(jest-environment-jsdom@29.5.0(canvas@3.1.0)) + version: 0.0.9(jest-environment-jsdom@29.5.0) jest-location-mock: specifier: 2.0.0 version: 2.0.0 @@ -449,6 +434,9 @@ importers: jest_workaround: specifier: 0.1.14 version: 0.1.14(@swc/core@1.3.38)(@swc/jest@0.2.37(@swc/core@1.3.38)) + knip: + specifier: 5.51.0 + version: 5.51.0(@types/node@20.17.16)(typescript@5.6.3) msw: specifier: 2.4.8 version: 2.4.8(typescript@5.6.3) @@ -470,15 +458,9 @@ importers: storybook-addon-remix-react-router: specifier: 3.1.0 version: 3.1.0(@storybook/blocks@8.4.6(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)))(@storybook/channels@8.1.11)(@storybook/components@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/core-events@8.1.11)(@storybook/manager-api@8.4.6(storybook@8.5.3(prettier@3.4.1)))(@storybook/preview-api@8.5.3(storybook@8.5.3(prettier@3.4.1)))(@storybook/theming@8.4.6(storybook@8.5.3(prettier@3.4.1)))(react-dom@18.3.1(react@18.3.1))(react-router-dom@6.26.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(react@18.3.1) - storybook-react-context: - specifier: 0.7.0 - version: 0.7.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)) tailwindcss: specifier: 3.4.17 version: 3.4.17(ts-node@10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3)) - ts-node: - specifier: 10.9.2 - version: 10.9.2(@swc/core@1.3.38)(@types/node@20.17.16)(typescript@5.6.3) ts-proto: specifier: 1.164.0 version: 1.164.0 @@ -1991,19 +1973,6 @@ packages: '@types/react': optional: true - '@radix-ui/react-visually-hidden@1.1.0': - resolution: {integrity: sha512-N8MDZqtgCgG5S3aV60INAB475osJousYpZ4cTJ2cFbMpdHS5Y6loLTH8LPtkj2QN0x93J30HT/M3qJXM0+lyeQ==, tarball: https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.1.0.tgz} - peerDependencies: - '@types/react': '*' - '@types/react-dom': '*' - react: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - react-dom: ^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc - peerDependenciesMeta: - '@types/react': - optional: true - '@types/react-dom': - optional: true - '@radix-ui/react-visually-hidden@1.1.1': resolution: {integrity: sha512-vVfA2IZ9q/J+gEamvj761Oq1FpWgCDaNOOIfbPVp2MVPLEomUr5+Vf7kJGwQ24YxZSlQVar7Bes8kyTo5Dshpg==, tarball: https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.1.1.tgz} peerDependencies: @@ -2476,22 +2445,6 @@ packages: resolution: {integrity: sha512-IteBhl4XqYNkM54f4ejhLRJiZNqcSCoXUOG2CPK7qbD322KjQozM4kHQOfkG2oln9b9HTYqs+Sae8vBATubxxA==, tarball: https://registry.npmjs.org/@testing-library/jest-dom/-/jest-dom-6.6.3.tgz} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} - '@testing-library/react-hooks@8.0.1': - resolution: {integrity: sha512-Aqhl2IVmLt8IovEVarNDFuJDVWVvhnr9/GCU6UUnrYXwgDFF9h2L2o2P9KBni1AST5sT6riAyoukFLyjQUgD/g==, tarball: https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-8.0.1.tgz} - engines: {node: '>=12'} - peerDependencies: - '@types/react': ^16.9.0 
|| ^17.0.0 - react: ^16.9.0 || ^17.0.0 - react-dom: ^16.9.0 || ^17.0.0 - react-test-renderer: ^16.9.0 || ^17.0.0 - peerDependenciesMeta: - '@types/react': - optional: true - react-dom: - optional: true - react-test-renderer: - optional: true - '@testing-library/react@14.3.1': resolution: {integrity: sha512-H99XjUhWQw0lTgyMN05W3xQG1Nh4lq574D8keFf1dDoNTJgp66VbJozRaczoF+wsiaPJNt/TcnfpLGufGxSrZQ==, tarball: https://registry.npmjs.org/@testing-library/react/-/react-14.3.1.tgz} engines: {node: '>=14'} @@ -3120,10 +3073,6 @@ packages: caniuse-lite@1.0.30001690: resolution: {integrity: sha512-5ExiE3qQN6oF8Clf8ifIDcMRCRE/dMGcETG/XGMD8/XiXm6HXQgQTh1yZYLXXpSOsEUlJm1Xr7kGULZTuGtP/w==, tarball: https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001690.tgz} - canvas@3.1.0: - resolution: {integrity: sha512-tTj3CqqukVJ9NgSahykNwtGda7V33VLObwrHfzT0vqJXu7J4d4C/7kQQW3fOEGDfZZoILPut5H00gOjyttPGyg==, tarball: https://registry.npmjs.org/canvas/-/canvas-3.1.0.tgz} - engines: {node: ^18.12.0 || >= 20.9.0} - case-anything@2.1.13: resolution: {integrity: sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==, tarball: https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz} engines: {node: '>=12.13'} @@ -3182,11 +3131,6 @@ packages: chart.js: '>=2.8.0' date-fns: '>=2.0.0' - chartjs-plugin-annotation@3.0.1: - resolution: {integrity: sha512-hlIrXXKqSDgb+ZjVYHefmlZUXK8KbkCPiynSVrTb/HjTMkT62cOInaT1NTQCKtxKKOm9oHp958DY3RTAFKtkHg==, tarball: https://registry.npmjs.org/chartjs-plugin-annotation/-/chartjs-plugin-annotation-3.0.1.tgz} - peerDependencies: - chart.js: '>=4.0.0' - check-error@2.1.1: resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==, tarball: https://registry.npmjs.org/check-error/-/check-error-2.1.1.tgz} engines: {node: '>= 16'} @@ -3195,9 +3139,6 @@ packages: resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==, tarball: https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz} engines: {node: '>= 8.10.0'} - chownr@1.1.4: - resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==, tarball: https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz} - chroma-js@2.4.2: resolution: {integrity: sha512-U9eDw6+wt7V8z5NncY2jJfZa+hUH8XEj8FQHgFJTrUFnJfXYf4Ml4adI2vXZOjqRDpFWtYVWypDfZwnJ+HIR4A==, tarball: https://registry.npmjs.org/chroma-js/-/chroma-js-2.4.2.tgz} @@ -3472,10 +3413,6 @@ packages: decode-named-character-reference@1.0.2: resolution: {integrity: sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==, tarball: https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz} - decompress-response@6.0.0: - resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==, tarball: https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz} - engines: {node: '>=10'} - dedent@1.5.3: resolution: {integrity: sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==, tarball: https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz} peerDependencies: @@ -3491,10 +3428,6 @@ packages: deep-equal@2.2.2: resolution: {integrity: sha512-xjVyBf0w5vH0I42jdAZzOKVldmPgSulmiyPRywoyq7HXC9qdgo17kxJE+rdnif5Tz6+pIrpJI8dCpMNLIGkUiA==, tarball: 
https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.2.tgz} - deep-extend@0.6.0: - resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==, tarball: https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz} - engines: {node: '>=4.0.0'} - deep-is@0.1.4: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==, tarball: https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz} @@ -3546,10 +3479,6 @@ packages: engines: {node: '>=0.10'} hasBin: true - detect-libc@2.0.3: - resolution: {integrity: sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==, tarball: https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz} - engines: {node: '>=8'} - detect-newline@3.1.0: resolution: {integrity: sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==, tarball: https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz} engines: {node: '>=8'} @@ -3606,6 +3535,9 @@ packages: eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==, tarball: https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz} + easy-table@1.2.0: + resolution: {integrity: sha512-OFzVOv03YpvtcWGe5AayU5G2hgybsg3iqA6drU8UaoZyB9jLGMTrz9+asnLp/E+6qPh88yEI1gvyZFZ41dmgww==, tarball: https://registry.npmjs.org/easy-table/-/easy-table-1.2.0.tgz} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==, tarball: https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz} @@ -3619,9 +3551,6 @@ packages: resolution: {integrity: sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==, tarball: https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz} engines: {node: '>=12'} - emoji-datasource-apple@15.1.2: - resolution: {integrity: sha512-32UZTK36x4DlvgD1smkmBlKmmJH7qUr5Qut4U/on2uQLGqNXGbZiheq6/LEA8xRQEUrmNrGEy25wpEI6wvYmTg==, tarball: https://registry.npmjs.org/emoji-datasource-apple/-/emoji-datasource-apple-15.1.2.tgz} - emoji-mart@5.6.0: resolution: {integrity: sha512-eJp3QRe79pjwa+duv+n7+5YsNhRcMl812EcFVwrnRvYKoNPoQb5qxU8DG6Bgwji0akHdp6D4Ln6tYLG58MFSow==, tarball: https://registry.npmjs.org/emoji-mart/-/emoji-mart-5.6.0.tgz} @@ -3639,8 +3568,9 @@ packages: resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==, tarball: https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz} engines: {node: '>= 0.8'} - end-of-stream@1.4.4: - resolution: {integrity: sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==, tarball: https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz} + enhanced-resolve@5.18.1: + resolution: {integrity: sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==, tarball: https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz} + engines: {node: '>=10.13.0'} entities@2.2.0: resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==, tarball: https://registry.npmjs.org/entities/-/entities-2.2.0.tgz} @@ -3772,10 +3702,6 @@ packages: resolution: {integrity: sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==, 
tarball: https://registry.npmjs.org/exit/-/exit-0.1.2.tgz} engines: {node: '>= 0.8.0'} - expand-template@2.0.3: - resolution: {integrity: sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==, tarball: https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz} - engines: {node: '>=6'} - expect@29.7.0: resolution: {integrity: sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==, tarball: https://registry.npmjs.org/expect/-/expect-29.7.0.tgz} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} @@ -3898,9 +3824,6 @@ packages: front-matter@4.0.2: resolution: {integrity: sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==, tarball: https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz} - fs-constants@1.0.0: - resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==, tarball: https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz} - fs-extra@11.2.0: resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==, tarball: https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz} engines: {node: '>=14.14'} @@ -3952,9 +3875,6 @@ packages: resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==, tarball: https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz} engines: {node: '>=10'} - github-from-package@0.0.0: - resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==, tarball: https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz} - glob-parent@5.1.2: resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==, tarball: https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz} engines: {node: '>= 6'} @@ -4122,9 +4042,6 @@ packages: inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==, tarball: https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz} - ini@1.3.8: - resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==, tarball: https://registry.npmjs.org/ini/-/ini-1.3.8.tgz} - inline-style-parser@0.2.4: resolution: {integrity: sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q==, tarball: https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.4.tgz} @@ -4525,6 +4442,10 @@ packages: resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==, tarball: https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz} hasBin: true + jiti@2.4.2: + resolution: {integrity: sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==, tarball: https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz} + hasBin: true + js-tokens@4.0.0: resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==, tarball: https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz} @@ -4587,6 +4508,14 @@ packages: resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==, tarball: https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz} 
engines: {node: '>=6'} + knip@5.51.0: + resolution: {integrity: sha512-gw5TzLt9FikIk1oPWDc7jPRb/+L3Aw1ia25hWUQBb+hXS/Rbdki/0rrzQygjU5/CVYnRWYqc1kgdNi60Jm1lPg==, tarball: https://registry.npmjs.org/knip/-/knip-5.51.0.tgz} + engines: {node: '>=18.18.0'} + hasBin: true + peerDependencies: + '@types/node': '>=18' + typescript: '>=5.0.4' + leven@3.1.0: resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==, tarball: https://registry.npmjs.org/leven/-/leven-3.1.0.tgz} engines: {node: '>=6'} @@ -4941,10 +4870,6 @@ packages: resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==, tarball: https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz} engines: {node: '>=6'} - mimic-response@3.1.0: - resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==, tarball: https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz} - engines: {node: '>=10'} - min-indent@1.0.1: resolution: {integrity: sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==, tarball: https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz} engines: {node: '>=4'} @@ -4963,9 +4888,6 @@ packages: resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==, tarball: https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz} engines: {node: '>=16 || 14 >=14.17'} - mkdirp-classic@0.5.3: - resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==, tarball: https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz} - mkdirp@1.0.4: resolution: {integrity: sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==, tarball: https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz} engines: {node: '>=10'} @@ -5012,9 +4934,6 @@ packages: engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - napi-build-utils@2.0.0: - resolution: {integrity: sha512-GEbrYkbfF7MoNaoh2iGG84Mnf/WZfB0GdGEsM8wz7Expx/LlWf5U8t9nvJKXSp3qr5IsEbK04cBGhol/KwOsWA==, tarball: https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-2.0.0.tgz} - natural-compare@1.4.0: resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==, tarball: https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz} @@ -5022,13 +4941,6 @@ packages: resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==, tarball: https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz} engines: {node: '>= 0.6'} - node-abi@3.74.0: - resolution: {integrity: sha512-c5XK0MjkGBrQPGYG24GBADZud0NCbznxNx0ZkS+ebUTrmV1qTDxPxSL8zEAPURXSbLRWVexxmP4986BziahL5w==, tarball: https://registry.npmjs.org/node-abi/-/node-abi-3.74.0.tgz} - engines: {node: '>=10'} - - node-addon-api@7.1.1: - resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==, tarball: https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz} - node-int64@0.4.0: resolution: {integrity: sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==, tarball: https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz} @@ -5143,6 +5055,10 @@ packages: resolution: {integrity: 
sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==, tarball: https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz} engines: {node: '>=8'} + parse-ms@4.0.0: + resolution: {integrity: sha512-TXfryirbmq34y8QBwgqCVLi+8oA3oWx2eAnSn62ITyEhEYaWRlVZ2DvMM9eZbMs/RfxPu/PK/aBLyGj4IrqMHw==, tarball: https://registry.npmjs.org/parse-ms/-/parse-ms-4.0.0.tgz} + engines: {node: '>=18'} + parse5@7.1.2: resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==, tarball: https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz} @@ -5272,11 +5188,6 @@ packages: resolution: {integrity: sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ==, tarball: https://registry.npmjs.org/postcss/-/postcss-8.5.1.tgz} engines: {node: ^10 || ^12 || >=14} - prebuild-install@7.1.3: - resolution: {integrity: sha512-8Mf2cbV7x1cXPUILADGI3wuhfqWvtiLA1iclTDbFRZkgRQS0NqsPZphna9V+HyTEadheuPmjaJMsbzKQFOzLug==, tarball: https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.1.3.tgz} - engines: {node: '>=10'} - hasBin: true - prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==, tarball: https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz} engines: {node: '>= 0.8.0'} @@ -5298,6 +5209,10 @@ packages: resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==, tarball: https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz} engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + pretty-ms@9.2.0: + resolution: {integrity: sha512-4yf0QO/sllf/1zbZWYnvWw3NxCQwLXKzIj0G849LSufP15BXKM0rbD2Z3wVnkMfjdn/CB0Dpp444gYAACdsplg==, tarball: https://registry.npmjs.org/pretty-ms/-/pretty-ms-9.2.0.tgz} + engines: {node: '>=18'} + prismjs@1.30.0: resolution: {integrity: sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==, tarball: https://registry.npmjs.org/prismjs/-/prismjs-1.30.0.tgz} engines: {node: '>=6'} @@ -5342,9 +5257,6 @@ packages: psl@1.9.0: resolution: {integrity: sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==, tarball: https://registry.npmjs.org/psl/-/psl-1.9.0.tgz} - pump@3.0.2: - resolution: {integrity: sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==, tarball: https://registry.npmjs.org/pump/-/pump-3.0.2.tgz} - punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==, tarball: https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz} engines: {node: '>=6'} @@ -5370,10 +5282,6 @@ packages: resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==, tarball: https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz} engines: {node: '>= 0.8'} - rc@1.2.8: - resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==, tarball: https://registry.npmjs.org/rc/-/rc-1.2.8.tgz} - hasBin: true - react-chartjs-2@5.3.0: resolution: {integrity: sha512-UfZZFnDsERI3c3CZGxzvNJd02SHjaSJ8kgW1djn65H1KK8rehwTjyrRKOG3VTMG8wtHZ5rgAO5oTHtHi9GCCmw==, tarball: https://registry.npmjs.org/react-chartjs-2/-/react-chartjs-2-5.3.0.tgz} peerDependencies: @@ -5411,12 +5319,6 @@ packages: peerDependencies: react: ^18.3.1 - 
react-error-boundary@3.1.4: - resolution: {integrity: sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==, tarball: https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz} - engines: {node: '>=10', npm: '>=6'} - peerDependencies: - react: '>=16.13.1' - react-fast-compare@2.0.4: resolution: {integrity: sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==, tarball: https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz} @@ -5765,12 +5667,6 @@ packages: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==, tarball: https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz} engines: {node: '>=14'} - simple-concat@1.0.1: - resolution: {integrity: sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==, tarball: https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz} - - simple-get@4.0.1: - resolution: {integrity: sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==, tarball: https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz} - sisteransi@1.0.5: resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==, tarball: https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz} @@ -5778,6 +5674,10 @@ packages: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==, tarball: https://registry.npmjs.org/slash/-/slash-3.0.0.tgz} engines: {node: '>=8'} + smol-toml@1.3.4: + resolution: {integrity: sha512-UOPtVuYkzYGee0Bd2Szz8d2G3RfMfJ2t3qVdZUAozZyAk+a0Sxa+QKix0YCwjL/A1RR0ar44nCxaoN9FxdJGwA==, tarball: https://registry.npmjs.org/smol-toml/-/smol-toml-1.3.4.tgz} + engines: {node: '>= 18'} + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==, tarball: https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz} engines: {node: '>=0.10.0'} @@ -5844,12 +5744,6 @@ packages: react-dom: optional: true - storybook-react-context@0.7.0: - resolution: {integrity: sha512-esCfwMhnHfJZQipRHfVpjH5mYBfOjj2JEi5XFAZ2BXCl3mIEypMdNCQZmNUvuR1u8EsQWClArhtL0h+FCiLcrw==, tarball: https://registry.npmjs.org/storybook-react-context/-/storybook-react-context-0.7.0.tgz} - peerDependencies: - react: '>=18' - react-dom: '>=18' - storybook@8.5.3: resolution: {integrity: sha512-2WtNBZ45u1AhviRU+U+ld588tH8gDa702dNSq5C8UBaE9PlOsazGsyp90dw1s9YRvi+ejrjKAupQAU0GwwUiVg==, tarball: https://registry.npmjs.org/storybook/-/storybook-8.5.3.tgz} hasBin: true @@ -5911,14 +5805,14 @@ packages: resolution: {integrity: sha512-mnVSV2l+Zv6BLpSD/8V87CW/y9EmmbYzGCIavsnsI6/nwn26DwffM/yztm30Z/I2DY9wdS3vXVCMnHDgZaVNoA==, tarball: https://registry.npmjs.org/strip-indent/-/strip-indent-4.0.0.tgz} engines: {node: '>=12'} - strip-json-comments@2.0.1: - resolution: {integrity: sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==, tarball: https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz} - engines: {node: '>=0.10.0'} - strip-json-comments@3.1.1: resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==, tarball: https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz} engines: {node: 
'>=8'} + strip-json-comments@5.0.1: + resolution: {integrity: sha512-0fk9zBqO67Nq5M/m45qHCJxylV/DhBlIOVExqgOMiCCrzrhU6tCibRXNqE3jwJLftzE9SNuZtYbpzcO+i9FiKw==, tarball: https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.1.tgz} + engines: {node: '>=14.16'} + style-to-object@1.0.8: resolution: {integrity: sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g==, tarball: https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.8.tgz} @@ -5966,11 +5860,8 @@ packages: engines: {node: '>=14.0.0'} hasBin: true - tar-fs@2.1.2: - resolution: {integrity: sha512-EsaAXwxmx8UB7FRKqeozqEPop69DXcmYwTQwXvyAPF352HJsPdkVhvTaDPYqfNgruveJIJy3TA2l+2zj8LJIJA==, tarball: https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.2.tgz} - - tar-stream@2.2.0: - resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==, tarball: https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz} + tapable@2.2.1: + resolution: {integrity: sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==, tarball: https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz} engines: {node: '>=6'} telejson@7.2.0: @@ -6073,8 +5964,8 @@ packages: '@swc/wasm': optional: true - ts-poet@6.6.0: - resolution: {integrity: sha512-4vEH/wkhcjRPFOdBwIh9ItO6jOoumVLRF4aABDX5JSNEubSqwOulihxQPqai+OkuygJm3WYMInxXQX4QwVNMuw==, tarball: https://registry.npmjs.org/ts-poet/-/ts-poet-6.6.0.tgz} + ts-poet@6.11.0: + resolution: {integrity: sha512-r5AGF8vvb+GjBsnqiTqbLhN1/U2FJt6BI+k0dfCrkKzWvUhNlwMmq9nDHuucHs45LomgHjZPvYj96dD3JawjJA==, tarball: https://registry.npmjs.org/ts-poet/-/ts-poet-6.11.0.tgz} ts-proto-descriptors@1.15.0: resolution: {integrity: sha512-TYyJ7+H+7Jsqawdv+mfsEpZPTIj9siDHS6EMCzG/z3b/PZiphsX+mWtqFfFVe5/N0Th6V3elK9lQqjnrgTOfrg==, tarball: https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-1.15.0.tgz} @@ -6100,9 +5991,6 @@ packages: tslib@2.8.1: resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==, tarball: https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz} - tunnel-agent@0.6.0: - resolution: {integrity: sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==, tarball: https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz} - tween-functions@1.2.0: resolution: {integrity: sha512-PZBtLYcCLtEcjL14Fzb1gSxPBeL7nWvGhO5ZFPGqziCcr8uvHp0NDmdjBchp6KHL+tExcg0m3NISmKxhU394dA==, tarball: https://registry.npmjs.org/tween-functions/-/tween-functions-1.2.0.tgz} @@ -6521,6 +6409,15 @@ packages: yup@1.6.1: resolution: {integrity: sha512-JED8pB50qbA4FOkDol0bYF/p60qSEDQqBD0/qeIrUCG1KbPBIQ776fCUNb9ldbPcSTxA69g/47XTo4TqWiuXOA==, tarball: https://registry.npmjs.org/yup/-/yup-1.6.1.tgz} + zod-validation-error@3.4.0: + resolution: {integrity: sha512-ZOPR9SVY6Pb2qqO5XHt+MkkTRxGXb4EVtnjc9JpXUOtUB1T9Ru7mZOT361AN3MsetVe7R0a1KZshJDZdgp9miQ==, tarball: https://registry.npmjs.org/zod-validation-error/-/zod-validation-error-3.4.0.tgz} + engines: {node: '>=18.0.0'} + peerDependencies: + zod: ^3.18.0 + + zod@3.24.3: + resolution: {integrity: sha512-HhY1oqzWCQWuUqvBFnsyrtZRhyPeR7SUGv+C4+MsisMuVfSPx8HpwWqH8tRahSlt6M3PiFAcoeFhZAqIXTxoSg==, tarball: https://registry.npmjs.org/zod/-/zod-3.24.3.tgz} + zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==, tarball: 
https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz} @@ -6849,6 +6746,7 @@ snapshots: '@cspotcode/source-map-support@0.8.1': dependencies: '@jridgewell/trace-mapping': 0.3.9 + optional: true '@emoji-mart/data@1.2.1': {} @@ -7373,6 +7271,7 @@ snapshots: dependencies: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.0 + optional: true '@kurkle/color@0.3.2': {} @@ -8145,15 +8044,6 @@ snapshots: optionalDependencies: '@types/react': 18.3.12 - '@radix-ui/react-visually-hidden@1.1.0(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@radix-ui/react-primitive': 2.0.0(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.12 - '@types/react-dom': 18.3.1 - '@radix-ui/react-visually-hidden@1.1.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@radix-ui/react-primitive': 2.0.1(@types/react-dom@18.3.1)(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) @@ -8665,15 +8555,6 @@ snapshots: lodash: 4.17.21 redent: 3.0.0 - '@testing-library/react-hooks@8.0.1(@types/react@18.3.12)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': - dependencies: - '@babel/runtime': 7.26.10 - react: 18.3.1 - react-error-boundary: 3.1.4(react@18.3.1) - optionalDependencies: - '@types/react': 18.3.12 - react-dom: 18.3.1(react@18.3.1) - '@testing-library/react@14.3.1(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': dependencies: '@babel/runtime': 7.26.10 @@ -8699,13 +8580,17 @@ snapshots: mkdirp: 1.0.4 path-browserify: 1.0.1 - '@tsconfig/node10@1.0.11': {} + '@tsconfig/node10@1.0.11': + optional: true - '@tsconfig/node12@1.0.11': {} + '@tsconfig/node12@1.0.11': + optional: true - '@tsconfig/node14@1.0.3': {} + '@tsconfig/node14@1.0.3': + optional: true - '@tsconfig/node16@1.0.4': {} + '@tsconfig/node16@1.0.4': + optional: true '@types/aria-query@5.0.3': {} @@ -9127,7 +9012,8 @@ snapshots: normalize-path: 3.0.0 picomatch: 2.3.1 - arg@4.1.3: {} + arg@4.1.3: + optional: true arg@5.0.2: {} @@ -9135,8 +9021,7 @@ snapshots: dependencies: sprintf-js: 1.0.3 - argparse@2.0.1: - optional: true + argparse@2.0.1: {} aria-hidden@1.2.4: dependencies: @@ -9375,11 +9260,6 @@ snapshots: caniuse-lite@1.0.30001690: {} - canvas@3.1.0: - dependencies: - node-addon-api: 7.1.1 - prebuild-install: 7.1.3 - case-anything@2.1.13: {} ccount@2.0.1: {} @@ -9433,10 +9313,6 @@ snapshots: chart.js: 4.4.0 date-fns: 2.30.0 - chartjs-plugin-annotation@3.0.1(chart.js@4.4.0): - dependencies: - chart.js: 4.4.0 - check-error@2.1.1: {} chokidar@3.6.0: @@ -9451,8 +9327,6 @@ snapshots: optionalDependencies: fsevents: 2.3.3 - chownr@1.1.4: {} - chroma-js@2.4.2: {} chromatic@11.25.2: {} @@ -9584,7 +9458,8 @@ snapshots: - supports-color - ts-node - create-require@1.1.1: {} + create-require@1.1.1: + optional: true cron-parser@4.9.0: dependencies: @@ -9680,10 +9555,6 @@ snapshots: dependencies: character-entities: 2.0.2 - decompress-response@6.0.0: - dependencies: - mimic-response: 3.1.0 - dedent@1.5.3(babel-plugin-macros@3.1.0): optionalDependencies: babel-plugin-macros: 3.1.0 @@ -9711,8 +9582,6 @@ snapshots: which-collection: 1.0.1 which-typed-array: 1.1.18 - deep-extend@0.6.0: {} - deep-is@0.1.4: optional: true @@ -9754,8 +9623,6 @@ snapshots: detect-libc@1.0.3: {} - detect-libc@2.0.3: {} - detect-newline@3.1.0: {} detect-node-es@1.1.0: {} @@ -9768,7 +9635,8 @@ snapshots: 
diff-sequences@29.6.3: {} - diff@4.0.2: {} + diff@4.0.2: + optional: true dlv@1.1.3: {} @@ -9811,6 +9679,12 @@ snapshots: eastasianwidth@0.2.0: {} + easy-table@1.2.0: + dependencies: + ansi-regex: 5.0.1 + optionalDependencies: + wcwidth: 1.0.1 + ee-first@1.1.1: {} electron-to-chromium@1.5.50: {} @@ -9819,8 +9693,6 @@ snapshots: emittery@0.13.1: {} - emoji-datasource-apple@15.1.2: {} - emoji-mart@5.6.0: {} emoji-regex@8.0.0: {} @@ -9831,9 +9703,10 @@ snapshots: encodeurl@2.0.0: {} - end-of-stream@1.4.4: + enhanced-resolve@5.18.1: dependencies: - once: 1.4.0 + graceful-fs: 4.2.11 + tapable: 2.2.1 entities@2.2.0: {} @@ -10027,8 +9900,6 @@ snapshots: exit@0.1.2: {} - expand-template@2.0.3: {} - expect@29.7.0: dependencies: '@jest/expect-utils': 29.7.0 @@ -10202,8 +10073,6 @@ snapshots: dependencies: js-yaml: 3.14.1 - fs-constants@1.0.0: {} - fs-extra@11.2.0: dependencies: graceful-fs: 4.2.11 @@ -10250,8 +10119,6 @@ snapshots: get-stream@6.0.1: {} - github-from-package@0.0.0: {} - glob-parent@5.1.2: dependencies: is-glob: 4.0.3 @@ -10441,8 +10308,6 @@ snapshots: inherits@2.0.4: {} - ini@1.3.8: {} - inline-style-parser@0.2.4: {} internal-slot@1.0.6: @@ -10772,7 +10637,7 @@ snapshots: jest-util: 29.7.0 pretty-format: 29.7.0 - jest-environment-jsdom@29.5.0(canvas@3.1.0): + jest-environment-jsdom@29.5.0: dependencies: '@jest/environment': 29.6.2 '@jest/fake-timers': 29.6.2 @@ -10781,9 +10646,7 @@ snapshots: '@types/node': 20.17.16 jest-mock: 29.6.2 jest-util: 29.6.2 - jsdom: 20.0.3(canvas@3.1.0) - optionalDependencies: - canvas: 3.1.0 + jsdom: 20.0.3 transitivePeerDependencies: - bufferutil - supports-color @@ -10798,9 +10661,9 @@ snapshots: jest-mock: 29.7.0 jest-util: 29.7.0 - jest-fixed-jsdom@0.0.9(jest-environment-jsdom@29.5.0(canvas@3.1.0)): + jest-fixed-jsdom@0.0.9(jest-environment-jsdom@29.5.0): dependencies: - jest-environment-jsdom: 29.5.0(canvas@3.1.0) + jest-environment-jsdom: 29.5.0 jest-get-type@29.4.3: {} @@ -11047,6 +10910,8 @@ snapshots: jiti@1.21.7: {} + jiti@2.4.2: {} + js-tokens@4.0.0: {} js-yaml@3.14.1: @@ -11057,11 +10922,10 @@ snapshots: js-yaml@4.1.0: dependencies: argparse: 2.0.1 - optional: true jsdoc-type-pratt-parser@4.1.0: {} - jsdom@20.0.3(canvas@3.1.0): + jsdom@20.0.3: dependencies: abab: 2.0.6 acorn: 8.14.0 @@ -11089,8 +10953,6 @@ snapshots: whatwg-url: 11.0.0 ws: 8.17.1 xml-name-validator: 4.0.0 - optionalDependencies: - canvas: 3.1.0 transitivePeerDependencies: - bufferutil - supports-color @@ -11133,6 +10995,25 @@ snapshots: kleur@3.0.3: {} + knip@5.51.0(@types/node@20.17.16)(typescript@5.6.3): + dependencies: + '@nodelib/fs.walk': 1.2.8 + '@types/node': 20.17.16 + easy-table: 1.2.0 + enhanced-resolve: 5.18.1 + fast-glob: 3.3.3 + jiti: 2.4.2 + js-yaml: 4.1.0 + minimist: 1.2.8 + picocolors: 1.1.1 + picomatch: 4.0.2 + pretty-ms: 9.2.0 + smol-toml: 1.3.4 + strip-json-comments: 5.0.1 + typescript: 5.6.3 + zod: 3.24.3 + zod-validation-error: 3.4.0(zod@3.24.3) + leven@3.1.0: {} levn@0.4.1: @@ -11215,7 +11096,8 @@ snapshots: dependencies: semver: 7.6.2 - make-error@1.3.6: {} + make-error@1.3.6: + optional: true makeerror@1.0.12: dependencies: @@ -11762,8 +11644,6 @@ snapshots: mimic-fn@2.1.0: {} - mimic-response@3.1.0: {} - min-indent@1.0.1: {} minimatch@3.1.2: @@ -11778,8 +11658,6 @@ snapshots: minipass@7.1.2: {} - mkdirp-classic@0.5.3: {} - mkdirp@1.0.4: {} mock-socket@9.3.1: {} @@ -11829,18 +11707,10 @@ snapshots: nanoid@3.3.8: {} - napi-build-utils@2.0.0: {} - natural-compare@1.4.0: {} negotiator@0.6.3: {} - node-abi@3.74.0: - dependencies: - semver: 7.6.2 - - 
node-addon-api@7.1.1: {} - node-int64@0.4.0: {} node-releases@2.0.18: {} @@ -11971,6 +11841,8 @@ snapshots: json-parse-even-better-errors: 2.3.1 lines-and-columns: 1.2.4 + parse-ms@4.0.0: {} + parse5@7.1.2: dependencies: entities: 4.5.0 @@ -12071,21 +11943,6 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 - prebuild-install@7.1.3: - dependencies: - detect-libc: 2.0.3 - expand-template: 2.0.3 - github-from-package: 0.0.0 - minimist: 1.2.8 - mkdirp-classic: 0.5.3 - napi-build-utils: 2.0.0 - node-abi: 3.74.0 - pump: 3.0.2 - rc: 1.2.8 - simple-get: 4.0.1 - tar-fs: 2.1.2 - tunnel-agent: 0.6.0 - prelude-ls@1.2.1: optional: true @@ -12106,6 +11963,10 @@ snapshots: ansi-styles: 5.2.0 react-is: 18.3.1 + pretty-ms@9.2.0: + dependencies: + parse-ms: 4.0.0 + prismjs@1.30.0: {} process-nextick-args@2.0.1: {} @@ -12159,11 +12020,6 @@ snapshots: psl@1.9.0: {} - pump@3.0.2: - dependencies: - end-of-stream: 1.4.4 - once: 1.4.0 - punycode@2.3.1: {} pure-rand@6.1.0: {} @@ -12185,13 +12041,6 @@ snapshots: iconv-lite: 0.4.24 unpipe: 1.0.0 - rc@1.2.8: - dependencies: - deep-extend: 0.6.0 - ini: 1.3.8 - minimist: 1.2.8 - strip-json-comments: 2.0.1 - react-chartjs-2@5.3.0(chart.js@4.4.0)(react@18.3.1): dependencies: chart.js: 4.4.0 @@ -12247,11 +12096,6 @@ snapshots: react: 18.3.1 scheduler: 0.23.2 - react-error-boundary@3.1.4(react@18.3.1): - dependencies: - '@babel/runtime': 7.26.10 - react: 18.3.1 - react-fast-compare@2.0.4: {} react-fast-compare@3.2.2: {} @@ -12694,18 +12538,12 @@ snapshots: signal-exit@4.1.0: {} - simple-concat@1.0.1: {} - - simple-get@4.0.1: - dependencies: - decompress-response: 6.0.0 - once: 1.4.0 - simple-concat: 1.0.1 - sisteransi@1.0.5: {} slash@3.0.0: {} + smol-toml@1.3.4: {} + source-map-js@1.2.1: {} source-map-support@0.5.13: @@ -12761,14 +12599,6 @@ snapshots: react: 18.3.1 react-dom: 18.3.1(react@18.3.1) - storybook-react-context@0.7.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(storybook@8.5.3(prettier@3.4.1)): - dependencies: - '@storybook/preview-api': 8.5.3(storybook@8.5.3(prettier@3.4.1)) - react: 18.3.1 - react-dom: 18.3.1(react@18.3.1) - transitivePeerDependencies: - - storybook - storybook@8.5.3(prettier@3.4.1): dependencies: '@storybook/core': 8.5.3(prettier@3.4.1) @@ -12833,10 +12663,10 @@ snapshots: dependencies: min-indent: 1.0.1 - strip-json-comments@2.0.1: {} - strip-json-comments@3.1.1: {} + strip-json-comments@5.0.1: {} + style-to-object@1.0.8: dependencies: inline-style-parser: 0.2.4 @@ -12906,20 +12736,7 @@ snapshots: transitivePeerDependencies: - ts-node - tar-fs@2.1.2: - dependencies: - chownr: 1.1.4 - mkdirp-classic: 0.5.3 - pump: 3.0.2 - tar-stream: 2.2.0 - - tar-stream@2.2.0: - dependencies: - bl: 4.1.0 - end-of-stream: 1.4.4 - fs-constants: 1.0.0 - inherits: 2.0.4 - readable-stream: 3.6.2 + tapable@2.2.1: {} telejson@7.2.0: dependencies: @@ -13007,7 +12824,7 @@ snapshots: '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 '@types/node': 20.17.16 - acorn: 8.14.0 + acorn: 8.14.1 acorn-walk: 8.3.4 arg: 4.1.3 create-require: 1.1.1 @@ -13018,8 +12835,9 @@ snapshots: yn: 3.1.1 optionalDependencies: '@swc/core': 1.3.38 + optional: true - ts-poet@6.6.0: + ts-poet@6.11.0: dependencies: dprint-node: 1.0.8 @@ -13032,7 +12850,7 @@ snapshots: dependencies: case-anything: 2.1.13 protobufjs: 7.4.0 - ts-poet: 6.6.0 + ts-poet: 6.11.0 ts-proto-descriptors: 1.15.0 ts-prune@0.10.3: @@ -13056,10 +12874,6 @@ snapshots: tslib@2.8.1: {} - tunnel-agent@0.6.0: - dependencies: - safe-buffer: 5.2.1 - tween-functions@1.2.0: {} tweetnacl@0.14.5: {} @@ -13224,7 +13038,8 @@ 
snapshots: uuid@9.0.1: {} - v8-compile-cache-lib@3.0.1: {} + v8-compile-cache-lib@3.0.1: + optional: true v8-to-istanbul@9.3.0: dependencies: @@ -13430,7 +13245,8 @@ snapshots: y18n: 5.0.8 yargs-parser: 21.1.1 - yn@3.1.1: {} + yn@3.1.1: + optional: true yocto-queue@0.1.0: {} @@ -13443,4 +13259,10 @@ snapshots: toposort: 2.0.2 type-fest: 2.19.0 + zod-validation-error@3.4.0(zod@3.24.3): + dependencies: + zod: 3.24.3 + + zod@3.24.3: {} + zwitch@2.0.4: {} diff --git a/site/src/__mocks__/react-markdown.tsx b/site/src/__mocks__/react-markdown.tsx deleted file mode 100644 index de1d2ea4d21e0..0000000000000 --- a/site/src/__mocks__/react-markdown.tsx +++ /dev/null @@ -1,7 +0,0 @@ -import type { FC, PropsWithChildren } from "react"; - -const ReactMarkdown: FC = ({ children }) => { - return
<div>{children}</div>
; -}; - -export default ReactMarkdown; diff --git a/site/src/api/api.ts b/site/src/api/api.ts index 260f5d4880ef2..ef15beb8166f5 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -2563,7 +2563,7 @@ interface ClientApi extends ApiMethods { getAxiosInstance: () => AxiosInstance; } -export class Api extends ApiMethods implements ClientApi { +class Api extends ApiMethods implements ClientApi { constructor() { const scopedAxiosInstance = getConfiguredAxiosInstance(); super(scopedAxiosInstance); diff --git a/site/src/api/errors.ts b/site/src/api/errors.ts index 873163e11a68d..bb51bebce651b 100644 --- a/site/src/api/errors.ts +++ b/site/src/api/errors.ts @@ -31,7 +31,7 @@ export const isApiError = (err: unknown): err is ApiError => { ); }; -export const isApiErrorResponse = (err: unknown): err is ApiErrorResponse => { +const isApiErrorResponse = (err: unknown): err is ApiErrorResponse => { return ( typeof err === "object" && err !== null && diff --git a/site/src/api/queries/authCheck.ts b/site/src/api/queries/authCheck.ts index 813bec828500a..11f5fafa7d25a 100644 --- a/site/src/api/queries/authCheck.ts +++ b/site/src/api/queries/authCheck.ts @@ -1,7 +1,7 @@ import { API } from "api/api"; import type { AuthorizationRequest } from "api/typesGenerated"; -export const AUTHORIZATION_KEY = "authorization"; +const AUTHORIZATION_KEY = "authorization"; export const getAuthorizationKey = (req: AuthorizationRequest) => [AUTHORIZATION_KEY, req] as const; diff --git a/site/src/api/queries/groups.ts b/site/src/api/queries/groups.ts index 4ddce87a249a2..dc6285e8d6de7 100644 --- a/site/src/api/queries/groups.ts +++ b/site/src/api/queries/groups.ts @@ -10,7 +10,7 @@ type GroupSortOrder = "asc" | "desc"; export const groupsQueryKey = ["groups"]; -export const groups = () => { +const groups = () => { return { queryKey: groupsQueryKey, queryFn: () => API.getGroups(), @@ -60,7 +60,7 @@ export function groupsByUserIdInOrganization(organization: string) { } satisfies UseQueryOptions; } -export function selectGroupsByUserId(groups: Group[]): GroupsByUserId { +function selectGroupsByUserId(groups: Group[]): GroupsByUserId { // Sorting here means that nothing has to be sorted for the individual // user arrays later const sorted = sortGroupsByName(groups, "asc"); @@ -163,7 +163,7 @@ export const removeMember = (queryClient: QueryClient) => { }; }; -export const invalidateGroup = ( +const invalidateGroup = ( queryClient: QueryClient, organization: string, groupId: string, @@ -176,7 +176,7 @@ export const invalidateGroup = ( queryClient.invalidateQueries(getGroupQueryKey(organization, groupId)), ]); -export function sortGroupsByName( +function sortGroupsByName( groups: readonly T[], order: GroupSortOrder, ) { diff --git a/site/src/api/queries/idpsync.ts b/site/src/api/queries/idpsync.ts index 05fb26a4624d3..eca3ec496faee 100644 --- a/site/src/api/queries/idpsync.ts +++ b/site/src/api/queries/idpsync.ts @@ -2,9 +2,7 @@ import { API } from "api/api"; import type { OrganizationSyncSettings } from "api/typesGenerated"; import type { QueryClient } from "react-query"; -export const getOrganizationIdpSyncSettingsKey = () => [ - "organizationIdpSyncSettings", -]; +const getOrganizationIdpSyncSettingsKey = () => ["organizationIdpSyncSettings"]; export const patchOrganizationSyncSettings = (queryClient: QueryClient) => { return { diff --git a/site/src/api/queries/organizations.ts b/site/src/api/queries/organizations.ts index 238fb4493fb52..c7b42f5f0e79f 100644 --- a/site/src/api/queries/organizations.ts +++ 
b/site/src/api/queries/organizations.ts @@ -8,7 +8,6 @@ import type { GroupSyncSettings, PaginatedMembersRequest, PaginatedMembersResponse, - ProvisionerJobStatus, RoleSyncSettings, UpdateOrganizationRequest, } from "api/typesGenerated"; @@ -182,20 +181,20 @@ export const provisionerDaemons = ( }; }; -export const getProvisionerDaemonGroupsKey = (organization: string) => [ +const getProvisionerDaemonGroupsKey = (organization: string) => [ "organization", organization, "provisionerDaemons", ]; -export const provisionerDaemonGroups = (organization: string) => { +const provisionerDaemonGroups = (organization: string) => { return { queryKey: getProvisionerDaemonGroupsKey(organization), queryFn: () => API.getProvisionerDaemonGroupsByOrganization(organization), }; }; -export const getGroupIdpSyncSettingsKey = (organization: string) => [ +const getGroupIdpSyncSettingsKey = (organization: string) => [ "organizations", organization, "groupIdpSyncSettings", @@ -220,7 +219,7 @@ export const patchGroupSyncSettings = ( }; }; -export const getRoleIdpSyncSettingsKey = (organization: string) => [ +const getRoleIdpSyncSettingsKey = (organization: string) => [ "organizations", organization, "roleIdpSyncSettings", @@ -350,7 +349,7 @@ export const workspacePermissionsByOrganization = ( }; }; -export const getOrganizationIdpSyncClaimFieldValuesKey = ( +const getOrganizationIdpSyncClaimFieldValuesKey = ( organization: string, field: string, ) => [organization, "idpSync", "fieldValues", field]; diff --git a/site/src/api/queries/settings.ts b/site/src/api/queries/settings.ts index 5b040508ae686..7605d16c41d6d 100644 --- a/site/src/api/queries/settings.ts +++ b/site/src/api/queries/settings.ts @@ -5,7 +5,7 @@ import type { } from "api/typesGenerated"; import type { QueryClient, QueryOptions } from "react-query"; -export const userQuietHoursScheduleKey = (userId: string) => [ +const userQuietHoursScheduleKey = (userId: string) => [ "settings", userId, "quietHours", diff --git a/site/src/api/queries/templates.ts b/site/src/api/queries/templates.ts index 372863de41991..72e5deaefc72a 100644 --- a/site/src/api/queries/templates.ts +++ b/site/src/api/queries/templates.ts @@ -13,7 +13,7 @@ import type { MutationOptions, QueryClient, QueryOptions } from "react-query"; import { delay } from "utils/delay"; import { getTemplateVersionFiles } from "utils/templateVersion"; -export const templateKey = (templateId: string) => ["template", templateId]; +const templateKey = (templateId: string) => ["template", templateId]; export const template = (templateId: string): QueryOptions