From 205c43da995229f822245a63e2ce3984144225f6 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Fri, 3 May 2024 14:07:29 -0500 Subject: [PATCH 001/149] fix(enterprise): mark nodes from unhealthy coordinators as lost (#13123) Instead of removing the mappings of unhealthy coordinators entirely, mark them as lost instead. This prevents peers from disappearing from other peers if a coordinator misses a heartbeat. --- enterprise/tailnet/pgcoord.go | 13 +++- enterprise/tailnet/pgcoord_internal_test.go | 40 ++++++++++++- enterprise/tailnet/pgcoord_test.go | 66 +++++++++++++++++---- 3 files changed, 104 insertions(+), 15 deletions(-) diff --git a/enterprise/tailnet/pgcoord.go b/enterprise/tailnet/pgcoord.go index 390e13621fff0..1a9dc88378b03 100644 --- a/enterprise/tailnet/pgcoord.go +++ b/enterprise/tailnet/pgcoord.go @@ -1485,10 +1485,17 @@ func (h *heartbeats) filter(mappings []mapping) []mapping { ok := m.coordinator == h.self if !ok { _, ok = h.coordinators[m.coordinator] + if !ok { + // If a mapping exists to a coordinator lost to heartbeats, + // still add the mapping as LOST. If a coordinator misses + // heartbeats but a client is still connected to it, this may be + // the only mapping available for it. Newer mappings will take + // precedence. 
+ m.kind = proto.CoordinateResponse_PeerUpdate_LOST + } } - if ok { - out = append(out, m) - } + + out = append(out, m) } return out } diff --git a/enterprise/tailnet/pgcoord_internal_test.go b/enterprise/tailnet/pgcoord_internal_test.go index b1bfb371f0959..53fd61d73f066 100644 --- a/enterprise/tailnet/pgcoord_internal_test.go +++ b/enterprise/tailnet/pgcoord_internal_test.go @@ -11,6 +11,7 @@ import ( "time" "github.com/google/uuid" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/mock/gomock" "golang.org/x/xerrors" @@ -33,9 +34,9 @@ import ( // make update-golden-files var UpdateGoldenFiles = flag.Bool("update", false, "update .golden files") -// TestHeartbeat_Cleanup is internal so that we can overwrite the cleanup period and not wait an hour for the timed +// TestHeartbeats_Cleanup is internal so that we can overwrite the cleanup period and not wait an hour for the timed // cleanup. -func TestHeartbeat_Cleanup(t *testing.T) { +func TestHeartbeats_Cleanup(t *testing.T) { t.Parallel() ctrl := gomock.NewController(t) @@ -78,6 +79,41 @@ func TestHeartbeat_Cleanup(t *testing.T) { close(waitForCleanup) } +func TestHeartbeats_LostCoordinator_MarkLost(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mStore := dbmock.NewMockStore(ctrl) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + + uut := &heartbeats{ + ctx: ctx, + logger: logger, + store: mStore, + cleanupPeriod: time.Millisecond, + coordinators: map[uuid.UUID]time.Time{ + uuid.New(): time.Now(), + }, + } + + mpngs := []mapping{{ + peer: uuid.New(), + coordinator: uuid.New(), + updatedAt: time.Now(), + node: &proto.Node{}, + kind: proto.CoordinateResponse_PeerUpdate_NODE, + }} + + // Filter should still return the mapping without a coordinator, but marked + // as LOST. 
+ got := uut.filter(mpngs) + require.Len(t, got, 1) + assert.Equal(t, proto.CoordinateResponse_PeerUpdate_LOST, got[0].kind) +} + // TestLostPeerCleanupQueries tests that our SQL queries to clean up lost peers do what we expect, // that is, clean up peers and associated tunnels that have been lost for over 24 hours. func TestLostPeerCleanupQueries(t *testing.T) { diff --git a/enterprise/tailnet/pgcoord_test.go b/enterprise/tailnet/pgcoord_test.go index b27db149f634b..5bd722533dc39 100644 --- a/enterprise/tailnet/pgcoord_test.go +++ b/enterprise/tailnet/pgcoord_test.go @@ -415,6 +415,52 @@ func TestPGCoordinatorSingle_MissedHeartbeats(t *testing.T) { assertEventuallyLost(ctx, t, store, client.id) } +func TestPGCoordinatorSingle_MissedHeartbeats_NoDrop(t *testing.T) { + t.Parallel() + if !dbtestutil.WillUsePostgres() { + t.Skip("test only with postgres") + } + store, ps := dbtestutil.NewDB(t) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) + defer cancel() + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + + coordinator, err := tailnet.NewPGCoord(ctx, logger, ps, store) + require.NoError(t, err) + defer coordinator.Close() + + agentID := uuid.New() + + client := agpltest.NewPeer(ctx, t, coordinator, "client") + defer client.Close(ctx) + client.AddTunnel(agentID) + + client.UpdateDERP(11) + + // simulate a second coordinator via DB calls only --- our goal is to test + // broken heart-beating, so we can't use a real coordinator + fCoord2 := &fakeCoordinator{ + ctx: ctx, + t: t, + store: store, + id: uuid.New(), + } + // simulate a single heartbeat, the coordinator is healthy + fCoord2.heartbeat() + + fCoord2.agentNode(agentID, &agpl.Node{PreferredDERP: 12}) + // since it's healthy the client should get the new node. + client.AssertEventuallyHasDERP(agentID, 12) + + // the heartbeat should then timeout and we'll get sent a LOST update, NOT a + // disconnect. 
+ client.AssertEventuallyLost(agentID) + + client.Close(ctx) + + assertEventuallyLost(ctx, t, store, client.ID) +} + func TestPGCoordinatorSingle_SendsHeartbeats(t *testing.T) { t.Parallel() if !dbtestutil.WillUsePostgres() { @@ -857,6 +903,16 @@ func newTestAgent(t *testing.T, coord agpl.CoordinatorV1, name string, id ...uui return a } +func newTestClient(t *testing.T, coord agpl.CoordinatorV1, agentID uuid.UUID, id ...uuid.UUID) *testConn { + c := newTestConn(id) + go func() { + err := coord.ServeClient(c.serverWS, c.id, agentID) + assert.NoError(t, err) + close(c.closeChan) + }() + return c +} + func (c *testConn) close() error { return c.ws.Close() } @@ -902,16 +958,6 @@ func (c *testConn) waitForClose(ctx context.Context, t *testing.T) { } } -func newTestClient(t *testing.T, coord agpl.CoordinatorV1, agentID uuid.UUID, id ...uuid.UUID) *testConn { - c := newTestConn(id) - go func() { - err := coord.ServeClient(c.serverWS, c.id, agentID) - assert.NoError(t, err) - close(c.closeChan) - }() - return c -} - func assertEventuallyHasDERPs(ctx context.Context, t *testing.T, c *testConn, expected ...int) { t.Helper() for { From 060f023174c70b8d22185cad5a9f443e9f8c9dab Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Fri, 3 May 2024 14:03:13 -0700 Subject: [PATCH 002/149] feat: mask coder login token to enhance security (#12948) * feat(login): treat coder token as a secret * Update login.go --- cli/login.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/cli/login.go b/cli/login.go index dfba4f3b4cfb0..65a94d8a4ec3e 100644 --- a/cli/login.go +++ b/cli/login.go @@ -287,7 +287,8 @@ func (r *RootCmd) login() *serpent.Command { } sessionToken, err = cliui.Prompt(inv, cliui.PromptOptions{ - Text: "Paste your token here:", + Text: "Paste your token here:", + Secret: true, Validate: func(token string) error { client.SetSessionToken(token) _, err := client.User(ctx, codersdk.Me) From b20c63c1852d1abd597c06d8a0e1d6a87f29ca88 Mon Sep 17 00:00:00 2001 From: 
recanman <29310982+recanman@users.noreply.github.com> Date: Fri, 3 May 2024 14:09:23 -0700 Subject: [PATCH 003/149] fix: install openrc service on alpine (#12294) (#12870) * fix: install openrc service on alpine (#12294) * fmt --------- Co-authored-by: Kyle Carberry --- scripts/linux-pkg/coder-openrc | 38 ++++++++++++++++++ .../linux-pkg/coder-workspace-proxy-openrc | 39 +++++++++++++++++++ scripts/linux-pkg/nfpm-alpine.yaml | 29 ++++++++++++++ scripts/package.sh | 9 ++++- 4 files changed, 114 insertions(+), 1 deletion(-) create mode 100755 scripts/linux-pkg/coder-openrc create mode 100755 scripts/linux-pkg/coder-workspace-proxy-openrc create mode 100644 scripts/linux-pkg/nfpm-alpine.yaml diff --git a/scripts/linux-pkg/coder-openrc b/scripts/linux-pkg/coder-openrc new file mode 100755 index 0000000000000..d7b5800f05c44 --- /dev/null +++ b/scripts/linux-pkg/coder-openrc @@ -0,0 +1,38 @@ +#!/sbin/openrc-run +name=coder +description="Coder - Self-hosted developer workspaces on your infra" +document="https://coder.com/docs/coder-oss" + +depend() { + need net + after net-online + use dns logger +} + +checkpath --directory --owner coder:coder --mode 0700 /var/cache/coder + +start_pre() { + if [ ! 
-f /etc/coder.d/coder.env ]; then + eerror "/etc/coder.d/coder.env file does not exist" + return 1 + fi + # Read and export environment variables ignoring comment lines and blank lines + while IFS= read -r line; do + # Skip blank or comment lines + if [ -z "$line" ] || [[ "$line" =~ ^# ]]; then + continue + fi + export "$line" + done < /etc/coder.d/coder.env +} + +command="/usr/bin/coder" +command_args="server" +command_user="coder:coder" +command_background="yes" +pidfile="/run/coder.pid" + +restart="always" +restart_delay="5" + +stop_timeout="90" diff --git a/scripts/linux-pkg/coder-workspace-proxy-openrc b/scripts/linux-pkg/coder-workspace-proxy-openrc new file mode 100755 index 0000000000000..867812f4bd66e --- /dev/null +++ b/scripts/linux-pkg/coder-workspace-proxy-openrc @@ -0,0 +1,39 @@ +#!/sbin/openrc-run +name=coder-workspace-proxy +description="Coder - external workspace proxy server" +document="https://coder.com/docs/coder-oss" + +depend() { + need net + after net-online + use dns logger +} + +checkpath --directory --owner coder:coder --mode 0700 /var/cache/coder + +start_pre() { + if [ ! 
-f /etc/coder.d/coder-workspace-proxy.env ]; then + eerror "/etc/coder.d/coder-workspace-proxy.env file does not exist" + return 1 + fi + + # Read and export environment variables ignoring comment lines and blank lines + while IFS= read -r line; do + # Skip blank or comment lines + if [ -z "$line" ] || [[ "$line" =~ ^# ]]; then + continue + fi + export "$line" + done < /etc/coder.d/coder-workspace-proxy.env +} + +command="/usr/bin/coder" +command_args="workspace-proxy server" +command_user="coder:coder" +command_background="yes" +pidfile="/run/coder-workspace-proxy.pid" + +restart="always" +restart_delay="5" + +stop_timeout="90" diff --git a/scripts/linux-pkg/nfpm-alpine.yaml b/scripts/linux-pkg/nfpm-alpine.yaml new file mode 100644 index 0000000000000..ab174a6c873bd --- /dev/null +++ b/scripts/linux-pkg/nfpm-alpine.yaml @@ -0,0 +1,29 @@ +name: coder +platform: linux +arch: "${GOARCH}" +version: "${CODER_VERSION}" +version_schema: semver +release: 1 + +vendor: Coder +homepage: https://coder.com +maintainer: Coder +description: | + Provision development environments with infrastructure with code +license: AGPL-3.0 +suggests: + - postgresql + +scripts: + preinstall: preinstall.sh + +contents: + - src: coder + dst: /usr/bin/coder + - src: coder.env + dst: /etc/coder.d/coder.env + type: "config|noreplace" + - src: coder-workspace-proxy-openrc + dst: /etc/init.d/coder-workspace-proxy + - src: coder-openrc + dst: /etc/init.d/coder diff --git a/scripts/package.sh b/scripts/package.sh index 8afbf5d608ea9..2b826735f38ce 100755 --- a/scripts/package.sh +++ b/scripts/package.sh @@ -89,9 +89,16 @@ ln "$(realpath scripts/linux-pkg/coder.service)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/nfpm.yaml)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/preinstall.sh)" "$temp_dir/" +nfpm_config_file="nfpm.yaml" + +# Use nfpm-alpine.yaml when building for Alpine (OpenRC). 
+if [[ "$format" == "apk" ]]; then + nfpm_config_file="nfpm-alpine.yaml" +fi + pushd "$temp_dir" GOARCH="$arch" CODER_VERSION="$version" nfpm package \ - -f nfpm.yaml \ + -f "$nfpm_config_file" \ -p "$format" \ -t "$output_path" \ 1>&2 From 13dd526f11f90cc28ffbd01395261d479b8c3dfd Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Fri, 3 May 2024 17:12:06 -0500 Subject: [PATCH 004/149] fix: prevent stdlib logging from messing up ssh (#13161) Fixes https://github.com/coder/coder/issues/13144 --- cli/server.go | 2 +- cli/ssh.go | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/cli/server.go b/cli/server.go index e96c8cacdc744..3706b2ee1bc92 100644 --- a/cli/server.go +++ b/cli/server.go @@ -1441,7 +1441,7 @@ func newProvisionerDaemon( connector[string(database.ProvisionerTypeTerraform)] = sdkproto.NewDRPCProvisionerClient(terraformClient) default: - return nil, fmt.Errorf("unknown provisioner type %q", provisionerType) + return nil, xerrors.Errorf("unknown provisioner type %q", provisionerType) } } diff --git a/cli/ssh.go b/cli/ssh.go index 1aa832fcda27e..aa8bdadb9d0dd 100644 --- a/cli/ssh.go +++ b/cli/ssh.go @@ -6,6 +6,7 @@ import ( "errors" "fmt" "io" + "log" "net/http" "net/url" "os" @@ -79,6 +80,10 @@ func (r *RootCmd) ssh() *serpent.Command { ctx, cancel := context.WithCancel(ctx) defer cancel() + // Prevent unnecessary logs from the stdlib from messing up the TTY. 
+ // See: https://github.com/coder/coder/issues/13144 + log.SetOutput(io.Discard) + logger := inv.Logger defer func() { if retErr != nil { From 886a97b4259d1028a29c7b77a3910e9c457b2be6 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Mon, 6 May 2024 00:01:47 -0500 Subject: [PATCH 005/149] chore: fix build ci (#13164) --- coderd/database/dbpurge/dbpurge_test.go | 3 +++ scripts/package.sh | 1 + 2 files changed, 4 insertions(+) diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go index 4255409ba68fd..1a90cb9da750d 100644 --- a/coderd/database/dbpurge/dbpurge_test.go +++ b/coderd/database/dbpurge/dbpurge_test.go @@ -41,6 +41,9 @@ func TestPurge(t *testing.T) { func TestDeleteOldWorkspaceAgentStats(t *testing.T) { t.Parallel() + // https://github.com/coder/coder/issues/13165 + t.Skip() + db, _ := dbtestutil.NewDB(t) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) diff --git a/scripts/package.sh b/scripts/package.sh index 2b826735f38ce..eb62544da9a6a 100755 --- a/scripts/package.sh +++ b/scripts/package.sh @@ -87,6 +87,7 @@ ln "$(realpath coder.env)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/coder-workspace-proxy.service)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/coder.service)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/nfpm.yaml)" "$temp_dir/" +ln "$(realpath scripts/linux-pkg/nfpm-alpine.yaml)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/preinstall.sh)" "$temp_dir/" nfpm_config_file="nfpm.yaml" From d956af0a3aab6e651f58796bec331f62bad9c95d Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Sun, 5 May 2024 22:36:54 -0700 Subject: [PATCH 006/149] chore: add EasyNATDERP tailnet integration test (#13138) --- tailnet/test/integration/integration.go | 148 ++------ tailnet/test/integration/integration_test.go | 108 ++++-- tailnet/test/integration/network.go | 368 ++++++++++++++++++- tailnet/test/integration/suite.go | 31 ++ 4 files changed, 506 insertions(+), 149 deletions(-) 
create mode 100644 tailnet/test/integration/suite.go diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index f4d884b36c35a..ff6552e2d8f49 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -30,7 +30,6 @@ import ( "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/cryptorand" "github.com/coder/coder/v2/tailnet" ) @@ -40,78 +39,7 @@ var ( Client2ID = uuid.MustParse("00000000-0000-0000-0000-000000000002") ) -type TestTopology struct { - Name string - // SetupNetworking creates interfaces and network namespaces for the test. - // The most simple implementation is NetworkSetupDefault, which only creates - // a network namespace shared for all tests. - SetupNetworking func(t *testing.T, logger slog.Logger) TestNetworking - - // StartServer gets called in the server subprocess. It's expected to start - // the coordinator server in the background and return. - StartServer func(t *testing.T, logger slog.Logger, listenAddr string) - // StartClient gets called in each client subprocess. It's expected to - // create the tailnet.Conn and ensure connectivity to it's peer. - StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn - - // RunTests is the main test function. It's called in each of the client - // subprocesses. If tests can only run once, they should check the client ID - // and return early if it's not the expected one. - RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID, conn *tailnet.Conn) -} - -type TestNetworking struct { - // ServerListenAddr is the IP address and port that the server listens on, - // passed to StartServer. - ServerListenAddr string - // ServerAccessURLClient1 is the hostname and port that the first client - // uses to access the server. 
- ServerAccessURLClient1 string - // ServerAccessURLClient2 is the hostname and port that the second client - // uses to access the server. - ServerAccessURLClient2 string - - // Networking settings for each subprocess. - ProcessServer TestNetworkingProcess - ProcessClient1 TestNetworkingProcess - ProcessClient2 TestNetworkingProcess -} - -type TestNetworkingProcess struct { - // NetNS to enter. If zero, the current network namespace is used. - NetNSFd int -} - -func SetupNetworkingLoopback(t *testing.T, _ slog.Logger) TestNetworking { - netNSName := "codertest_netns_" - randStr, err := cryptorand.String(4) - require.NoError(t, err, "generate random string for netns name") - netNSName += randStr - - // Create a single network namespace for all tests so we can have an - // isolated loopback interface. - netNSFile, err := createNetNS(netNSName) - require.NoError(t, err, "create network namespace") - t.Cleanup(func() { - _ = netNSFile.Close() - }) - - var ( - listenAddr = "127.0.0.1:8080" - process = TestNetworkingProcess{ - NetNSFd: int(netNSFile.Fd()), - } - ) - return TestNetworking{ - ServerListenAddr: listenAddr, - ServerAccessURLClient1: "http://" + listenAddr, - ServerAccessURLClient2: "http://" + listenAddr, - ProcessServer: process, - ProcessClient1: process, - ProcessClient2: process, - } -} - +// StartServerBasic creates a coordinator and DERP server. 
func StartServerBasic(t *testing.T, logger slog.Logger, listenAddr string) { coord := tailnet.NewCoordinator(logger) var coordPtr atomic.Pointer[tailnet.Coordinator] @@ -208,42 +136,7 @@ func StartServerBasic(t *testing.T, logger slog.Logger, listenAddr string) { }) } -func basicDERPMap(t *testing.T, serverURL *url.URL) *tailcfg.DERPMap { - portStr := serverURL.Port() - port, err := strconv.Atoi(portStr) - require.NoError(t, err, "parse server port") - - hostname := serverURL.Hostname() - ipv4 := "" - ip, err := netip.ParseAddr(hostname) - if err == nil { - hostname = "" - ipv4 = ip.String() - } - - return &tailcfg.DERPMap{ - Regions: map[int]*tailcfg.DERPRegion{ - 1: { - RegionID: 1, - RegionCode: "test", - RegionName: "test server", - Nodes: []*tailcfg.DERPNode{ - { - Name: "test0", - RegionID: 1, - HostName: hostname, - IPv4: ipv4, - IPv6: "none", - DERPPort: port, - ForceHTTP: true, - InsecureForTests: true, - }, - }, - }, - }, - } -} - +// StartClientBasic creates a client connection to the server. 
func StartClientBasic(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn { u, err := serverURL.Parse(fmt.Sprintf("/api/v2/workspaceagents/%s/coordinate", myID.String())) require.NoError(t, err) @@ -284,3 +177,40 @@ func StartClientBasic(t *testing.T, logger slog.Logger, serverURL *url.URL, myID return conn } + +func basicDERPMap(t *testing.T, serverURL *url.URL) *tailcfg.DERPMap { + portStr := serverURL.Port() + port, err := strconv.Atoi(portStr) + require.NoError(t, err, "parse server port") + + hostname := serverURL.Hostname() + ipv4 := "" + ip, err := netip.ParseAddr(hostname) + if err == nil { + hostname = "" + ipv4 = ip.String() + } + + return &tailcfg.DERPMap{ + Regions: map[int]*tailcfg.DERPRegion{ + 1: { + RegionID: 1, + RegionCode: "test", + RegionName: "test server", + Nodes: []*tailcfg.DERPNode{ + { + Name: "test0", + RegionID: 1, + HostName: hostname, + IPv4: ipv4, + IPv6: "none", + DERPPort: port, + STUNPort: -1, + ForceHTTP: true, + InsecureForTests: true, + }, + }, + }, + }, + } +} diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index 1678016c4af78..76b57fecae651 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -12,6 +12,8 @@ import ( "os/exec" "os/signal" "runtime" + "strings" + "sync" "syscall" "testing" "time" @@ -66,20 +68,22 @@ func TestMain(m *testing.M) { var topologies = []integration.TestTopology{ { - Name: "BasicLoopback", + Name: "BasicLoopbackDERP", SetupNetworking: integration.SetupNetworkingLoopback, StartServer: integration.StartServerBasic, StartClient: integration.StartClientBasic, - RunTests: func(t *testing.T, log slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID, conn *tailnet.Conn) { - // Test basic connectivity - peerIP := tailnet.IPFromUUID(peerID) - _, _, _, err := conn.Ping(testutil.Context(t, testutil.WaitLong), peerIP) - require.NoError(t, err, "ping 
peer") - }, + RunTests: integration.TestSuite, + }, + { + Name: "EasyNATDERP", + SetupNetworking: integration.SetupNetworkingEasyNAT, + StartServer: integration.StartServerBasic, + StartClient: integration.StartClientBasic, + RunTests: integration.TestSuite, }, } -//nolint:paralleltest +//nolint:paralleltest,tparallel func TestIntegration(t *testing.T) { if *isSubprocess { handleTestSubprocess(t) @@ -87,10 +91,13 @@ func TestIntegration(t *testing.T) { } for _, topo := range topologies { - //nolint:paralleltest + topo := topo t.Run(topo.Name, func(t *testing.T) { - log := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + // These can run in parallel because every test should be in an + // isolated NetNS. + t.Parallel() + log := slogtest.Make(t, nil).Leveled(slog.LevelDebug) networking := topo.SetupNetworking(t, log) // Fork the three child processes. @@ -100,13 +107,13 @@ func TestIntegration(t *testing.T) { client2ErrCh, closeClient2 := startClientSubprocess(t, topo.Name, networking, 2) // Wait for client1 to exit. - require.NoError(t, <-client1ErrCh) + require.NoError(t, <-client1ErrCh, "client 1 exited") // Close client2 and the server. closeClient2() - require.NoError(t, <-client2ErrCh) + require.NoError(t, <-client2ErrCh, "client 2 exited") closeServer() - require.NoError(t, <-serverErrCh) + require.NoError(t, <-serverErrCh, "server exited") }) } } @@ -152,8 +159,14 @@ func handleTestSubprocess(t *testing.T) { conn := topo.StartClient(t, log, serverURL, myID, peerID) if *clientRunTests { + // Wait for connectivity. 
+ peerIP := tailnet.IPFromUUID(peerID) + if !conn.AwaitReachable(testutil.Context(t, testutil.WaitLong), peerIP) { + t.Fatalf("peer %v did not become reachable", peerIP) + } + topo.RunTests(t, log, serverURL, myID, peerID, conn) - // and exit + // then exit return } } @@ -194,7 +207,7 @@ func waitForServerAvailable(t *testing.T, serverURL *url.URL) { } func startServerSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) (<-chan error, func()) { - return startSubprocess(t, networking.ProcessServer.NetNSFd, []string{ + return startSubprocess(t, "server", networking.ProcessServer.NetNS, []string{ "--subprocess", "--test-name=" + topologyName, "--role=server", @@ -210,10 +223,12 @@ func startClientSubprocess(t *testing.T, topologyName string, networking integra myID = integration.Client1ID peerID = integration.Client2ID accessURL = networking.ServerAccessURLClient1 + netNS = networking.ProcessClient1.NetNS ) if clientNumber == 2 { myID, peerID = peerID, myID accessURL = networking.ServerAccessURLClient2 + netNS = networking.ProcessClient2.NetNS } flags := []string{ @@ -229,14 +244,15 @@ func startClientSubprocess(t *testing.T, topologyName string, networking integra flags = append(flags, "--client-run-tests") } - return startSubprocess(t, networking.ProcessClient1.NetNSFd, flags) + return startSubprocess(t, clientName, netNS, flags) } -func startSubprocess(t *testing.T, netNSFd int, flags []string) (<-chan error, func()) { +func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []string) (<-chan error, func()) { name := os.Args[0] - args := append(os.Args[1:], flags...) + // Always use verbose mode since it gets piped to the parent test anyways. + args := append(os.Args[1:], append([]string{"-test.v=true"}, flags...)...) - if netNSFd > 0 { + if netNS != nil { // We use nsenter to enter the namespace. 
// We can't use `setns` easily from Golang in the parent process because // you can't execute the syscall in the forked child thread before it @@ -249,11 +265,17 @@ func startSubprocess(t *testing.T, netNSFd int, flags []string) (<-chan error, f } cmd := exec.Command(name, args...) - if netNSFd > 0 { - cmd.ExtraFiles = []*os.File{os.NewFile(uintptr(netNSFd), "")} + if netNS != nil { + cmd.ExtraFiles = []*os.File{netNS} + } + + out := &testWriter{ + name: processName, + t: t, } - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr + t.Cleanup(out.Flush) + cmd.Stdout = out + cmd.Stderr = out cmd.SysProcAttr = &syscall.SysProcAttr{ Pdeathsig: syscall.SIGTERM, } @@ -293,3 +315,43 @@ func startSubprocess(t *testing.T, netNSFd int, flags []string) (<-chan error, f return waitErr, closeFn } + +type testWriter struct { + mut sync.Mutex + name string + t *testing.T + + capturedLines []string +} + +func (w *testWriter) Write(p []byte) (n int, err error) { + w.mut.Lock() + defer w.mut.Unlock() + str := string(p) + split := strings.Split(str, "\n") + for _, s := range split { + if s == "" { + continue + } + + // If a line begins with "\s*--- (PASS|FAIL)" or is just PASS or FAIL, + // then it's a test result line. We want to capture it and log it later. 
+ trimmed := strings.TrimSpace(s) + if strings.HasPrefix(trimmed, "--- PASS") || strings.HasPrefix(trimmed, "--- FAIL") || trimmed == "PASS" || trimmed == "FAIL" { + w.capturedLines = append(w.capturedLines, s) + continue + } + + w.t.Logf("%s output: \t%s", w.name, s) + } + return len(p), nil +} + +func (w *testWriter) Flush() { + w.mut.Lock() + defer w.mut.Unlock() + for _, s := range w.capturedLines { + w.t.Logf("%s output: \t%s", w.name, s) + } + w.capturedLines = nil +} diff --git a/tailnet/test/integration/network.go b/tailnet/test/integration/network.go index 95d68ca8e7d6b..604d7827cd71d 100644 --- a/tailnet/test/integration/network.go +++ b/tailnet/test/integration/network.go @@ -4,16 +4,276 @@ package integration import ( + "bytes" "fmt" + "net/url" "os" "os/exec" + "testing" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + "github.com/tailscale/netlink" "golang.org/x/xerrors" + + "cdr.dev/slog" + + "github.com/coder/coder/v2/cryptorand" + "github.com/coder/coder/v2/tailnet" ) +type TestTopology struct { + Name string + // SetupNetworking creates interfaces and network namespaces for the test. + // The most simple implementation is NetworkSetupDefault, which only creates + // a network namespace shared for all tests. + SetupNetworking func(t *testing.T, logger slog.Logger) TestNetworking + + // StartServer gets called in the server subprocess. It's expected to start + // the coordinator server in the background and return. + StartServer func(t *testing.T, logger slog.Logger, listenAddr string) + // StartClient gets called in each client subprocess. It's expected to + // create the tailnet.Conn and ensure connectivity to it's peer. + StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn + + // RunTests is the main test function. It's called in each of the client + // subprocesses. 
If tests can only run once, they should check the client ID + // and return early if it's not the expected one. + RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID, conn *tailnet.Conn) +} + +type TestNetworking struct { + // ServerListenAddr is the IP address and port that the server listens on, + // passed to StartServer. + ServerListenAddr string + // ServerAccessURLClient1 is the hostname and port that the first client + // uses to access the server. + ServerAccessURLClient1 string + // ServerAccessURLClient2 is the hostname and port that the second client + // uses to access the server. + ServerAccessURLClient2 string + + // Networking settings for each subprocess. + ProcessServer TestNetworkingProcess + ProcessClient1 TestNetworkingProcess + ProcessClient2 TestNetworkingProcess +} + +type TestNetworkingProcess struct { + // NetNS to enter. If nil, the current network namespace is used. + NetNS *os.File +} + +// SetupNetworkingLoopback creates a network namespace with a loopback interface +// for all tests to share. This is the simplest networking setup. The network +// namespace only exists for isolation on the host and doesn't serve any routing +// purpose. +func SetupNetworkingLoopback(t *testing.T, _ slog.Logger) TestNetworking { + netNSName := "codertest_netns_" + randStr, err := cryptorand.String(4) + require.NoError(t, err, "generate random string for netns name") + netNSName += randStr + + // Create a single network namespace for all tests so we can have an + // isolated loopback interface. 
+ netNSFile := createNetNS(t, netNSName) + + var ( + listenAddr = "127.0.0.1:8080" + process = TestNetworkingProcess{ + NetNS: netNSFile, + } + ) + return TestNetworking{ + ServerListenAddr: listenAddr, + ServerAccessURLClient1: "http://" + listenAddr, + ServerAccessURLClient2: "http://" + listenAddr, + ProcessServer: process, + ProcessClient1: process, + ProcessClient2: process, + } +} + +// SetupNetworkingEasyNAT creates a network namespace with a router that NATs +// packets between two clients and a server. +// See createFakeRouter for the full topology. +// NAT is achieved through a single iptables masquerade rule. +func SetupNetworkingEasyNAT(t *testing.T, _ slog.Logger) TestNetworking { + router := createFakeRouter(t) + + // Set up iptables masquerade rules to allow the router to NAT packets + // between the Three Kingdoms. + _, err := commandInNetNS(router.RouterNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() + require.NoError(t, wrapExitErr(err), "enable IP forwarding in router NetNS") + _, err = commandInNetNS(router.RouterNetNS, "iptables", []string{ + "-t", "nat", + "-A", "POSTROUTING", + // Every interface except loopback. 
+ "!", "-o", "lo", + "-j", "MASQUERADE", + }).Output() + require.NoError(t, wrapExitErr(err), "add iptables masquerade rule") + + return router.Net +} + +type fakeRouter struct { + Net TestNetworking + + RouterNetNS *os.File + RouterVeths struct { + Server string + Client1 string + Client2 string + } + ServerNetNS *os.File + ServerVeth string + Client1NetNS *os.File + Client1Veth string + Client2NetNS *os.File + Client2Veth string +} + +// fakeRouter creates multiple namespaces with veth pairs between them with +// the following topology: +// +// namespaces: +// - router +// - server +// - client1 +// - client2 +// +// veth pairs: +// - router-server (10.0.1.1) <-> server-router (10.0.1.2) +// - router-client1 (10.0.2.1) <-> client1-router (10.0.2.2) +// - router-client2 (10.0.3.1) <-> client2-router (10.0.3.2) +// +// No iptables rules are created, so packets will not be forwarded out of the +// box. Routes are created between all namespaces based on the veth pairs, +// however. +func createFakeRouter(t *testing.T) fakeRouter { + t.Helper() + const ( + routerServerPrefix = "10.0.1." + routerServerIP = routerServerPrefix + "1" + serverIP = routerServerPrefix + "2" + routerClient1Prefix = "10.0.2." + routerClient1IP = routerClient1Prefix + "1" + client1IP = routerClient1Prefix + "2" + routerClient2Prefix = "10.0.3." + routerClient2IP = routerClient2Prefix + "1" + client2IP = routerClient2Prefix + "2" + ) + + prefix := uniqNetName(t) + "_" + router := fakeRouter{} + router.RouterVeths.Server = prefix + "r-s" + router.RouterVeths.Client1 = prefix + "r-c1" + router.RouterVeths.Client2 = prefix + "r-c2" + router.ServerVeth = prefix + "s-r" + router.Client1Veth = prefix + "c1-r" + router.Client2Veth = prefix + "c2-r" + + // Create namespaces. 
+ router.RouterNetNS = createNetNS(t, prefix+"r") + serverNS := createNetNS(t, prefix+"s") + client1NS := createNetNS(t, prefix+"c1") + client2NS := createNetNS(t, prefix+"c2") + + vethPairs := []struct { + parentName string + peerName string + parentNS *os.File + peerNS *os.File + parentIP string + peerIP string + }{ + { + parentName: router.RouterVeths.Server, + peerName: router.ServerVeth, + parentNS: router.RouterNetNS, + peerNS: serverNS, + parentIP: routerServerIP, + peerIP: serverIP, + }, + { + parentName: router.RouterVeths.Client1, + peerName: router.Client1Veth, + parentNS: router.RouterNetNS, + peerNS: client1NS, + parentIP: routerClient1IP, + peerIP: client1IP, + }, + { + parentName: router.RouterVeths.Client2, + peerName: router.Client2Veth, + parentNS: router.RouterNetNS, + peerNS: client2NS, + parentIP: routerClient2IP, + peerIP: client2IP, + }, + } + + for _, vethPair := range vethPairs { + err := createVethPair(vethPair.parentName, vethPair.peerName) + require.NoErrorf(t, err, "create veth pair %q <-> %q", vethPair.parentName, vethPair.peerName) + + // Move the veth interfaces to the respective network namespaces. + err = setVethNetNS(vethPair.parentName, int(vethPair.parentNS.Fd())) + require.NoErrorf(t, err, "set veth %q to NetNS", vethPair.parentName) + err = setVethNetNS(vethPair.peerName, int(vethPair.peerNS.Fd())) + require.NoErrorf(t, err, "set veth %q to NetNS", vethPair.peerName) + + // Set IP addresses on the interfaces. + err = setInterfaceIP(vethPair.parentNS, vethPair.parentName, vethPair.parentIP) + require.NoErrorf(t, err, "set IP %q on interface %q", vethPair.parentIP, vethPair.parentName) + err = setInterfaceIP(vethPair.peerNS, vethPair.peerName, vethPair.peerIP) + require.NoErrorf(t, err, "set IP %q on interface %q", vethPair.peerIP, vethPair.peerName) + + // Bring up both interfaces. 
+ err = setInterfaceUp(vethPair.parentNS, vethPair.parentName)
+ require.NoErrorf(t, err, "bring up interface %q", vethPair.parentName)
+ err = setInterfaceUp(vethPair.peerNS, vethPair.peerName)
+ require.NoErrorf(t, err, "bring up interface %q", vethPair.peerName)
+
+ // We don't need to add a route from parent to peer since the kernel
+ // already adds a default route for the /24. We DO need to add a default
+ // route from peer to parent, however.
+ err = addRouteInNetNS(vethPair.peerNS, []string{"default", "via", vethPair.parentIP, "dev", vethPair.peerName})
+ require.NoErrorf(t, err, "add peer default route to %q", vethPair.peerName)
+ }
+
+ router.Net = TestNetworking{
+ ServerListenAddr: serverIP + ":8080",
+ ServerAccessURLClient1: "http://" + serverIP + ":8080",
+ ServerAccessURLClient2: "http://" + serverIP + ":8080",
+ ProcessServer: TestNetworkingProcess{
+ NetNS: serverNS,
+ },
+ ProcessClient1: TestNetworkingProcess{
+ NetNS: client1NS,
+ },
+ ProcessClient2: TestNetworkingProcess{
+ NetNS: client2NS,
+ },
+ }
+ return router
+}
+
+func uniqNetName(t *testing.T) string {
+ t.Helper()
+ netNSName := "cdr_"
+ randStr, err := cryptorand.String(3)
+ require.NoError(t, err, "generate random string for netns name")
+ netNSName += randStr
+ return netNSName
+}
+
 // createNetNS creates a new network namespace with the given name. The returned
 // file is a file descriptor to the network namespace.
-func createNetNS(name string) (*os.File, error) {
+// Note: all cleanup is handled for you, you do not need to call Close on the
+// returned file.
+func createNetNS(t *testing.T, name string) *os.File {
 // We use ip-netns here because it handles the process of creating a
 // disowned netns for us.
 // The only way to create a network namespace is by calling unshare(2) or
@@ -23,33 +283,107 @@ func createNetNS(name string) (*os.File, error) {
 // will keep the namespace alive until the mount is removed.
 // ip-netns does this for us.
Without it, we would have to fork anyways. // Later, we will use nsenter to enter this network namespace. - err := exec.Command("ip", "netns", "add", name).Run() - if err != nil { - return nil, xerrors.Errorf("create network namespace via ip-netns: %w", err) - } + _, err := exec.Command("ip", "netns", "add", name).Output() + require.NoError(t, wrapExitErr(err), "create network namespace via ip-netns") + t.Cleanup(func() { + _, _ = exec.Command("ip", "netns", "delete", name).Output() + }) - // Open /run/netns/$name to get a file descriptor to the network namespace - // so it stays active after we soft-delete it. + // Open /run/netns/$name to get a file descriptor to the network namespace. path := fmt.Sprintf("/run/netns/%s", name) file, err := os.OpenFile(path, os.O_RDONLY, 0) - if err != nil { - return nil, xerrors.Errorf("open network namespace file %q: %w", path, err) - } + require.NoError(t, err, "open network namespace file") + t.Cleanup(func() { + _ = file.Close() + }) // Exec "ip link set lo up" in the namespace to bring up loopback // networking. //nolint:gosec - err = exec.Command("ip", "netns", "exec", name, "ip", "link", "set", "lo", "up").Run() + _, err = exec.Command("ip", "-netns", name, "link", "set", "lo", "up").Output() + require.NoError(t, wrapExitErr(err), "bring up loopback interface in network namespace") + + return file +} + +// createVethPair creates a veth pair with the given names. +func createVethPair(parentVethName, peerVethName string) error { + vethLinkAttrs := netlink.NewLinkAttrs() + vethLinkAttrs.Name = parentVethName + veth := &netlink.Veth{ + LinkAttrs: vethLinkAttrs, + PeerName: peerVethName, + } + + err := netlink.LinkAdd(veth) + if err != nil { + return xerrors.Errorf("LinkAdd(name: %q, peerName: %q): %w", parentVethName, peerVethName, err) + } + + return nil +} + +// setVethNetNS moves the veth interface to the specified network namespace. 
+func setVethNetNS(vethName string, netNSFd int) error { + veth, err := netlink.LinkByName(vethName) + if err != nil { + return xerrors.Errorf("LinkByName(%q): %w", vethName, err) + } + + err = netlink.LinkSetNsFd(veth, netNSFd) + if err != nil { + return xerrors.Errorf("LinkSetNsFd(%q, %v): %w", vethName, netNSFd, err) + } + + return nil +} + +// setInterfaceIP sets the IP address on the given interface. It automatically +// adds a /24 subnet mask. +func setInterfaceIP(netNS *os.File, ifaceName, ip string) error { + _, err := commandInNetNS(netNS, "ip", []string{"addr", "add", ip + "/24", "dev", ifaceName}).Output() if err != nil { - return nil, xerrors.Errorf("bring up loopback interface in network namespace: %w", err) + return xerrors.Errorf("set IP %q on interface %q in netns: %w", ip, ifaceName, wrapExitErr(err)) } - // Remove the network namespace. The kernel will keep it around until the - // file descriptor is closed. - err = exec.Command("ip", "netns", "delete", name).Run() + return nil +} + +// setInterfaceUp brings the given interface up. +func setInterfaceUp(netNS *os.File, ifaceName string) error { + _, err := commandInNetNS(netNS, "ip", []string{"link", "set", ifaceName, "up"}).Output() if err != nil { - return nil, xerrors.Errorf("soft delete network namespace via ip-netns: %w", err) + return xerrors.Errorf("bring up interface %q in netns: %w", ifaceName, wrapExitErr(err)) } - return file, nil + return nil +} + +// addRouteInNetNS adds a route to the given network namespace. +func addRouteInNetNS(netNS *os.File, route []string) error { + _, err := commandInNetNS(netNS, "ip", append([]string{"route", "add"}, route...)).Output() + if err != nil { + return xerrors.Errorf("add route %q in netns: %w", route, wrapExitErr(err)) + } + + return nil +} + +func commandInNetNS(netNS *os.File, bin string, args []string) *exec.Cmd { + //nolint:gosec + cmd := exec.Command("nsenter", append([]string{"--net=/proc/self/fd/3", bin}, args...)...) 
+ cmd.ExtraFiles = []*os.File{netNS} + return cmd +} + +func wrapExitErr(err error) error { + if err == nil { + return nil + } + + var exitErr *exec.ExitError + if xerrors.As(err, &exitErr) { + return xerrors.Errorf("output: %s\n\n%w", bytes.TrimSpace(exitErr.Stderr), exitErr) + } + return err } diff --git a/tailnet/test/integration/suite.go b/tailnet/test/integration/suite.go new file mode 100644 index 0000000000000..54fb0856a21af --- /dev/null +++ b/tailnet/test/integration/suite.go @@ -0,0 +1,31 @@ +//go:build linux +// +build linux + +package integration + +import ( + "net/url" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/tailnet" + "github.com/coder/coder/v2/testutil" +) + +// TODO: instead of reusing one conn for each suite, maybe we should make a new +// one for each subtest? +func TestSuite(t *testing.T, _ slog.Logger, _ *url.URL, _, peerID uuid.UUID, conn *tailnet.Conn) { + t.Parallel() + + t.Run("Connectivity", func(t *testing.T) { + t.Parallel() + peerIP := tailnet.IPFromUUID(peerID) + _, _, _, err := conn.Ping(testutil.Context(t, testutil.WaitLong), peerIP) + require.NoError(t, err, "ping peer") + }) + + // TODO: more +} From 3e77f5b512ef54f2dce5d76f3964a6a2cafa7819 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Mon, 6 May 2024 11:17:19 +0300 Subject: [PATCH 007/149] chore(docs): replace git-auth with external-auth (#13167) --- docs/admin/external-auth.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/admin/external-auth.md b/docs/admin/external-auth.md index 5b6e015fe789f..05e7821de7042 100644 --- a/docs/admin/external-auth.md +++ b/docs/admin/external-auth.md @@ -333,5 +333,5 @@ EOF ``` See the -[Terraform provider documentation](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/git_auth) +[Terraform provider 
documentation](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/external_auth) for all available options. From 7c3ec51997117d3a71257720678f62e8c3289a31 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Mon, 6 May 2024 11:34:21 +0300 Subject: [PATCH 008/149] docs(admin/external-auth.md): add JFrog Artifactory guide (#13166) --- docs/admin/external-auth.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/admin/external-auth.md b/docs/admin/external-auth.md index 05e7821de7042..bb905200532f1 100644 --- a/docs/admin/external-auth.md +++ b/docs/admin/external-auth.md @@ -178,6 +178,12 @@ CODER_EXTERNAL_AUTH_0_REGEX=github\.company\.org > Note: The `REGEX` variable must be set if using a custom git domain. +### JFrog Artifactory + +See +[this](https://coder.com/docs/v2/latest/guides/artifactory-integration#jfrog-oauth) +guide on instructions on how to set up for JFrog Artifactory. + ### Custom scopes Optionally, you can request custom scopes: From 2efb46a10e3368dd5e430ae3121ea2378b2abcd9 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 6 May 2024 14:33:16 +0400 Subject: [PATCH 009/149] chore: remove superfluous context.Canceled handling (#13140) Removes a check for `context.Canceled` inside the `handleManifest` routine. This checking is handled in the `apiConnRoutineManager`, so checking inside the handler is redundant. 
--- agent/agent.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/agent/agent.go b/agent/agent.go index b6eb60519cf3b..abaaed4c313c0 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -903,9 +903,6 @@ func (a *agent) handleManifest(manifestOK chan<- struct{}) func(ctx context.Cont Subsystems: subsys, }}) if err != nil { - if xerrors.Is(err, context.Canceled) { - return nil - } return xerrors.Errorf("update workspace agent startup: %w", err) } From d51c6912a724f5e433800ac4b414c76635a45ebc Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 6 May 2024 14:47:41 +0400 Subject: [PATCH 010/149] fix: make handleManifest always signal dependents (#13141) Fixes #13139 Using a bare channel to signal dependent goroutines means that we can only signal success, not failure, which leads to deadlock if we fail in a way that doesn't cause the whole `apiConnRoutineManager` to tear down routines. Instead, we use a new object called a `checkpoint` that signals success or failure, so that dependent routines get unblocked if the routine they depend on fails. 
--- agent/agent.go | 66 ++++++++++++++++--------------- agent/checkpoint.go | 51 ++++++++++++++++++++++++ agent/checkpoint_internal_test.go | 49 +++++++++++++++++++++++ 3 files changed, 134 insertions(+), 32 deletions(-) create mode 100644 agent/checkpoint.go create mode 100644 agent/checkpoint_internal_test.go diff --git a/agent/agent.go b/agent/agent.go index abaaed4c313c0..8125bbc5f70d6 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -807,23 +807,21 @@ func (a *agent) run() (retErr error) { // coordination <--------------------------+ // derp map subscriber <----------------+ // stats report loop <---------------+ - networkOK := make(chan struct{}) - manifestOK := make(chan struct{}) + networkOK := newCheckpoint(a.logger) + manifestOK := newCheckpoint(a.logger) connMan.start("handle manifest", gracefulShutdownBehaviorStop, a.handleManifest(manifestOK)) connMan.start("app health reporter", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { - select { - case <-ctx.Done(): - return nil - case <-manifestOK: - manifest := a.manifest.Load() - NewWorkspaceAppHealthReporter( - a.logger, manifest.Apps, agentsdk.AppHealthPoster(proto.NewDRPCAgentClient(conn)), - )(ctx) - return nil + if err := manifestOK.wait(ctx); err != nil { + return xerrors.Errorf("no manifest: %w", err) } + manifest := a.manifest.Load() + NewWorkspaceAppHealthReporter( + a.logger, manifest.Apps, agentsdk.AppHealthPoster(proto.NewDRPCAgentClient(conn)), + )(ctx) + return nil }) connMan.start("create or update network", gracefulShutdownBehaviorStop, @@ -831,10 +829,8 @@ func (a *agent) run() (retErr error) { connMan.start("coordination", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { - select { - case <-ctx.Done(): - return nil - case <-networkOK: + if err := networkOK.wait(ctx); err != nil { + return xerrors.Errorf("no network: %w", err) } return a.runCoordinator(ctx, conn, a.network) }, @@ -842,10 +838,8 @@ func (a *agent) run() 
(retErr error) { connMan.start("derp map subscriber", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { - select { - case <-ctx.Done(): - return nil - case <-networkOK: + if err := networkOK.wait(ctx); err != nil { + return xerrors.Errorf("no network: %w", err) } return a.runDERPMapSubscriber(ctx, conn, a.network) }) @@ -853,10 +847,8 @@ func (a *agent) run() (retErr error) { connMan.start("fetch service banner loop", gracefulShutdownBehaviorStop, a.fetchServiceBannerLoop) connMan.start("stats report loop", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { - select { - case <-ctx.Done(): - return nil - case <-networkOK: + if err := networkOK.wait(ctx); err != nil { + return xerrors.Errorf("no network: %w", err) } return a.statsReporter.reportLoop(ctx, proto.NewDRPCAgentClient(conn)) }) @@ -865,8 +857,17 @@ func (a *agent) run() (retErr error) { } // handleManifest returns a function that fetches and processes the manifest -func (a *agent) handleManifest(manifestOK chan<- struct{}) func(ctx context.Context, conn drpc.Conn) error { +func (a *agent) handleManifest(manifestOK *checkpoint) func(ctx context.Context, conn drpc.Conn) error { return func(ctx context.Context, conn drpc.Conn) error { + var ( + sentResult = false + err error + ) + defer func() { + if !sentResult { + manifestOK.complete(err) + } + }() aAPI := proto.NewDRPCAgentClient(conn) mp, err := aAPI.GetManifest(ctx, &proto.GetManifestRequest{}) if err != nil { @@ -907,7 +908,8 @@ func (a *agent) handleManifest(manifestOK chan<- struct{}) func(ctx context.Cont } oldManifest := a.manifest.Swap(&manifest) - close(manifestOK) + manifestOK.complete(nil) + sentResult = true // The startup script should only execute on the first run! 
if oldManifest == nil { @@ -968,14 +970,15 @@ func (a *agent) handleManifest(manifestOK chan<- struct{}) func(ctx context.Cont // createOrUpdateNetwork waits for the manifest to be set using manifestOK, then creates or updates // the tailnet using the information in the manifest -func (a *agent) createOrUpdateNetwork(manifestOK <-chan struct{}, networkOK chan<- struct{}) func(context.Context, drpc.Conn) error { - return func(ctx context.Context, _ drpc.Conn) error { - select { - case <-ctx.Done(): - return nil - case <-manifestOK: +func (a *agent) createOrUpdateNetwork(manifestOK, networkOK *checkpoint) func(context.Context, drpc.Conn) error { + return func(ctx context.Context, _ drpc.Conn) (retErr error) { + if err := manifestOK.wait(ctx); err != nil { + return xerrors.Errorf("no manifest: %w", err) } var err error + defer func() { + networkOK.complete(retErr) + }() manifest := a.manifest.Load() a.closeMutex.Lock() network := a.network @@ -1011,7 +1014,6 @@ func (a *agent) createOrUpdateNetwork(manifestOK <-chan struct{}, networkOK chan network.SetDERPForceWebSockets(manifest.DERPForceWebSockets) network.SetBlockEndpoints(manifest.DisableDirectConnections) } - close(networkOK) return nil } } diff --git a/agent/checkpoint.go b/agent/checkpoint.go new file mode 100644 index 0000000000000..3f6c7b2c6d299 --- /dev/null +++ b/agent/checkpoint.go @@ -0,0 +1,51 @@ +package agent + +import ( + "context" + "runtime" + "sync" + + "cdr.dev/slog" +) + +// checkpoint allows a goroutine to communicate when it is OK to proceed beyond some async condition +// to other dependent goroutines. +type checkpoint struct { + logger slog.Logger + mu sync.Mutex + called bool + done chan struct{} + err error +} + +// complete the checkpoint. Pass nil to indicate the checkpoint was ok. It is an error to call this +// more than once. 
+func (c *checkpoint) complete(err error) { + c.mu.Lock() + defer c.mu.Unlock() + if c.called { + b := make([]byte, 2048) + n := runtime.Stack(b, false) + c.logger.Critical(context.Background(), "checkpoint complete called more than once", slog.F("stacktrace", b[:n])) + return + } + c.called = true + c.err = err + close(c.done) +} + +func (c *checkpoint) wait(ctx context.Context) error { + select { + case <-ctx.Done(): + return ctx.Err() + case <-c.done: + return c.err + } +} + +func newCheckpoint(logger slog.Logger) *checkpoint { + return &checkpoint{ + logger: logger, + done: make(chan struct{}), + } +} diff --git a/agent/checkpoint_internal_test.go b/agent/checkpoint_internal_test.go new file mode 100644 index 0000000000000..17567a0e3c587 --- /dev/null +++ b/agent/checkpoint_internal_test.go @@ -0,0 +1,49 @@ +package agent + +import ( + "testing" + + "github.com/stretchr/testify/require" + "golang.org/x/xerrors" + + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/testutil" +) + +func TestCheckpoint_CompleteWait(t *testing.T) { + t.Parallel() + logger := slogtest.Make(t, nil) + ctx := testutil.Context(t, testutil.WaitShort) + uut := newCheckpoint(logger) + err := xerrors.New("test") + uut.complete(err) + got := uut.wait(ctx) + require.Equal(t, err, got) +} + +func TestCheckpoint_CompleteTwice(t *testing.T) { + t.Parallel() + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}) + ctx := testutil.Context(t, testutil.WaitShort) + uut := newCheckpoint(logger) + err := xerrors.New("test") + uut.complete(err) + uut.complete(nil) // drops CRITICAL log + got := uut.wait(ctx) + require.Equal(t, err, got) +} + +func TestCheckpoint_WaitComplete(t *testing.T) { + t.Parallel() + logger := slogtest.Make(t, nil) + ctx := testutil.Context(t, testutil.WaitShort) + uut := newCheckpoint(logger) + err := xerrors.New("test") + errCh := make(chan error, 1) + go func() { + errCh <- uut.wait(ctx) + }() + uut.complete(err) + got := testutil.RequireRecvCtx(ctx, 
t, errCh) + require.Equal(t, err, got) +} From e76b5950527aa7beeaffc8a87a2a83f1c078cd46 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 6 May 2024 15:00:34 +0400 Subject: [PATCH 011/149] fix: use a native websocket.NetConn for agent RPC client (#13142) One cause of #13139 is a peculiar failure mode of `WebsocketNetConn` which causes it to return `context.Canceled` in some circumstances when the underlying websocket fails. We have special processing for that error in the `agent.run()` routine, which is erroneously being triggered. Since we don't actually need the returned context from `WebsocketNetConn`, we can simplify and just use the netConn from the `websocket` library directly. --- codersdk/agentsdk/agentsdk.go | 22 ++++------------------ 1 file changed, 4 insertions(+), 18 deletions(-) diff --git a/codersdk/agentsdk/agentsdk.go b/codersdk/agentsdk/agentsdk.go index 75bec0047eb5b..5dcccca09e350 100644 --- a/codersdk/agentsdk/agentsdk.go +++ b/codersdk/agentsdk/agentsdk.go @@ -5,7 +5,6 @@ import ( "encoding/json" "fmt" "io" - "net" "net/http" "net/http/cookiejar" "net/url" @@ -206,14 +205,11 @@ func (c *Client) ConnectRPC(ctx context.Context) (drpc.Conn, error) { return nil, codersdk.ReadBodyAsError(res) } - _, wsNetConn := codersdk.WebsocketNetConn(ctx, conn, websocket.MessageBinary) + // Set the read limit to 4 MiB -- about the limit for protobufs. This needs to be larger than + // the default because some of our protocols can include large messages like startup scripts. 
+ conn.SetReadLimit(1 << 22) + netConn := websocket.NetConn(ctx, conn, websocket.MessageBinary) - netConn := &closeNetConn{ - Conn: wsNetConn, - closeFunc: func() { - _ = conn.Close(websocket.StatusGoingAway, "ConnectRPC closed") - }, - } config := yamux.DefaultConfig() config.LogOutput = nil config.Logger = slog.Stdlib(ctx, c.SDK.Logger(), slog.LevelInfo) @@ -618,13 +614,3 @@ func LogsNotifyChannel(agentID uuid.UUID) string { type LogsNotifyMessage struct { CreatedAfter int64 `json:"created_after"` } - -type closeNetConn struct { - net.Conn - closeFunc func() -} - -func (c *closeNetConn) Close() error { - c.closeFunc() - return c.Conn.Close() -} From 619ec927e9b7c10cb41e5e0d910c7e64bd9aeff9 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 6 May 2024 14:14:38 +0300 Subject: [PATCH 012/149] test(coderd/database): fix DST issue in dbpurge test (#13170) Fixes #13165 --- coderd/database/dbpurge/dbpurge_test.go | 40 +++++++++++++++---------- 1 file changed, 25 insertions(+), 15 deletions(-) diff --git a/coderd/database/dbpurge/dbpurge_test.go b/coderd/database/dbpurge/dbpurge_test.go index 1a90cb9da750d..29f8dd9b80999 100644 --- a/coderd/database/dbpurge/dbpurge_test.go +++ b/coderd/database/dbpurge/dbpurge_test.go @@ -1,8 +1,12 @@ package dbpurge_test import ( + "bufio" + "bytes" "context" "database/sql" + "encoding/json" + "fmt" "testing" "time" @@ -41,9 +45,6 @@ func TestPurge(t *testing.T) { func TestDeleteOldWorkspaceAgentStats(t *testing.T) { t.Parallel() - // https://github.com/coder/coder/issues/13165 - t.Skip() - db, _ := dbtestutil.NewDB(t) logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) @@ -53,21 +54,27 @@ func TestDeleteOldWorkspaceAgentStats(t *testing.T) { if t.Failed() { t.Logf("Test failed, printing rows...") ctx := testutil.Context(t, testutil.WaitShort) + buf := &bytes.Buffer{} + enc := json.NewEncoder(buf) + enc.SetIndent("", "\t") wasRows, err := db.GetWorkspaceAgentStats(ctx, now.AddDate(0, -7, 
0)) if err == nil { - for _, row := range wasRows { - t.Logf("workspace agent stat: %v", row) - } + _, _ = fmt.Fprintf(buf, "workspace agent stats: ") + _ = enc.Encode(wasRows) } tusRows, err := db.GetTemplateUsageStats(context.Background(), database.GetTemplateUsageStatsParams{ StartTime: now.AddDate(0, -7, 0), EndTime: now, }) if err == nil { - for _, row := range tusRows { - t.Logf("template usage stat: %v", row) - } + _, _ = fmt.Fprintf(buf, "template usage stats: ") + _ = enc.Encode(tusRows) + } + s := bufio.NewScanner(buf) + for s.Scan() { + t.Log(s.Text()) } + _ = s.Err() } }() @@ -75,28 +82,31 @@ func TestDeleteOldWorkspaceAgentStats(t *testing.T) { defer cancel() // given + // Note: We use increments of 2 hours to ensure we avoid any DST + // conflicts, verifying DST behavior is beyond the scope of this + // test. // Let's use RxBytes to identify stat entries. - // Stat inserted 6 months + 1 hour ago, should be deleted. + // Stat inserted 6 months + 2 hour ago, should be deleted. first := dbgen.WorkspaceAgentStat(t, db, database.WorkspaceAgentStat{ - CreatedAt: now.AddDate(0, -6, 0).Add(-time.Hour), + CreatedAt: now.AddDate(0, -6, 0).Add(-2 * time.Hour), ConnectionCount: 1, ConnectionMedianLatencyMS: 1, RxBytes: 1111, SessionCountSSH: 1, }) - // Stat inserted 6 months - 1 hour ago, should not be deleted before rollup. + // Stat inserted 6 months - 2 hour ago, should not be deleted before rollup. second := dbgen.WorkspaceAgentStat(t, db, database.WorkspaceAgentStat{ - CreatedAt: now.AddDate(0, -6, 0).Add(time.Hour), + CreatedAt: now.AddDate(0, -6, 0).Add(2 * time.Hour), ConnectionCount: 1, ConnectionMedianLatencyMS: 1, RxBytes: 2222, SessionCountSSH: 1, }) - // Stat inserted 6 months - 1 day - 2 hour ago, should not be deleted at all. + // Stat inserted 6 months - 1 day - 4 hour ago, should not be deleted at all. 
third := dbgen.WorkspaceAgentStat(t, db, database.WorkspaceAgentStat{ - CreatedAt: now.AddDate(0, -6, 0).AddDate(0, 0, 1).Add(2 * time.Hour), + CreatedAt: now.AddDate(0, -6, 0).AddDate(0, 0, 1).Add(4 * time.Hour), ConnectionCount: 1, ConnectionMedianLatencyMS: 1, RxBytes: 3333, From deee9492e3375672ffe88418f5778da297192d29 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 6 May 2024 16:48:19 +0400 Subject: [PATCH 013/149] Revert "fix: install openrc service on alpine (#12294) (#12870)" (#13178) This reverts commit b20c63c1852d1abd597c06d8a0e1d6a87f29ca88. --- scripts/linux-pkg/coder-openrc | 38 ------------------ .../linux-pkg/coder-workspace-proxy-openrc | 39 ------------------- scripts/linux-pkg/nfpm-alpine.yaml | 29 -------------- scripts/package.sh | 9 +---- 4 files changed, 1 insertion(+), 114 deletions(-) delete mode 100755 scripts/linux-pkg/coder-openrc delete mode 100755 scripts/linux-pkg/coder-workspace-proxy-openrc delete mode 100644 scripts/linux-pkg/nfpm-alpine.yaml diff --git a/scripts/linux-pkg/coder-openrc b/scripts/linux-pkg/coder-openrc deleted file mode 100755 index d7b5800f05c44..0000000000000 --- a/scripts/linux-pkg/coder-openrc +++ /dev/null @@ -1,38 +0,0 @@ -#!/sbin/openrc-run -name=coder -description="Coder - Self-hosted developer workspaces on your infra" -document="https://coder.com/docs/coder-oss" - -depend() { - need net - after net-online - use dns logger -} - -checkpath --directory --owner coder:coder --mode 0700 /var/cache/coder - -start_pre() { - if [ ! 
-f /etc/coder.d/coder.env ]; then - eerror "/etc/coder.d/coder.env file does not exist" - return 1 - fi - # Read and export environment variables ignoring comment lines and blank lines - while IFS= read -r line; do - # Skip blank or comment lines - if [ -z "$line" ] || [[ "$line" =~ ^# ]]; then - continue - fi - export "$line" - done < /etc/coder.d/coder.env -} - -command="/usr/bin/coder" -command_args="server" -command_user="coder:coder" -command_background="yes" -pidfile="/run/coder.pid" - -restart="always" -restart_delay="5" - -stop_timeout="90" diff --git a/scripts/linux-pkg/coder-workspace-proxy-openrc b/scripts/linux-pkg/coder-workspace-proxy-openrc deleted file mode 100755 index 867812f4bd66e..0000000000000 --- a/scripts/linux-pkg/coder-workspace-proxy-openrc +++ /dev/null @@ -1,39 +0,0 @@ -#!/sbin/openrc-run -name=coder-workspace-proxy -description="Coder - external workspace proxy server" -document="https://coder.com/docs/coder-oss" - -depend() { - need net - after net-online - use dns logger -} - -checkpath --directory --owner coder:coder --mode 0700 /var/cache/coder - -start_pre() { - if [ ! 
-f /etc/coder.d/coder-workspace-proxy.env ]; then - eerror "/etc/coder.d/coder-workspace-proxy.env file does not exist" - return 1 - fi - - # Read and export environment variables ignoring comment lines and blank lines - while IFS= read -r line; do - # Skip blank or comment lines - if [ -z "$line" ] || [[ "$line" =~ ^# ]]; then - continue - fi - export "$line" - done < /etc/coder.d/coder-workspace-proxy.env -} - -command="/usr/bin/coder" -command_args="workspace-proxy server" -command_user="coder:coder" -command_background="yes" -pidfile="/run/coder-workspace-proxy.pid" - -restart="always" -restart_delay="5" - -stop_timeout="90" diff --git a/scripts/linux-pkg/nfpm-alpine.yaml b/scripts/linux-pkg/nfpm-alpine.yaml deleted file mode 100644 index ab174a6c873bd..0000000000000 --- a/scripts/linux-pkg/nfpm-alpine.yaml +++ /dev/null @@ -1,29 +0,0 @@ -name: coder -platform: linux -arch: "${GOARCH}" -version: "${CODER_VERSION}" -version_schema: semver -release: 1 - -vendor: Coder -homepage: https://coder.com -maintainer: Coder -description: | - Provision development environments with infrastructure with code -license: AGPL-3.0 -suggests: - - postgresql - -scripts: - preinstall: preinstall.sh - -contents: - - src: coder - dst: /usr/bin/coder - - src: coder.env - dst: /etc/coder.d/coder.env - type: "config|noreplace" - - src: coder-workspace-proxy-openrc - dst: /etc/init.d/coder-workspace-proxy - - src: coder-openrc - dst: /etc/init.d/coder diff --git a/scripts/package.sh b/scripts/package.sh index eb62544da9a6a..86e43a2613c83 100755 --- a/scripts/package.sh +++ b/scripts/package.sh @@ -90,16 +90,9 @@ ln "$(realpath scripts/linux-pkg/nfpm.yaml)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/nfpm-alpine.yaml)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/preinstall.sh)" "$temp_dir/" -nfpm_config_file="nfpm.yaml" - -# Use nfpm-alpine.yaml when building for Alpine (OpenRC). 
-if [[ "$format" == "apk" ]]; then - nfpm_config_file="nfpm-alpine.yaml" -fi - pushd "$temp_dir" GOARCH="$arch" CODER_VERSION="$version" nfpm package \ - -f "$nfpm_config_file" \ + -f nfpm.yaml \ -p "$format" \ -t "$output_path" \ 1>&2 From dfd27f559ec2ec15e5b49a43aaf0e7792e430984 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 6 May 2024 17:13:24 +0400 Subject: [PATCH 014/149] Revert "chore: fix build ci (#13164)" (#13180) This reverts commit 886a97b4259d1028a29c7b77a3910e9c457b2be6. --- scripts/package.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/package.sh b/scripts/package.sh index 86e43a2613c83..8afbf5d608ea9 100755 --- a/scripts/package.sh +++ b/scripts/package.sh @@ -87,7 +87,6 @@ ln "$(realpath coder.env)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/coder-workspace-proxy.service)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/coder.service)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/nfpm.yaml)" "$temp_dir/" -ln "$(realpath scripts/linux-pkg/nfpm-alpine.yaml)" "$temp_dir/" ln "$(realpath scripts/linux-pkg/preinstall.sh)" "$temp_dir/" pushd "$temp_dir" From e7c87a806b60156fc03233fad6973aa87fad1886 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 May 2024 22:43:17 +0300 Subject: [PATCH 015/149] ci: bump the github-actions group with 2 updates (#13177) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2314763cae0e2..476872af20d55 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -142,7 +142,7 @@ jobs: # Check for any typos - name: Check for typos - uses: crate-ci/typos@v1.20.10 + uses: crate-ci/typos@v1.21.0 with: config: .github/workflows/typos.toml @@ -861,7 +861,7 @@ jobs: - name: "Dependency Review" id: review # TODO: Replace this with the latest 
release once https://github.com/actions/dependency-review-action/pull/761 is merged. - uses: actions/dependency-review-action@49fbbe0acb033b7824f26d00b005d7d598d76301 + uses: actions/dependency-review-action@82ab8f69c78827a746628706b5d2c3f87231fd4c with: allow-licenses: Apache-2.0, BSD-2-Clause, BSD-3-Clause, CC0-1.0, ISC, MIT, MIT-0, MPL-2.0 allow-dependencies-licenses: "pkg:golang/github.com/pelletier/go-toml/v2" From 05facc971be1b19518508ee40839987ca417a628 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Mon, 6 May 2024 23:06:21 +0300 Subject: [PATCH 016/149] ci: sync terraform version (#13187) --- .github/actions/setup-tf/action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index cca5d1b2e0eae..576b7d7738287 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@v3 with: - terraform_version: 1.5.7 + terraform_version: 1.6.6 terraform_wrapper: false From 3e3118794f1e07eeff49665c83a80bd2f23debe9 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 6 May 2024 18:21:20 -0400 Subject: [PATCH 017/149] chore: add build targets to nix flake (#13186) * chore: add build targets to nix flake Enables `nix build github:coder/coder#main`! 
* Fix all packages * Add back pnpm * Update flake.nix Co-authored-by: Asher * Remove yarn * fmt --------- Co-authored-by: Asher --- .github/workflows/ci.yaml | 23 ++++++++++ flake.lock | 71 ++++++++++++++++++++++++++++- flake.nix | 94 ++++++++++++++++++++++++++++++--------- scripts/lib.sh | 3 ++ scripts/update-flake.sh | 16 +++++++ site/package.json | 2 +- 6 files changed, 185 insertions(+), 24 deletions(-) create mode 100755 scripts/update-flake.sh diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 476872af20d55..2ce41c5088a04 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -37,6 +37,7 @@ jobs: k8s: ${{ steps.filter.outputs.k8s }} ci: ${{ steps.filter.outputs.ci }} db: ${{ steps.filter.outputs.db }} + gomod: ${{ steps.filter.outputs.gomod }} offlinedocs-only: ${{ steps.filter.outputs.offlinedocs_count == steps.filter.outputs.all_count }} offlinedocs: ${{ steps.filter.outputs.offlinedocs }} steps: @@ -90,6 +91,9 @@ jobs: - "scaletest/**" - "tailnet/**" - "testutil/**" + gomod: + - "go.mod" + - "go.sum" ts: - "site/**" - "Makefile" @@ -108,6 +112,25 @@ jobs: run: | echo "${{ toJSON(steps.filter )}}" + update-flake: + needs: changes + if: needs.changes.outputs.gomod == 'true' + runs-on: ${{ github.repository_owner == 'coder' && 'buildjet-8vcpu-ubuntu-2204' || 'ubuntu-latest' }} + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Setup Go + uses: ./.github/actions/setup-go + + - name: Update Nix Flake SRI Hash + run: ./scripts/update-flake.sh + + - name: Ensure No Changes + run: git diff --exit-code + lint: needs: changes if: needs.changes.outputs.offlinedocs-only == 'false' || needs.changes.outputs.ci == 'true' || github.ref == 'refs/heads/main' diff --git a/flake.lock b/flake.lock index 2bbf4252756b6..8a460beeb9782 100644 --- a/flake.lock +++ b/flake.lock @@ -56,6 +56,24 @@ "type": "github" } }, + "flake-utils_3": { + "inputs": { + "systems": "systems_3" + }, + "locked": { + 
"lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, "nixpkgs": { "locked": { "lastModified": 1681823821, @@ -87,11 +105,47 @@ "type": "github" } }, + "nixpkgs_3": { + "locked": { + "lastModified": 1714906307, + "narHash": "sha256-UlRZtrCnhPFSJlDQE7M0eyhgvuuHBTe1eJ9N9AQlJQ0=", + "owner": "nixos", + "repo": "nixpkgs", + "rev": "25865a40d14b3f9cf19f19b924e2ab4069b09588", + "type": "github" + }, + "original": { + "owner": "nixos", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "pnpm2nix": { + "inputs": { + "flake-utils": "flake-utils_3", + "nixpkgs": "nixpkgs_3" + }, + "locked": { + "lastModified": 1706694632, + "narHash": "sha256-ytyTwNPiUR8aq74QlxFI+Wv3MyvXz5POO1xZxQIoi0c=", + "owner": "nzbr", + "repo": "pnpm2nix-nzbr", + "rev": "0366b7344171accc2522525710e52a8abbf03579", + "type": "github" + }, + "original": { + "owner": "nzbr", + "repo": "pnpm2nix-nzbr", + "type": "github" + } + }, "root": { "inputs": { "drpc": "drpc", "flake-utils": "flake-utils_2", - "nixpkgs": "nixpkgs_2" + "nixpkgs": "nixpkgs_2", + "pnpm2nix": "pnpm2nix" } }, "systems": { @@ -123,6 +177,21 @@ "repo": "default", "type": "github" } + }, + "systems_3": { + "locked": { + "lastModified": 1681028828, + "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", + "owner": "nix-systems", + "repo": "default", + "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", + "type": "github" + }, + "original": { + "owner": "nix-systems", + "repo": "default", + "type": "github" + } } }, "root": "root", diff --git a/flake.nix b/flake.nix index c0407c9e18063..510a2ea8dad53 100644 --- a/flake.nix +++ b/flake.nix @@ -4,25 +4,29 @@ inputs = { nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; flake-utils.url = 
"github:numtide/flake-utils"; + pnpm2nix.url = "github:nzbr/pnpm2nix-nzbr"; drpc.url = "github:storj/drpc/v0.0.33"; }; - outputs = { self, nixpkgs, flake-utils, drpc }: + outputs = { self, nixpkgs, flake-utils, drpc, pnpm2nix }: flake-utils.lib.eachDefaultSystem (system: let # Workaround for: terraform has an unfree license (‘bsl11’), refusing to evaluate. pkgs = import nixpkgs { inherit system; config.allowUnfree = true; }; - formatter = pkgs.nixpkgs-fmt; nodejs = pkgs.nodejs-18_x; - yarn = pkgs.yarn.override { inherit nodejs; }; # Check in https://search.nixos.org/packages to find new packages. # Use `nix --extra-experimental-features nix-command --extra-experimental-features flakes flake update` # to update the lock file if packages are out-of-date. # From https://nixos.wiki/wiki/Google_Cloud_SDK - gdk = pkgs.google-cloud-sdk.withExtraComponents ([pkgs.google-cloud-sdk.components.gke-gcloud-auth-plugin]); + gdk = pkgs.google-cloud-sdk.withExtraComponents ([ pkgs.google-cloud-sdk.components.gke-gcloud-auth-plugin ]); + # The minimal set of packages to build Coder. devShellPackages = with pkgs; [ + # google-chrome is not available on OSX + (if pkgs.stdenv.hostPlatform.isDarwin then null else google-chrome) + # strace is not available on OSX + (if pkgs.stdenv.hostPlatform.isDarwin then null else strace) bat cairo curl @@ -31,15 +35,13 @@ gcc gdk getopt - git gh + git gnumake gnused go_1_21 go-migrate golangci-lint - # google-chrome is not available on OSX - (if pkgs.stdenv.hostPlatform.isDarwin then null else google-chrome) gopls gotestsum jq @@ -47,8 +49,6 @@ kubectx kubernetes-helm less - # Needed for many LD system libs! - util-linux mockgen nfpm nodejs @@ -67,34 +67,84 @@ shellcheck shfmt sqlc - # strace is not available on OSX - (if pkgs.stdenv.hostPlatform.isDarwin then null else strace) terraform typos + # Needed for many LD system libs! 
+ util-linux vim wget - yarn yq-go zip zsh zstd ]; - allPackages = pkgs.buildEnv { - name = "all-packages"; - paths = devShellPackages; + # buildSite packages the site directory. + buildSite = pnpm2nix.packages.${system}.mkPnpmPackage { + src = ./site/.; + # Required for the `canvas` package! + extraBuildInputs = with pkgs; [ pkgs.cairo pkgs.pango pkgs.pixman ]; + installInPlace = true; + distDir = "out"; }; + + version = "v0.0.0-nix-${self.shortRev or self.dirtyShortRev}"; + + # To make faster subsequent builds, you could extract the `.zst` + # slim bundle into it's own derivation. + buildFat = osArch: + pkgs.buildGo121Module { + name = "coder-${osArch}"; + # Updated with ./scripts/update-flake.nix`. + # This should be updated whenever go.mod changes! + vendorHash = "sha256-pTRr85MtdlsI0iYGAwLAQ3QvtrDR8rDOynYx8FDaRy0="; + proxyVendor = true; + src = ./.; + nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; + preBuild = '' + # Replaces /usr/bin/env with an absolute path to the interpreter. + patchShebangs ./scripts + ''; + buildPhase = '' + runHook preBuild + + # Unpack the site contents. + mkdir -p ./site/out + cp -r ${buildSite.out}/* ./site/out + + # Build and copy the binary! + export CODER_FORCE_VERSION=${version} + make -j build/coder_${osArch} + ''; + installPhase = '' + mkdir -p $out/bin + cp -r ./build/coder_${osArch} $out/bin/coder + ''; + }; in { - defaultPackage = formatter; # or replace it with your desired default package. 
devShell = pkgs.mkShell { - buildInputs = devShellPackages; - shellHook = '' - export PLAYWRIGHT_BROWSERS_PATH=${pkgs.playwright-driver.browsers} - export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true - ''; + buildInputs = devShellPackages; + shellHook = '' + export PLAYWRIGHT_BROWSERS_PATH=${pkgs.playwright-driver.browsers} + export PLAYWRIGHT_SKIP_VALIDATE_HOST_REQUIREMENTS=true + ''; + }; + packages = { + all = pkgs.buildEnv { + name = "all-packages"; + paths = devShellPackages; + }; + site = buildSite; + + # Copying `OS_ARCHES` from the Makefile. + linux_amd64 = buildFat "linux_amd64"; + linux_arm64 = buildFat "linux_arm64"; + darwin_amd64 = buildFat "darwin_amd64"; + darwin_arm64 = buildFat "darwin_arm64"; + windows_amd64 = buildFat "windows_amd64.exe"; + windows_arm64 = buildFat "windows_arm64.exe"; }; - packages.all = allPackages; } ); } diff --git a/scripts/lib.sh b/scripts/lib.sh index 2839de2dbf9fa..78ec22d503fbf 100644 --- a/scripts/lib.sh +++ b/scripts/lib.sh @@ -43,6 +43,9 @@ SCRIPT="${BASH_SOURCE[1]:-${BASH_SOURCE[0]}}" SCRIPT_DIR="$(realpath "$(dirname "$SCRIPT")")" function project_root { + # Nix sets $src in derivations! + [[ -n "${src:-}" ]] && echo "$src" && return + # Try to use `git rev-parse --show-toplevel` to find the project root. # If this directory is not a git repository, this command will fail. git rev-parse --show-toplevel 2>/dev/null && return diff --git a/scripts/update-flake.sh b/scripts/update-flake.sh new file mode 100755 index 0000000000000..4094f20032611 --- /dev/null +++ b/scripts/update-flake.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +# Updates SRI hashes for flake.nix. + +set -eu + +cd "$(dirname "${BASH_SOURCE[0]}")/.." + +OUT=$(mktemp -d -t nar-hash-XXXXXX) + +echo "Downloading Go modules..." +GOPATH="$OUT" go mod download +echo "Calculating SRI hash..." 
+HASH=$(go run tailscale.com/cmd/nardump --sri "$OUT/pkg/mod/cache/download") +sudo rm -rf "$OUT" + +sed -i "s/\(vendorHash = \"\)[^\"]*/\1${HASH}/" ./flake.nix diff --git a/site/package.json b/site/package.json index 24ba4d5262902..a5c5fee146578 100644 --- a/site/package.json +++ b/site/package.json @@ -184,6 +184,6 @@ }, "engines": { "npm": ">=9.0.0 <10.0.0", - "node": ">=18.0.0 <19.0.0" + "node": ">=18.0.0 <21.0.0" } } From 6f5c183c809fcf535b23859da9d72d09df56abcb Mon Sep 17 00:00:00 2001 From: Idleite <102099632+Idleite@users.noreply.github.com> Date: Mon, 6 May 2024 18:28:04 -0400 Subject: [PATCH 018/149] docs: show the proper Redirect URI for Gitea (#13162) --- docs/admin/external-auth.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/admin/external-auth.md b/docs/admin/external-auth.md index bb905200532f1..9165c1c67604c 100644 --- a/docs/admin/external-auth.md +++ b/docs/admin/external-auth.md @@ -164,6 +164,9 @@ CODER_EXTERNAL_AUTH_0_CLIENT_SECRET=xxxxxxx CODER_EXTERNAL_AUTH_0_AUTH_URL="https://gitea.com/login/oauth/authorize" ``` +The Redirect URI for Gitea should be +https://coder.company.org/external-auth/gitea/callback + ### Self-managed git providers Custom authentication and token URLs should be used for self-managed Git From b56c9c438f74270425cd57899f9fcf23d2bbc06a Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Tue, 7 May 2024 01:40:18 +0300 Subject: [PATCH 019/149] ci: only send docs-check notifications on schedule (#13191) --- .github/workflows/weekly-docs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/weekly-docs.yaml b/.github/workflows/weekly-docs.yaml index 64c2e9899df92..049b31b85155e 100644 --- a/.github/workflows/weekly-docs.yaml +++ b/.github/workflows/weekly-docs.yaml @@ -29,7 +29,7 @@ jobs: file-path: "./README.md" - name: Send Slack notification - if: failure() && github.event_name != 'workflow_dispatch' + if: failure() && github.event_name == 'schedule' run: | curl -X POST -H 
'Content-type: application/json' -d '{"msg":"Broken links found in the documentation. Please check the logs at ${{ env.LOGS_URL }}"}' ${{ secrets.DOCS_LINK_SLACK_WEBHOOK }} echo "Sent Slack notification" From 5e8f97d8c38dd7caea1875852a0450a5f18946e9 Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Mon, 6 May 2024 20:37:01 -0700 Subject: [PATCH 020/149] chore: add DERP websocket integration tests (#13168) - `DERPForceWebSockets`: Test that DERP over WebSocket (as well as DERPForceWebSockets works). This does not test the actual DERP failure detection code and automatic fallback. - `DERPFallbackWebSockets`: Test that falling back to DERP over WebSocket works. Also: - Rearranges some test code and refactors `TestTopology.StartServer` to be `TestTopology.ServerOptions` and take a struct instead of a function Closes #13045 --- tailnet/test/integration/integration.go | 105 ++++++++++++------ tailnet/test/integration/integration_test.go | 107 ++++++++++++++----- tailnet/test/integration/network.go | 6 +- 3 files changed, 160 insertions(+), 58 deletions(-) diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index ff6552e2d8f49..fdf0d9a37fc6d 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -11,6 +11,7 @@ import ( "net/netip" "net/url" "strconv" + "strings" "sync/atomic" "testing" "time" @@ -39,8 +40,21 @@ var ( Client2ID = uuid.MustParse("00000000-0000-0000-0000-000000000002") ) -// StartServerBasic creates a coordinator and DERP server. -func StartServerBasic(t *testing.T, logger slog.Logger, listenAddr string) { +type ServerOptions struct { + // FailUpgradeDERP will make the DERP server fail to handle the initial DERP + // upgrade in a way that causes the client to fallback to + // DERP-over-WebSocket fallback automatically. + // Incompatible with DERPWebsocketOnly. 
+ FailUpgradeDERP bool + // DERPWebsocketOnly will make the DERP server only accept WebSocket + // connections. If a DERP request is received that is not using WebSocket + // fallback, the test will fail. + // Incompatible with FailUpgradeDERP. + DERPWebsocketOnly bool +} + +//nolint:revive +func (o ServerOptions) Router(t *testing.T, logger slog.Logger) *chi.Mux { coord := tailnet.NewCoordinator(logger) var coordPtr atomic.Pointer[tailnet.Coordinator] coordPtr.Store(&coord) @@ -69,15 +83,38 @@ func StartServerBasic(t *testing.T, logger slog.Logger, listenAddr string) { tracing.StatusWriterMiddleware, httpmw.Logger(logger), ) + r.Route("/derp", func(r chi.Router) { r.Get("/", func(w http.ResponseWriter, r *http.Request) { logger.Info(r.Context(), "start derp request", slog.F("path", r.URL.Path), slog.F("remote_ip", r.RemoteAddr)) + + upgrade := strings.ToLower(r.Header.Get("Upgrade")) + if upgrade != "derp" && upgrade != "websocket" { + http.Error(w, "invalid DERP upgrade header", http.StatusBadRequest) + t.Errorf("invalid DERP upgrade header: %s", upgrade) + return + } + + if o.FailUpgradeDERP && upgrade == "derp" { + // 4xx status codes will cause the client to fallback to + // DERP-over-WebSocket. 
+ http.Error(w, "test derp upgrade failure", http.StatusBadRequest) + return + } + if o.DERPWebsocketOnly && upgrade != "websocket" { + logger.Error(r.Context(), "non-websocket DERP request received", slog.F("path", r.URL.Path), slog.F("remote_ip", r.RemoteAddr)) + http.Error(w, "non-websocket DERP request received", http.StatusBadRequest) + t.Error("non-websocket DERP request received") + return + } + derpHandler.ServeHTTP(w, r) }) r.Get("/latency-check", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) }) }) + r.Get("/api/v2/workspaceagents/{id}/coordinate", func(w http.ResponseWriter, r *http.Request) { ctx := r.Context() idStr := chi.URLParamFromCtx(ctx, "id") @@ -116,28 +153,44 @@ func StartServerBasic(t *testing.T, logger slog.Logger, listenAddr string) { } }) - // We have a custom listen address. - srv := http.Server{ - Addr: listenAddr, - Handler: r, - ReadTimeout: 10 * time.Second, - } - serveDone := make(chan struct{}) - go func() { - defer close(serveDone) - err := srv.ListenAndServe() - if err != nil && !xerrors.Is(err, http.ErrServerClosed) { - t.Error("HTTP server error:", err) - } - }() - t.Cleanup(func() { - _ = srv.Close() - <-serveDone + return r +} + +// StartClientDERP creates a client connection to the server for coordination +// and creates a tailnet.Conn which will only use DERP to connect to the peer. +func StartClientDERP(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { + return startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, + DERPMap: basicDERPMap(t, serverURL), + BlockEndpoints: true, + Logger: logger, + DERPForceWebSockets: false, + // These tests don't have internet connection, so we need to force + // magicsock to do anything. 
+ ForceNetworkUp: true, + }) +} + +// StartClientDERPWebSockets does the same thing as StartClientDERP but will +// only use DERP WebSocket fallback. +func StartClientDERPWebSockets(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { + return startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, + DERPMap: basicDERPMap(t, serverURL), + BlockEndpoints: true, + Logger: logger, + DERPForceWebSockets: true, + // These tests don't have internet connection, so we need to force + // magicsock to do anything. + ForceNetworkUp: true, }) } -// StartClientBasic creates a client connection to the server. -func StartClientBasic(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn { +type ClientStarter struct { + Options *tailnet.Options +} + +func startClientOptions(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID, options *tailnet.Options) *tailnet.Conn { u, err := serverURL.Parse(fmt.Sprintf("/api/v2/workspaceagents/%s/coordinate", myID.String())) require.NoError(t, err) //nolint:bodyclose @@ -156,15 +209,7 @@ func StartClientBasic(t *testing.T, logger slog.Logger, serverURL *url.URL, myID coord, err := client.Coordinate(context.Background()) require.NoError(t, err) - conn, err := tailnet.NewConn(&tailnet.Options{ - Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, - DERPMap: basicDERPMap(t, serverURL), - BlockEndpoints: true, - Logger: logger, - // These tests don't have internet connection, so we need to force - // magicsock to do anything. 
- ForceNetworkUp: true, - }) + conn, err := tailnet.NewConn(options) require.NoError(t, err) t.Cleanup(func() { _ = conn.Close() diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index 76b57fecae651..dcd64b9343846 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -20,6 +20,7 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/require" + "golang.org/x/xerrors" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -68,19 +69,48 @@ func TestMain(m *testing.M) { var topologies = []integration.TestTopology{ { + // Test that DERP over loopback works. Name: "BasicLoopbackDERP", SetupNetworking: integration.SetupNetworkingLoopback, - StartServer: integration.StartServerBasic, - StartClient: integration.StartClientBasic, + ServerOptions: integration.ServerOptions{}, + StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, { + // Test that DERP over "easy" NAT works. The server, client 1 and client + // 2 are on different networks with a shared router, and the router + // masquerades the traffic. Name: "EasyNATDERP", SetupNetworking: integration.SetupNetworkingEasyNAT, - StartServer: integration.StartServerBasic, - StartClient: integration.StartClientBasic, + ServerOptions: integration.ServerOptions{}, + StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, + { + // Test that DERP over WebSocket (as well as DERPForceWebSockets works). + // This does not test the actual DERP failure detection code and + // automatic fallback. + Name: "DERPForceWebSockets", + SetupNetworking: integration.SetupNetworkingEasyNAT, + ServerOptions: integration.ServerOptions{ + FailUpgradeDERP: false, + DERPWebsocketOnly: true, + }, + StartClient: integration.StartClientDERPWebSockets, + RunTests: integration.TestSuite, + }, + { + // Test that falling back to DERP over WebSocket works. 
+ Name: "DERPFallbackWebSockets", + SetupNetworking: integration.SetupNetworkingEasyNAT, + ServerOptions: integration.ServerOptions{ + FailUpgradeDERP: true, + DERPWebsocketOnly: false, + }, + // Use a basic client that will try `Upgrade: derp` first. + StartClient: integration.StartClientDERP, + RunTests: integration.TestSuite, + }, } //nolint:paralleltest,tparallel @@ -101,19 +131,17 @@ func TestIntegration(t *testing.T) { networking := topo.SetupNetworking(t, log) // Fork the three child processes. - serverErrCh, closeServer := startServerSubprocess(t, topo.Name, networking) + closeServer := startServerSubprocess(t, topo.Name, networking) // client1 runs the tests. client1ErrCh, _ := startClientSubprocess(t, topo.Name, networking, 1) - client2ErrCh, closeClient2 := startClientSubprocess(t, topo.Name, networking, 2) + _, closeClient2 := startClientSubprocess(t, topo.Name, networking, 2) // Wait for client1 to exit. require.NoError(t, <-client1ErrCh, "client 1 exited") // Close client2 and the server. 
- closeClient2() - require.NoError(t, <-client2ErrCh, "client 2 exited") - closeServer() - require.NoError(t, <-serverErrCh, "server exited") + require.NoError(t, closeClient2(), "client 2 exited") + require.NoError(t, closeServer(), "server exited") }) } } @@ -138,15 +166,32 @@ func handleTestSubprocess(t *testing.T) { //nolint:parralleltest t.Run(testName, func(t *testing.T) { - log := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) switch *role { case "server": - log = log.Named("server") - topo.StartServer(t, log, *serverListenAddr) + logger = logger.Named("server") + + srv := http.Server{ + Addr: *serverListenAddr, + Handler: topo.ServerOptions.Router(t, logger), + ReadTimeout: 10 * time.Second, + } + serveDone := make(chan struct{}) + go func() { + defer close(serveDone) + err := srv.ListenAndServe() + if err != nil && !xerrors.Is(err, http.ErrServerClosed) { + t.Error("HTTP server error:", err) + } + }() + t.Cleanup(func() { + _ = srv.Close() + <-serveDone + }) // no exit case "client": - log = log.Named(*clientName) + logger = logger.Named(*clientName) serverURL, err := url.Parse(*clientServerURL) require.NoErrorf(t, err, "parse server url %q", *clientServerURL) myID, err := uuid.Parse(*clientMyID) @@ -156,7 +201,7 @@ func handleTestSubprocess(t *testing.T) { waitForServerAvailable(t, serverURL) - conn := topo.StartClient(t, log, serverURL, myID, peerID) + conn := topo.StartClient(t, logger, serverURL, myID, peerID) if *clientRunTests { // Wait for connectivity. 
@@ -165,7 +210,7 @@ func handleTestSubprocess(t *testing.T) { t.Fatalf("peer %v did not become reachable", peerIP) } - topo.RunTests(t, log, serverURL, myID, peerID, conn) + topo.RunTests(t, logger, serverURL, myID, peerID, conn) // then exit return } @@ -206,16 +251,17 @@ func waitForServerAvailable(t *testing.T, serverURL *url.URL) { t.Fatalf("server did not become available after %v", timeout) } -func startServerSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) (<-chan error, func()) { - return startSubprocess(t, "server", networking.ProcessServer.NetNS, []string{ +func startServerSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) func() error { + _, closeFn := startSubprocess(t, "server", networking.ProcessServer.NetNS, []string{ "--subprocess", "--test-name=" + topologyName, "--role=server", "--server-listen-addr=" + networking.ServerListenAddr, }) + return closeFn } -func startClientSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking, clientNumber int) (<-chan error, func()) { +func startClientSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking, clientNumber int) (<-chan error, func() error) { require.True(t, clientNumber == 1 || clientNumber == 2) var ( @@ -247,7 +293,13 @@ func startClientSubprocess(t *testing.T, topologyName string, networking integra return startSubprocess(t, clientName, netNS, flags) } -func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []string) (<-chan error, func()) { +// startSubprocess starts a subprocess with the given flags and returns a +// channel that will receive the error when the subprocess exits. The returned +// function can be used to close the subprocess. +// +// Do not call close then wait on the channel. Use the returned value from the +// function instead in this case. 
+func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []string) (<-chan error, func() error) { name := os.Args[0] // Always use verbose mode since it gets piped to the parent test anyways. args := append(os.Args[1:], append([]string{"-test.v=true"}, flags...)...) @@ -289,15 +341,15 @@ func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []s close(waitErr) }() - closeFn := func() { + closeFn := func() error { _ = cmd.Process.Signal(syscall.SIGTERM) select { case <-time.After(5 * time.Second): _ = cmd.Process.Kill() - case <-waitErr: - return + case err := <-waitErr: + return err } - <-waitErr + return <-waitErr } t.Cleanup(func() { @@ -310,7 +362,7 @@ func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []s default: } - closeFn() + _ = closeFn() }) return waitErr, closeFn @@ -338,6 +390,11 @@ func (w *testWriter) Write(p []byte) (n int, err error) { // then it's a test result line. We want to capture it and log it later. trimmed := strings.TrimSpace(s) if strings.HasPrefix(trimmed, "--- PASS") || strings.HasPrefix(trimmed, "--- FAIL") || trimmed == "PASS" || trimmed == "FAIL" { + // Also fail the test if we see a FAIL line. + if strings.Contains(trimmed, "FAIL") { + w.t.Errorf("subprocess logged test failure: %s: \t%s", w.name, s) + } + w.capturedLines = append(w.capturedLines, s) continue } diff --git a/tailnet/test/integration/network.go b/tailnet/test/integration/network.go index 604d7827cd71d..f36ac637455de 100644 --- a/tailnet/test/integration/network.go +++ b/tailnet/test/integration/network.go @@ -29,9 +29,9 @@ type TestTopology struct { // a network namespace shared for all tests. SetupNetworking func(t *testing.T, logger slog.Logger) TestNetworking - // StartServer gets called in the server subprocess. It's expected to start - // the coordinator server in the background and return. 
- StartServer func(t *testing.T, logger slog.Logger, listenAddr string) + // ServerOptions is the configuration for the server. It's passed to the + // server process. + ServerOptions ServerOptions // StartClient gets called in each client subprocess. It's expected to // create the tailnet.Conn and ensure connectivity to it's peer. StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn From 72f2efe048bb40c72ec7025593c692f07c30536a Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Mon, 6 May 2024 23:07:57 -0700 Subject: [PATCH 021/149] chore: implement easy NAT direct integration test (#13169) --- tailnet/test/integration/integration.go | 37 ++++++++++++++++++++ tailnet/test/integration/integration_test.go | 10 ++++++ 2 files changed, 47 insertions(+) diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index fdf0d9a37fc6d..bc9c3493e38e8 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -32,6 +32,7 @@ import ( "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/tailnet" + "github.com/coder/coder/v2/testutil" ) // IDs used in tests. @@ -186,6 +187,42 @@ func StartClientDERPWebSockets(t *testing.T, logger slog.Logger, serverURL *url. }) } +// StartClientDirect does the same thing as StartClientDERP but disables +// BlockEndpoints (which enables Direct connections), and waits for a direct +// connection to be established between the two peers. 
+func StartClientDirect(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { + conn := startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, + DERPMap: basicDERPMap(t, serverURL), + BlockEndpoints: false, + Logger: logger, + DERPForceWebSockets: true, + // These tests don't have internet connection, so we need to force + // magicsock to do anything. + ForceNetworkUp: true, + }) + + // Wait for direct connection to be established. + peerIP := tailnet.IPFromUUID(peerID) + require.Eventually(t, func() bool { + t.Log("attempting ping to peer to judge direct connection") + ctx := testutil.Context(t, testutil.WaitShort) + _, p2p, pong, err := conn.Ping(ctx, peerIP) + if err != nil { + t.Logf("ping failed: %v", err) + return false + } + if !p2p { + t.Log("ping succeeded, but not direct yet") + return false + } + t.Logf("ping succeeded, direct connection established via %s", pong.Endpoint) + return true + }, testutil.WaitLong, testutil.IntervalMedium) + + return conn +} + type ClientStarter struct { Options *tailnet.Options } diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index dcd64b9343846..0b4cf38fb3a06 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -86,6 +86,16 @@ var topologies = []integration.TestTopology{ StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, + { + // Test that direct over "easy" NAT works. This should use local + // endpoints to connect as routing is enabled between client 1 and + // client 2. 
+ Name: "EasyNATDirect", + SetupNetworking: integration.SetupNetworkingEasyNAT, + ServerOptions: integration.ServerOptions{}, + StartClient: integration.StartClientDirect, + RunTests: integration.TestSuite, + }, { // Test that DERP over WebSocket (as well as DERPForceWebSockets works). // This does not test the actual DERP failure detection code and From 677be9aab2cef9673d12c7f857042208b2c3cc88 Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Mon, 6 May 2024 23:21:17 -0700 Subject: [PATCH 022/149] chore: add tailnet integration test CI job (#13181) --- .github/workflows/ci.yaml | 29 +++++++++++++++++++++++++++++ Makefile | 12 ++++++++++++ 2 files changed, 41 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2ce41c5088a04..f958663167e0a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -40,6 +40,7 @@ jobs: gomod: ${{ steps.filter.outputs.gomod }} offlinedocs-only: ${{ steps.filter.outputs.offlinedocs_count == steps.filter.outputs.all_count }} offlinedocs: ${{ steps.filter.outputs.offlinedocs }} + tailnet-integration: ${{ steps.filter.outputs.tailnet-integration }} steps: - name: Checkout uses: actions/checkout@v4 @@ -107,6 +108,10 @@ jobs: - ".github/workflows/ci.yaml" offlinedocs: - "offlinedocs/**" + tailnet-integration: + - "tailnet/**" + - "go.mod" + - "go.sum" - id: debug run: | @@ -384,6 +389,30 @@ jobs: with: api-key: ${{ secrets.DATADOG_API_KEY }} + # Tailnet integration tests only run when the `tailnet` directory or `go.sum` + # and `go.mod` are changed. These tests are to ensure we don't add regressions + # to tailnet, either due to our code or due to updating dependencies. + # + # These tests are skipped in the main go test jobs because they require root + # and mess with networking. 
+ test-go-tailnet-integration: + runs-on: ${{ github.repository_owner == 'coder' && 'buildjet-8vcpu-ubuntu-2204' || 'ubuntu-latest' }} + needs: changes + # Unnecessary to run on main for now + if: needs.changes.outputs.tailnet-integration == 'true' || needs.changes.outputs.ci == 'true' + timeout-minutes: 20 + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 1 + + - name: Setup Go + uses: ./.github/actions/setup-go + + - name: Run Tests + run: make test-tailnet-integration + test-js: runs-on: ${{ github.repository_owner == 'coder' && 'buildjet-8vcpu-ubuntu-2204' || 'ubuntu-latest' }} needs: changes diff --git a/Makefile b/Makefile index dd31c10fff810..7a04072964ee5 100644 --- a/Makefile +++ b/Makefile @@ -827,6 +827,18 @@ test-race: gotestsum --junitfile="gotests.xml" -- -race -count=1 ./... .PHONY: test-race +test-tailnet-integration: + env \ + CODER_TAILNET_TESTS=true \ + CODER_MAGICSOCK_DEBUG_LOGGING=true \ + TS_DEBUG_NETCHECK=true \ + GOTRACEBACK=single \ + go test \ + -exec "sudo -E" \ + -timeout=5m \ + -count=1 \ + ./tailnet/test/integration + # Note: we used to add this to the test target, but it's not necessary and we can # achieve the desired result by specifying -count=1 in the go test invocation # instead. Keeping it here for convenience. 
From 421c0d124235a4b5042d9e2e2419c397988123ff Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Tue, 7 May 2024 03:17:38 -0500 Subject: [PATCH 023/149] chore: add nginx topology to tailnet tests (#13188) --- .github/workflows/ci.yaml | 4 + codersdk/organizations.go | 2 +- provisionerd/provisionerd_test.go | 2 +- tailnet/test/integration/integration.go | 232 ++++++++++++++++++- tailnet/test/integration/integration_test.go | 157 ++----------- tailnet/test/integration/network.go | 23 -- 6 files changed, 253 insertions(+), 167 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f958663167e0a..f35be17942aa0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -410,6 +410,10 @@ jobs: - name: Setup Go uses: ./.github/actions/setup-go + # Used by some integration tests. + - name: Install Nginx + run: sudo apt-get update && sudo apt-get install -y nginx + - name: Run Tests run: make test-tailnet-integration diff --git a/codersdk/organizations.go b/codersdk/organizations.go index f887d5ea4de5a..441f4774f2441 100644 --- a/codersdk/organizations.go +++ b/codersdk/organizations.go @@ -34,7 +34,7 @@ func ProvisionerTypeValid[T ProvisionerType | string](pt T) error { case string(ProvisionerTypeEcho), string(ProvisionerTypeTerraform): return nil default: - return fmt.Errorf("provisioner type '%s' is not supported", pt) + return xerrors.Errorf("provisioner type '%s' is not supported", pt) } } diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index bca072707f491..b3cf08fc1ca5a 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -611,7 +611,7 @@ func TestProvisionerd(t *testing.T) { server := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { // This is the dial out to Coderd, which in this unit test will always fail. 
connectAttemptedClose.Do(func() { close(connectAttempted) }) - return nil, fmt.Errorf("client connection always fails") + return nil, xerrors.New("client connection always fails") }, provisionerd.LocalProvisioners{ "someprovisioner": createProvisionerClient(t, done, provisionerTestServer{}), }) diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index bc9c3493e38e8..db0f1500e491a 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -7,12 +7,18 @@ import ( "context" "fmt" "io" + "net" "net/http" "net/netip" "net/url" + "os" + "os/exec" + "path/filepath" "strconv" "strings" + "sync" "sync/atomic" + "syscall" "testing" "time" @@ -41,7 +47,34 @@ var ( Client2ID = uuid.MustParse("00000000-0000-0000-0000-000000000002") ) -type ServerOptions struct { +type TestTopology struct { + Name string + // SetupNetworking creates interfaces and network namespaces for the test. + // The most simple implementation is NetworkSetupDefault, which only creates + // a network namespace shared for all tests. + SetupNetworking func(t *testing.T, logger slog.Logger) TestNetworking + + // Server is the server starter for the test. It is executed in the server + // subprocess. + Server ServerStarter + // StartClient gets called in each client subprocess. It's expected to + // create the tailnet.Conn and ensure connectivity to it's peer. + StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn + + // RunTests is the main test function. It's called in each of the client + // subprocesses. If tests can only run once, they should check the client ID + // and return early if it's not the expected one. + RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID, conn *tailnet.Conn) +} + +type ServerStarter interface { + // StartServer should start the server and return once it's listening. 
It + // should not block once it's listening. Cleanup should be handled by + // t.Cleanup. + StartServer(t *testing.T, logger slog.Logger, listenAddr string) +} + +type SimpleServerOptions struct { // FailUpgradeDERP will make the DERP server fail to handle the initial DERP // upgrade in a way that causes the client to fallback to // DERP-over-WebSocket fallback automatically. @@ -54,8 +87,10 @@ type ServerOptions struct { DERPWebsocketOnly bool } +var _ ServerStarter = SimpleServerOptions{} + //nolint:revive -func (o ServerOptions) Router(t *testing.T, logger slog.Logger) *chi.Mux { +func (o SimpleServerOptions) Router(t *testing.T, logger slog.Logger) *chi.Mux { coord := tailnet.NewCoordinator(logger) var coordPtr atomic.Pointer[tailnet.Coordinator] coordPtr.Store(&coord) @@ -157,6 +192,76 @@ func (o ServerOptions) Router(t *testing.T, logger slog.Logger) *chi.Mux { return r } +func (o SimpleServerOptions) StartServer(t *testing.T, logger slog.Logger, listenAddr string) { + srv := http.Server{ + Addr: listenAddr, + Handler: o.Router(t, logger), + ReadTimeout: 10 * time.Second, + } + serveDone := make(chan struct{}) + go func() { + defer close(serveDone) + err := srv.ListenAndServe() + if err != nil && !xerrors.Is(err, http.ErrServerClosed) { + t.Error("HTTP server error:", err) + } + }() + t.Cleanup(func() { + _ = srv.Close() + <-serveDone + }) +} + +type NGINXServerOptions struct { + SimpleServerOptions +} + +var _ ServerStarter = NGINXServerOptions{} + +func (o NGINXServerOptions) StartServer(t *testing.T, logger slog.Logger, listenAddr string) { + host, nginxPortStr, err := net.SplitHostPort(listenAddr) + require.NoError(t, err) + + nginxPort, err := strconv.Atoi(nginxPortStr) + require.NoError(t, err) + + serverPort := nginxPort + 1 + serverListenAddr := net.JoinHostPort(host, strconv.Itoa(serverPort)) + + o.SimpleServerOptions.StartServer(t, logger, serverListenAddr) + startNginx(t, nginxPortStr, serverListenAddr) +} + +func startNginx(t *testing.T, 
listenPort, serverAddr string) { + cfg := `events {} +http { + server { + listen ` + listenPort + `; + server_name _; + location / { + proxy_pass http://` + serverAddr + `; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Host $server_name; + } + } +} +` + + dir := t.TempDir() + cfgPath := filepath.Join(dir, "nginx.conf") + err := os.WriteFile(cfgPath, []byte(cfg), 0o600) + require.NoError(t, err) + + // ExecBackground will handle cleanup. + _, _ = ExecBackground(t, "server.nginx", nil, "nginx", []string{"-c", cfgPath}) +} + // StartClientDERP creates a client connection to the server for coordination // and creates a tailnet.Conn which will only use DERP to connect to the peer. func StartClientDERP(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { @@ -296,3 +401,126 @@ func basicDERPMap(t *testing.T, serverURL *url.URL) *tailcfg.DERPMap { }, } } + +// ExecBackground starts a subprocess with the given flags and returns a +// channel that will receive the error when the subprocess exits. The returned +// function can be used to close the subprocess. +// +// processName is used to identify the subprocess in logs. +// +// Optionally, a network namespace can be passed to run the subprocess in. +// +// Do not call close then wait on the channel. Use the returned value from the +// function instead in this case. +// +// Cleanup is handled automatically if you don't care about monitoring the +// process manually. +func ExecBackground(t *testing.T, processName string, netNS *os.File, name string, args []string) (<-chan error, func() error) { + if netNS != nil { + // We use nsenter to enter the namespace. 
+ // We can't use `setns` easily from Golang in the parent process because + // you can't execute the syscall in the forked child thread before it + // execs. + // We can't use `setns` easily from Golang in the child process because + // by the time you call it, the process has already created multiple + // threads. + args = append([]string{"--net=/proc/self/fd/3", name}, args...) + name = "nsenter" + } + + cmd := exec.Command(name, args...) + if netNS != nil { + cmd.ExtraFiles = []*os.File{netNS} + } + + out := &testWriter{ + name: processName, + t: t, + } + t.Cleanup(out.Flush) + cmd.Stdout = out + cmd.Stderr = out + cmd.SysProcAttr = &syscall.SysProcAttr{ + Pdeathsig: syscall.SIGTERM, + } + err := cmd.Start() + require.NoError(t, err) + + waitErr := make(chan error, 1) + go func() { + err := cmd.Wait() + waitErr <- err + close(waitErr) + }() + + closeFn := func() error { + _ = cmd.Process.Signal(syscall.SIGTERM) + select { + case <-time.After(5 * time.Second): + _ = cmd.Process.Kill() + case err := <-waitErr: + return err + } + return <-waitErr + } + + t.Cleanup(func() { + select { + case err := <-waitErr: + if err != nil { + t.Logf("subprocess exited: " + err.Error()) + } + return + default: + } + + _ = closeFn() + }) + + return waitErr, closeFn +} + +type testWriter struct { + mut sync.Mutex + name string + t *testing.T + + capturedLines []string +} + +func (w *testWriter) Write(p []byte) (n int, err error) { + w.mut.Lock() + defer w.mut.Unlock() + str := string(p) + split := strings.Split(str, "\n") + for _, s := range split { + if s == "" { + continue + } + + // If a line begins with "\s*--- (PASS|FAIL)" or is just PASS or FAIL, + // then it's a test result line. We want to capture it and log it later. + trimmed := strings.TrimSpace(s) + if strings.HasPrefix(trimmed, "--- PASS") || strings.HasPrefix(trimmed, "--- FAIL") || trimmed == "PASS" || trimmed == "FAIL" { + // Also fail the test if we see a FAIL line. 
+ if strings.Contains(trimmed, "FAIL") { + w.t.Errorf("subprocess logged test failure: %s: \t%s", w.name, s) + } + + w.capturedLines = append(w.capturedLines, s) + continue + } + + w.t.Logf("%s output: \t%s", w.name, s) + } + return len(p), nil +} + +func (w *testWriter) Flush() { + w.mut.Lock() + defer w.mut.Unlock() + for _, s := range w.capturedLines { + w.t.Logf("%s output: \t%s", w.name, s) + } + w.capturedLines = nil +} diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index 0b4cf38fb3a06..0b2e835afc79d 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -9,18 +9,14 @@ import ( "net/http" "net/url" "os" - "os/exec" "os/signal" "runtime" - "strings" - "sync" "syscall" "testing" "time" "github.com/google/uuid" "github.com/stretchr/testify/require" - "golang.org/x/xerrors" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -72,7 +68,7 @@ var topologies = []integration.TestTopology{ // Test that DERP over loopback works. Name: "BasicLoopbackDERP", SetupNetworking: integration.SetupNetworkingLoopback, - ServerOptions: integration.ServerOptions{}, + Server: integration.SimpleServerOptions{}, StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, @@ -82,7 +78,7 @@ var topologies = []integration.TestTopology{ // masquerades the traffic. Name: "EasyNATDERP", SetupNetworking: integration.SetupNetworkingEasyNAT, - ServerOptions: integration.ServerOptions{}, + Server: integration.SimpleServerOptions{}, StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, @@ -92,7 +88,7 @@ var topologies = []integration.TestTopology{ // client 2. 
Name: "EasyNATDirect", SetupNetworking: integration.SetupNetworkingEasyNAT, - ServerOptions: integration.ServerOptions{}, + Server: integration.SimpleServerOptions{}, StartClient: integration.StartClientDirect, RunTests: integration.TestSuite, }, @@ -102,7 +98,7 @@ var topologies = []integration.TestTopology{ // automatic fallback. Name: "DERPForceWebSockets", SetupNetworking: integration.SetupNetworkingEasyNAT, - ServerOptions: integration.ServerOptions{ + Server: integration.SimpleServerOptions{ FailUpgradeDERP: false, DERPWebsocketOnly: true, }, @@ -113,7 +109,7 @@ var topologies = []integration.TestTopology{ // Test that falling back to DERP over WebSocket works. Name: "DERPFallbackWebSockets", SetupNetworking: integration.SetupNetworkingEasyNAT, - ServerOptions: integration.ServerOptions{ + Server: integration.SimpleServerOptions{ FailUpgradeDERP: true, DERPWebsocketOnly: false, }, @@ -121,6 +117,13 @@ var topologies = []integration.TestTopology{ StartClient: integration.StartClientDERP, RunTests: integration.TestSuite, }, + { + Name: "BasicLoopbackDERPNGINX", + SetupNetworking: integration.SetupNetworkingLoopback, + Server: integration.NGINXServerOptions{}, + StartClient: integration.StartClientDERP, + RunTests: integration.TestSuite, + }, } //nolint:paralleltest,tparallel @@ -180,24 +183,7 @@ func handleTestSubprocess(t *testing.T) { switch *role { case "server": logger = logger.Named("server") - - srv := http.Server{ - Addr: *serverListenAddr, - Handler: topo.ServerOptions.Router(t, logger), - ReadTimeout: 10 * time.Second, - } - serveDone := make(chan struct{}) - go func() { - defer close(serveDone) - err := srv.ListenAndServe() - if err != nil && !xerrors.Is(err, http.ErrServerClosed) { - t.Error("HTTP server error:", err) - } - }() - t.Cleanup(func() { - _ = srv.Close() - <-serveDone - }) + topo.Server.StartServer(t, logger, *serverListenAddr) // no exit case "client": @@ -303,122 +289,13 @@ func startClientSubprocess(t *testing.T, topologyName string, 
networking integra return startSubprocess(t, clientName, netNS, flags) } -// startSubprocess starts a subprocess with the given flags and returns a -// channel that will receive the error when the subprocess exits. The returned -// function can be used to close the subprocess. +// startSubprocess launches the test binary with the same flags as the test, but +// with additional flags added. // -// Do not call close then wait on the channel. Use the returned value from the -// function instead in this case. +// See integration.ExecBackground for more details. func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []string) (<-chan error, func() error) { name := os.Args[0] // Always use verbose mode since it gets piped to the parent test anyways. args := append(os.Args[1:], append([]string{"-test.v=true"}, flags...)...) - - if netNS != nil { - // We use nsenter to enter the namespace. - // We can't use `setns` easily from Golang in the parent process because - // you can't execute the syscall in the forked child thread before it - // execs. - // We can't use `setns` easily from Golang in the child process because - // by the time you call it, the process has already created multiple - // threads. - args = append([]string{"--net=/proc/self/fd/3", name}, args...) - name = "nsenter" - } - - cmd := exec.Command(name, args...) 
- if netNS != nil { - cmd.ExtraFiles = []*os.File{netNS} - } - - out := &testWriter{ - name: processName, - t: t, - } - t.Cleanup(out.Flush) - cmd.Stdout = out - cmd.Stderr = out - cmd.SysProcAttr = &syscall.SysProcAttr{ - Pdeathsig: syscall.SIGTERM, - } - err := cmd.Start() - require.NoError(t, err) - - waitErr := make(chan error, 1) - go func() { - err := cmd.Wait() - waitErr <- err - close(waitErr) - }() - - closeFn := func() error { - _ = cmd.Process.Signal(syscall.SIGTERM) - select { - case <-time.After(5 * time.Second): - _ = cmd.Process.Kill() - case err := <-waitErr: - return err - } - return <-waitErr - } - - t.Cleanup(func() { - select { - case err := <-waitErr: - if err != nil { - t.Logf("subprocess exited: " + err.Error()) - } - return - default: - } - - _ = closeFn() - }) - - return waitErr, closeFn -} - -type testWriter struct { - mut sync.Mutex - name string - t *testing.T - - capturedLines []string -} - -func (w *testWriter) Write(p []byte) (n int, err error) { - w.mut.Lock() - defer w.mut.Unlock() - str := string(p) - split := strings.Split(str, "\n") - for _, s := range split { - if s == "" { - continue - } - - // If a line begins with "\s*--- (PASS|FAIL)" or is just PASS or FAIL, - // then it's a test result line. We want to capture it and log it later. - trimmed := strings.TrimSpace(s) - if strings.HasPrefix(trimmed, "--- PASS") || strings.HasPrefix(trimmed, "--- FAIL") || trimmed == "PASS" || trimmed == "FAIL" { - // Also fail the test if we see a FAIL line. 
- if strings.Contains(trimmed, "FAIL") { - w.t.Errorf("subprocess logged test failure: %s: \t%s", w.name, s) - } - - w.capturedLines = append(w.capturedLines, s) - continue - } - - w.t.Logf("%s output: \t%s", w.name, s) - } - return len(p), nil -} - -func (w *testWriter) Flush() { - w.mut.Lock() - defer w.mut.Unlock() - for _, s := range w.capturedLines { - w.t.Logf("%s output: \t%s", w.name, s) - } - w.capturedLines = nil + return integration.ExecBackground(t, processName, netNS, name, args) } diff --git a/tailnet/test/integration/network.go b/tailnet/test/integration/network.go index f36ac637455de..80eeb6048bd66 100644 --- a/tailnet/test/integration/network.go +++ b/tailnet/test/integration/network.go @@ -6,12 +6,10 @@ package integration import ( "bytes" "fmt" - "net/url" "os" "os/exec" "testing" - "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/tailscale/netlink" "golang.org/x/xerrors" @@ -19,29 +17,8 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/cryptorand" - "github.com/coder/coder/v2/tailnet" ) -type TestTopology struct { - Name string - // SetupNetworking creates interfaces and network namespaces for the test. - // The most simple implementation is NetworkSetupDefault, which only creates - // a network namespace shared for all tests. - SetupNetworking func(t *testing.T, logger slog.Logger) TestNetworking - - // ServerOptions is the configuration for the server. It's passed to the - // server process. - ServerOptions ServerOptions - // StartClient gets called in each client subprocess. It's expected to - // create the tailnet.Conn and ensure connectivity to it's peer. - StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn - - // RunTests is the main test function. It's called in each of the client - // subprocesses. If tests can only run once, they should check the client ID - // and return early if it's not the expected one. 
- RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID, conn *tailnet.Conn) -} - type TestNetworking struct { // ServerListenAddr is the IP address and port that the server listens on, // passed to StartServer. From 3905e2c54190c2a761fc94a98739b75cb54a3cbf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 14:00:48 +0300 Subject: [PATCH 024/149] chore: bump undici from 6.7.1 to 6.11.1 in /site (#13190) Bumps [undici](https://github.com/nodejs/undici) from 6.7.1 to 6.11.1. - [Release notes](https://github.com/nodejs/undici/releases) - [Commits](https://github.com/nodejs/undici/compare/v6.7.1...v6.11.1) --- updated-dependencies: - dependency-name: undici dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/package.json | 2 +- site/pnpm-lock.yaml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/site/package.json b/site/package.json index a5c5fee146578..b054180161d99 100644 --- a/site/package.json +++ b/site/package.json @@ -83,7 +83,7 @@ "tzdata": "1.0.30", "ua-parser-js": "1.0.33", "ufuzzy": "npm:@leeoniya/ufuzzy@1.0.10", - "undici": "6.7.1", + "undici": "6.11.1", "unique-names-generator": "4.7.1", "uuid": "9.0.0", "xterm": "5.2.0", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index b723b4cf25e56..35c1536c32fde 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -169,8 +169,8 @@ dependencies: specifier: npm:@leeoniya/ufuzzy@1.0.10 version: /@leeoniya/ufuzzy@1.0.10 undici: - specifier: 6.7.1 - version: 6.7.1 + specifier: 6.11.1 + version: 6.11.1 unique-names-generator: specifier: 4.7.1 version: 4.7.1 @@ -12624,8 +12624,8 @@ packages: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} dev: true - /undici@6.7.1: - resolution: 
{integrity: sha512-+Wtb9bAQw6HYWzCnxrPTMVEV3Q1QjYanI0E4q02ehReMuquQdLTEFEYbfs7hcImVYKcQkWSwT6buEmSVIiDDtQ==} + /undici@6.11.1: + resolution: {integrity: sha512-KyhzaLJnV1qa3BSHdj4AZ2ndqI0QWPxYzaIOio0WzcEJB9gvuysprJSLtpvc2D9mhR9jPDUk7xlJlZbH2KR5iw==} engines: {node: '>=18.0'} dev: false From 96f2cec541d2914775f4131c42d4a619afef1a37 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 14:01:13 +0300 Subject: [PATCH 025/149] chore: bump vite from 4.5.2 to 4.5.3 in /site (#13189) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 4.5.2 to 4.5.3. - [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/v4.5.3/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/v4.5.3/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-type: direct:development ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/package.json | 2 +- site/pnpm-lock.yaml | 38 +++++++++++++++++++------------------- 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/site/package.json b/site/package.json index b054180161d99..9fdbc384dc659 100644 --- a/site/package.json +++ b/site/package.json @@ -169,7 +169,7 @@ "ts-proto": "1.164.0", "ts-prune": "0.10.3", "typescript": "5.2.2", - "vite": "4.5.2", + "vite": "4.5.3", "vite-plugin-checker": "0.6.0", "vite-plugin-turbosnap": "1.0.2" }, diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 35c1536c32fde..94f1ea7a405a8 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -232,7 +232,7 @@ devDependencies: version: 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2) '@storybook/react-vite': specifier: 8.0.5 - version: 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2)(vite@4.5.2) + version: 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2)(vite@4.5.3) 
'@storybook/test': specifier: 8.0.5 version: 8.0.5(@types/jest@29.5.2)(jest@29.6.2) @@ -316,7 +316,7 @@ devDependencies: version: 6.9.1(eslint@8.52.0)(typescript@5.2.2) '@vitejs/plugin-react': specifier: 4.1.0 - version: 4.1.0(vite@4.5.2) + version: 4.1.0(vite@4.5.3) chromatic: specifier: 11.3.0 version: 11.3.0 @@ -423,11 +423,11 @@ devDependencies: specifier: 5.2.2 version: 5.2.2 vite: - specifier: 4.5.2 - version: 4.5.2(@types/node@18.19.0) + specifier: 4.5.3 + version: 4.5.3(@types/node@18.19.0) vite-plugin-checker: specifier: 0.6.0 - version: 0.6.0(eslint@8.52.0)(typescript@5.2.2)(vite@4.5.2) + version: 0.6.0(eslint@8.52.0)(typescript@5.2.2)(vite@4.5.3) vite-plugin-turbosnap: specifier: 1.0.2 version: 1.0.2 @@ -2843,7 +2843,7 @@ packages: chalk: 4.1.2 dev: true - /@joshwooding/vite-plugin-react-docgen-typescript@0.3.0(typescript@5.2.2)(vite@4.5.2): + /@joshwooding/vite-plugin-react-docgen-typescript@0.3.0(typescript@5.2.2)(vite@4.5.3): resolution: {integrity: sha512-2D6y7fNvFmsLmRt6UCOFJPvFoPMJGT0Uh1Wg0RaigUp7kdQPs6yYn8Dmx6GZkOH/NW0yMTwRz/p0SRMMRo50vA==} peerDependencies: typescript: '>= 4.3.x' @@ -2857,7 +2857,7 @@ packages: magic-string: 0.27.0 react-docgen-typescript: 2.2.2(typescript@5.2.2) typescript: 5.2.2 - vite: 4.5.2(@types/node@18.19.0) + vite: 4.5.3(@types/node@18.19.0) dev: true /@jridgewell/gen-mapping@0.3.3: @@ -3689,7 +3689,7 @@ packages: - supports-color dev: true - /@storybook/builder-vite@8.0.5(typescript@5.2.2)(vite@4.5.2): + /@storybook/builder-vite@8.0.5(typescript@5.2.2)(vite@4.5.3): resolution: {integrity: sha512-tKNxobC9tlYyUAayxoiOOnoMbg4RxoAwPOpPLnQYUfHLw1ecp/g8sGD6tisyFONyOIv7uF9gbzWLUfMjn9F2sw==} peerDependencies: '@preact/preset-vite': '*' @@ -3722,7 +3722,7 @@ packages: magic-string: 0.30.5 ts-dedent: 2.2.0 typescript: 5.2.2 - vite: 4.5.2(@types/node@18.19.0) + vite: 4.5.3(@types/node@18.19.0) transitivePeerDependencies: - encoding - supports-color @@ -4108,7 +4108,7 @@ packages: react-dom: 18.2.0(react@18.2.0) dev: true - 
/@storybook/react-vite@8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2)(vite@4.5.2): + /@storybook/react-vite@8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2)(vite@4.5.3): resolution: {integrity: sha512-VXxoyb3Zw5ReQwWoP64qMIy/iIS6B9PuLIEPDt7wM/5IMFljQozvNaarPQf0mNJxPkGT6zmiBn9WS06wPLPF0w==} engines: {node: '>=18.0.0'} peerDependencies: @@ -4116,9 +4116,9 @@ packages: react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 vite: ^4.0.0 || ^5.0.0 dependencies: - '@joshwooding/vite-plugin-react-docgen-typescript': 0.3.0(typescript@5.2.2)(vite@4.5.2) + '@joshwooding/vite-plugin-react-docgen-typescript': 0.3.0(typescript@5.2.2)(vite@4.5.3) '@rollup/pluginutils': 5.0.5 - '@storybook/builder-vite': 8.0.5(typescript@5.2.2)(vite@4.5.2) + '@storybook/builder-vite': 8.0.5(typescript@5.2.2)(vite@4.5.3) '@storybook/node-logger': 8.0.5 '@storybook/react': 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.2.2) find-up: 5.0.0 @@ -4128,7 +4128,7 @@ packages: react-dom: 18.2.0(react@18.2.0) resolve: 1.22.8 tsconfig-paths: 4.2.0 - vite: 4.5.2(@types/node@18.19.0) + vite: 4.5.3(@types/node@18.19.0) transitivePeerDependencies: - '@preact/preset-vite' - encoding @@ -5274,7 +5274,7 @@ packages: /@ungap/structured-clone@1.2.0: resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==} - /@vitejs/plugin-react@4.1.0(vite@4.5.2): + /@vitejs/plugin-react@4.1.0(vite@4.5.3): resolution: {integrity: sha512-rM0SqazU9iqPUraQ2JlIvReeaxOoRj6n+PzB1C0cBzIbd8qP336nC39/R9yPi3wVcah7E7j/kdU1uCUqMEU4OQ==} engines: {node: ^14.18.0 || >=16.0.0} peerDependencies: @@ -5285,7 +5285,7 @@ packages: '@babel/plugin-transform-react-jsx-source': 7.22.5(@babel/core@7.23.0) '@types/babel__core': 7.20.2 react-refresh: 0.14.0 - vite: 4.5.2(@types/node@18.19.0) + vite: 4.5.3(@types/node@18.19.0) transitivePeerDependencies: - supports-color dev: true @@ -12847,7 +12847,7 @@ packages: unist-util-stringify-position: 4.0.0 vfile-message: 4.0.2 - 
/vite-plugin-checker@0.6.0(eslint@8.52.0)(typescript@5.2.2)(vite@4.5.2): + /vite-plugin-checker@0.6.0(eslint@8.52.0)(typescript@5.2.2)(vite@4.5.3): resolution: {integrity: sha512-DWZ9Hv2TkpjviPxAelNUt4Q3IhSGrx7xrwdM64NI+Q4dt8PaMWJJh4qGNtSrfEuiuIzWWo00Ksvh5It4Y3L9xQ==} engines: {node: '>=14.16'} peerDependencies: @@ -12893,7 +12893,7 @@ packages: strip-ansi: 6.0.1 tiny-invariant: 1.3.1 typescript: 5.2.2 - vite: 4.5.2(@types/node@18.19.0) + vite: 4.5.3(@types/node@18.19.0) vscode-languageclient: 7.0.0 vscode-languageserver: 7.0.0 vscode-languageserver-textdocument: 1.0.8 @@ -12904,8 +12904,8 @@ packages: resolution: {integrity: sha512-irjKcKXRn7v5bPAg4mAbsS6DgibpP1VUFL9tlgxU6lloK6V9yw9qCZkS+s2PtbkZpWNzr3TN3zVJAc6J7gJZmA==} dev: true - /vite@4.5.2(@types/node@18.19.0): - resolution: {integrity: sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w==} + /vite@4.5.3(@types/node@18.19.0): + resolution: {integrity: sha512-kQL23kMeX92v3ph7IauVkXkikdDRsYMGTVl5KY2E9OY4ONLvkHf04MDTbnfo6NKxZiDLWzVpP5oTa8hQD8U3dg==} engines: {node: ^14.18.0 || >=16.0.0} hasBin: true peerDependencies: From 30227dae97687c0d47564922b07861ffd6fc9ad7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 11:08:24 -0400 Subject: [PATCH 026/149] chore: bump follow-redirects from 1.15.4 to 1.15.6 in /site (#13197) Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.4 to 1.15.6. - [Release notes](https://github.com/follow-redirects/follow-redirects/releases) - [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.4...v1.15.6) --- updated-dependencies: - dependency-name: follow-redirects dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/pnpm-lock.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 94f1ea7a405a8..90dd0443232bc 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -5720,7 +5720,7 @@ packages: /axios@1.6.0: resolution: {integrity: sha512-EZ1DYihju9pwVB+jg67ogm+Tmqc6JmhamRN6I4Zt8DfZu5lbcQGw3ozH9lFejSJgs/ibaef3A9PMXPLeefFGJg==} dependencies: - follow-redirects: 1.15.4 + follow-redirects: 1.15.6 form-data: 4.0.0 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -7775,8 +7775,8 @@ packages: engines: {node: '>=0.4.0'} dev: true - /follow-redirects@1.15.4: - resolution: {integrity: sha512-Cr4D/5wlrb0z9dgERpUL3LrmPKVDsETIJhaCMeDfuFYcqa5bldGV6wBsAN6X/vxlXQtFBMrXdXxdL8CbDTGniw==} + /follow-redirects@1.15.6: + resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: debug: '*' From b7a921a2bf3d76b29da37667c49af514e8411c5d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 May 2024 11:08:33 -0400 Subject: [PATCH 027/149] chore: bump express from 4.18.2 to 4.19.2 in /site (#13196) Bumps [express](https://github.com/expressjs/express) from 4.18.2 to 4.19.2. - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/master/History.md) - [Commits](https://github.com/expressjs/express/compare/4.18.2...4.19.2) --- updated-dependencies: - dependency-name: express dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/package.json | 2 +- site/pnpm-lock.yaml | 33 +++++++++++++++++++-------------- 2 files changed, 20 insertions(+), 15 deletions(-) diff --git a/site/package.json b/site/package.json index 9fdbc384dc659..3e582312dcfcc 100644 --- a/site/package.json +++ b/site/package.json @@ -149,7 +149,7 @@ "eslint-plugin-testing-library": "6.1.0", "eslint-plugin-unicorn": "49.0.0", "eventsourcemock": "2.0.0", - "express": "4.18.2", + "express": "4.19.2", "jest": "29.6.2", "jest-canvas-mock": "2.5.2", "jest-environment-jsdom": "29.5.0", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 90dd0443232bc..6a3297a2d5d6f 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -363,8 +363,8 @@ devDependencies: specifier: 2.0.0 version: 2.0.0 express: - specifier: 4.18.2 - version: 4.18.2 + specifier: 4.19.2 + version: 4.19.2 jest: specifier: 29.6.2 version: 29.6.2(@types/node@18.19.0)(ts-node@10.9.1) @@ -3680,7 +3680,7 @@ packages: ejs: 3.1.9 esbuild: 0.18.20 esbuild-plugin-alias: 0.2.1 - express: 4.18.2 + express: 4.19.2 fs-extra: 11.1.1 process: 0.11.10 util: 0.12.5 @@ -3716,7 +3716,7 @@ packages: '@types/find-cache-dir': 3.2.1 browser-assert: 1.2.1 es-module-lexer: 0.9.3 - express: 4.18.2 + express: 4.19.2 find-cache-dir: 3.3.2 fs-extra: 11.1.1 magic-string: 0.30.5 @@ -3929,7 +3929,7 @@ packages: cli-table3: 0.6.3 compression: 1.7.4 detect-port: 1.5.1 - express: 4.18.2 + express: 4.19.2 fs-extra: 11.1.1 globby: 11.1.0 ip: 2.0.1 @@ -5899,8 +5899,8 @@ packages: readable-stream: 3.6.2 dev: true - /body-parser@1.20.1: - resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==} + /body-parser@1.20.2: + resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} dependencies: 
bytes: 3.1.2 @@ -5912,7 +5912,7 @@ packages: iconv-lite: 0.4.24 on-finished: 2.4.1 qs: 6.11.0 - raw-body: 2.5.1 + raw-body: 2.5.2 type-is: 1.6.18 unpipe: 1.0.0 transitivePeerDependencies: @@ -6413,6 +6413,11 @@ packages: engines: {node: '>= 0.6'} dev: true + /cookie@0.6.0: + resolution: {integrity: sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==} + engines: {node: '>= 0.6'} + dev: true + /copy-anything@3.0.5: resolution: {integrity: sha512-yCEafptTtb4bk7GLEQoM8KVJpxAfdBJYaXyzQEgQQQgYrZiDp8SJmGKlYza6CYjEDNstAdNdKA3UuoULlEbS6w==} engines: {node: '>=12.13'} @@ -7565,16 +7570,16 @@ packages: jest-util: 29.6.3 dev: true - /express@4.18.2: - resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==} + /express@4.19.2: + resolution: {integrity: sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==} engines: {node: '>= 0.10.0'} dependencies: accepts: 1.3.8 array-flatten: 1.1.1 - body-parser: 1.20.1 + body-parser: 1.20.2 content-disposition: 0.5.4 content-type: 1.0.5 - cookie: 0.5.0 + cookie: 0.6.0 cookie-signature: 1.0.6 debug: 2.6.9 depd: 2.0.0 @@ -10942,8 +10947,8 @@ packages: engines: {node: '>= 0.6'} dev: true - /raw-body@2.5.1: - resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==} + /raw-body@2.5.2: + resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==} engines: {node: '>= 0.8'} dependencies: bytes: 3.1.2 From 06dd656e0859eb5663634f7f2e4d006e8aac0dd6 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Tue, 7 May 2024 20:15:12 +0300 Subject: [PATCH 028/149] ci: disable make test-migrations in release.yaml (#13201) --- .github/workflows/release.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yaml 
b/.github/workflows/release.yaml index faa6593452e25..9f23a5c488e80 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -178,9 +178,9 @@ jobs: env: EV_SIGNING_CERT: ${{ secrets.EV_SIGNING_CERT }} - - name: Test migrations from current ref to main - run: | - make test-migrations + # - name: Test migrations from current ref to main + # run: | + # make test-migrations # Setup GCloud for signing Windows binaries. - name: Authenticate to Google Cloud From c73d5a261783a11b1b8771d893b7872cc1c7f2d6 Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Tue, 7 May 2024 16:29:51 -0500 Subject: [PATCH 029/149] docs: bump mainline version to v2.11.0 (#13202) * docs: bump mainline version to v2.11.0 * bump release schedule --- docs/install/kubernetes.md | 2 +- docs/install/releases.md | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index 796c59df2dc62..a883e810b8ae0 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -134,7 +134,7 @@ locally in order to log in and manage templates. helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.10.2 + --version 2.11.0 ``` For the **stable** Coder release: diff --git a/docs/install/releases.md b/docs/install/releases.md index a1a09477444dd..22dac07f687e3 100644 --- a/docs/install/releases.md +++ b/docs/install/releases.md @@ -50,8 +50,8 @@ pages. 
| Release name | Release Date | Status | | ------------ | ------------------ | ---------------- | | 2.7.x | January 01, 2024 | Not Supported | -| 2.8.x | Februrary 06, 2024 | Security Support | -| 2.9.x | March 07, 2024 | Stable | -| 2.10.x | April 03, 2024 | Mainline | -| 2.11.x | May 07, 2024 | Not Released | +| 2.8.x | Februrary 06, 2024 | Not Supported | +| 2.9.x | March 07, 2024 | Security Support | +| 2.10.x | April 03, 2024 | Stable | +| 2.11.x | May 07, 2024 | Mainline | | 2.12.x | June 04, 2024 | Not Released | From 24448e79fe88cc0153a912b77b7c9f7730145c5c Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Wed, 8 May 2024 12:58:14 -0300 Subject: [PATCH 030/149] fix: prevent extending if template disallows (#13182) --- coderd/workspaces.go | 12 +++++++ enterprise/coderd/workspaces_test.go | 34 ++++++++++++++++++- .../WorkspacePage/WorkspaceTopbar.stories.tsx | 18 ++++++++++ .../pages/WorkspacePage/WorkspaceTopbar.tsx | 4 ++- 4 files changed, 66 insertions(+), 2 deletions(-) diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 87aea6919a351..6f398b34488d6 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -1052,6 +1052,18 @@ func (api *API) putExtendWorkspace(rw http.ResponseWriter, r *http.Request) { return xerrors.Errorf("workspace shutdown is manual") } + tmpl, err := s.GetTemplateByID(ctx, workspace.TemplateID) + if err != nil { + code = http.StatusInternalServerError + resp.Message = "Error fetching template." + return xerrors.Errorf("get template: %w", err) + } + if !tmpl.AllowUserAutostop { + code = http.StatusBadRequest + resp.Message = "Cannot extend workspace: template does not allow user autostop." + return xerrors.New("cannot extend workspace: template does not allow user autostop") + } + newDeadline := req.Deadline.UTC() if err := validWorkspaceDeadline(job.CompletedAt.Time, newDeadline); err != nil { // NOTE(Cian): Putting the error in the Message field on request from the FE folks. 
diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go index b44357c5b5dde..9cb86f55ba55f 100644 --- a/enterprise/coderd/workspaces_test.go +++ b/enterprise/coderd/workspaces_test.go @@ -913,8 +913,12 @@ func TestWorkspaceAutobuild(t *testing.T) { ws = coderdtest.MustWorkspace(t, client, ws.ID) require.Equal(t, version2.ID, ws.LatestBuild.TemplateVersionID) }) +} + +func TestTemplateDoesNotAllowUserAutostop(t *testing.T) { + t.Parallel() - t.Run("TemplateDoesNotAllowUserAutostop", func(t *testing.T) { + t.Run("TTLSetByTemplate", func(t *testing.T) { t.Parallel() client := coderdtest.New(t, &coderdtest.Options{ IncludeProvisionerDaemon: true, @@ -951,6 +955,34 @@ func TestWorkspaceAutobuild(t *testing.T) { require.Equal(t, templateTTL, template.DefaultTTLMillis) require.Equal(t, templateTTL, *workspace.TTLMillis) }) + + t.Run("ExtendIsNotEnabledByTemplate", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, &coderdtest.Options{ + IncludeProvisionerDaemon: true, + TemplateScheduleStore: schedule.NewEnterpriseTemplateScheduleStore(agplUserQuietHoursScheduleStore()), + }) + user := coderdtest.CreateFirstUser(t, client) + version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) + template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID, func(ctr *codersdk.CreateTemplateRequest) { + ctr.AllowUserAutostop = ptr.Ref(false) + }) + coderdtest.AwaitTemplateVersionJobCompleted(t, client, version.ID) + workspace := coderdtest.CreateWorkspace(t, client, user.OrganizationID, template.ID) + coderdtest.AwaitWorkspaceBuildJobCompleted(t, client, workspace.LatestBuild.ID) + + require.Equal(t, false, template.AllowUserAutostop, "template should have AllowUserAutostop as false") + + ctx := testutil.Context(t, testutil.WaitShort) + ttl := 8 * time.Hour + newDeadline := time.Now().Add(ttl + time.Hour).UTC() + + err := client.PutExtendWorkspace(ctx, workspace.ID, 
codersdk.PutExtendWorkspaceRequest{ + Deadline: newDeadline, + }) + + require.ErrorContains(t, err, "template does not allow user autostop") + }) } // Blocked by autostart requirements diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx index 8b0334b1dccfc..1cf8eeec78a67 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.stories.tsx @@ -275,3 +275,21 @@ export const WithQuota: Story = { ], }, }; + +export const TemplateDoesNotAllowAutostop: Story = { + args: { + workspace: { + ...MockWorkspace, + latest_build: { + ...MockWorkspace.latest_build, + get deadline() { + return addHours(new Date(), 8).toISOString(); + }, + }, + }, + template: { + ...MockTemplate, + allow_user_autostop: false, + }, + }, +}; diff --git a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx index e2337fd8a0bd5..d05f7c7c66453 100644 --- a/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceTopbar.tsx @@ -204,7 +204,9 @@ export const WorkspaceTopbar: FC = ({ )} From 35cb5728888be75f59528304888ec55844f70b99 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Wed, 8 May 2024 13:12:48 -0300 Subject: [PATCH 031/149] refactor(site): refactor the workspace settings form (#13198) --- .../WorkspaceScheduleForm.test.tsx | 12 ++- .../WorkspaceScheduleForm.tsx | 74 ++++++++++--------- 2 files changed, 50 insertions(+), 36 deletions(-) diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx index 22d6eaa4df141..870a28b4f2f0b 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx @@ -281,7 +281,9 
@@ describe("templateInheritance", () => { }; render(); - const autoStartToggle = await screen.findByLabelText("Enable Autostart"); + const autoStartToggle = await screen.findByLabelText("Enable Autostart", { + exact: false, + }); expect(autoStartToggle).toBeDisabled(); const startTimeInput = await screen.findByLabelText("Start time"); @@ -313,7 +315,9 @@ describe("templateInheritance", () => { render(); - const autoStartToggle = await screen.findByLabelText("Enable Autostart"); + const autoStartToggle = await screen.findByLabelText("Enable Autostart", { + exact: false, + }); expect(autoStartToggle).toBeEnabled(); const startTimeInput = await screen.findByLabelText("Start time"); @@ -346,7 +350,9 @@ describe("templateInheritance", () => { jest.spyOn(API, "getTemplateByName").mockResolvedValue(MockTemplate); render(); - const autoStopToggle = await screen.findByLabelText("Enable Autostop"); + const autoStopToggle = await screen.findByLabelText("Enable Autostop", { + exact: false, + }); expect(autoStopToggle).toBeDisabled(); const ttlInput = await screen.findByLabelText( diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx index 15a58bf423c99..61e4cf597f325 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.tsx @@ -7,7 +7,6 @@ import FormLabel from "@mui/material/FormLabel"; import MenuItem from "@mui/material/MenuItem"; import Switch from "@mui/material/Switch"; import TextField from "@mui/material/TextField"; -import Tooltip from "@mui/material/Tooltip"; import { formatDuration, intervalToDuration } from "date-fns"; import dayjs from "dayjs"; import advancedFormat from "dayjs/plugin/advancedFormat"; @@ -19,7 +18,6 @@ import { type FormikTouched, useFormik } from "formik"; import type { ChangeEvent, FC } 
from "react"; import * as Yup from "yup"; import type { Template } from "api/typesGenerated"; -import { DisabledBadge } from "components/Badges/Badges"; import { HorizontalForm, FormFooter, @@ -27,6 +25,10 @@ import { FormFields, } from "components/Form/Form"; import { Stack } from "components/Stack/Stack"; +import { + StackLabel, + StackLabelHelperText, +} from "components/StackLabel/StackLabel"; import { defaultSchedule, emptySchedule, @@ -180,6 +182,10 @@ export const validationSchema = Yup.object({ }), }); +// This form utilizes complex, visually-intensive fields. Increasing the space +// between these fields enhances readability and cleanliness. +const FIELDS_SPACING = 4; + export const WorkspaceScheduleForm: FC = ({ error, initialValues, @@ -275,21 +281,9 @@ export const WorkspaceScheduleForm: FC = ({ -
- Select the time and days of week on which you want the workspace - starting automatically. -
- {!template.allow_user_autostart && ( - - - - )} - - } + description="Select the time and days of week on which you want the workspace starting automatically." > - + = ({ name="autostartEnabled" checked={form.values.autostartEnabled} onChange={handleToggleAutostart} + size="small" /> } - label={Language.startSwitch} + label={ + + {Language.startSwitch} + {!template.allow_user_autostart && ( + + The template for this workspace does not allow modification + of autostart. + + )} + + } /> = ({ title="Autostop" description={ <> -
- Set how many hours should elapse after the workspace started - before the workspace automatically shuts down. This will be - extended by{" "} - {dayjs - .duration({ milliseconds: template.activity_bump_ms }) - .humanize()}{" "} - after last activity in the workspace was detected. -
- {!template.allow_user_autostop && ( - - - - )} + Set how many hours should elapse after the workspace started before + the workspace automatically shuts down. This will be extended by{" "} + {dayjs + .duration({ milliseconds: template.activity_bump_ms }) + .humanize()}{" "} + after last activity in the workspace was detected. } > - + } - label={Language.stopSwitch} + label={ + + {Language.stopSwitch} + {!template.allow_user_autostop && ( + + The template for this workspace does not allow modification + of autostop. + + )} + + } /> Date: Wed, 8 May 2024 20:29:12 +0400 Subject: [PATCH 032/149] docs: describe AWS hard NAT (#13205) Documents what I've learned about getting direct connections on AWS. Several customers have had issues. --- docs/networking/stun.md | 50 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) diff --git a/docs/networking/stun.md b/docs/networking/stun.md index af201e65b3d12..147c49aae0144 100644 --- a/docs/networking/stun.md +++ b/docs/networking/stun.md @@ -122,3 +122,53 @@ originate. Using these internal addresses is much more likely to result in a successful direct connection. ![Diagram of a workspace agent and client over VPN](../images/networking/stun3.png) + +## Hard NAT + +Some NATs are known to use a different port when forwarding requests to the STUN +server and when forwarding probe packets to peers. In that case, the address a +peer discovers over the STUN protocol will have the correct IP address, but the +wrong port. Tailscale refers to this as "hard" NAT in +[How NAT traversal works (tailscale.com)](https://tailscale.com/blog/how-nat-traversal-works). + +If both peers are behind a "hard" NAT, direct connections may take longer to +establish or will not be established at all. If one peer is behind a "hard" NAT +and the other is running a firewall (including Windows Defender Firewall), the +firewall may block direct connections. 
+ +In both cases, peers fallback to DERP connections if they cannot establish a +direct connection. + +If your workspaces are behind a "hard" NAT, you can: + +1. Ensure clients are not also behind a "hard" NAT. You may have limited ability + to control this if end users connect from their homes. +2. Ensure firewalls on client devices (e.g. Windows Defender Firewall) have an + inbound policy allowing all UDP ports either to the `coder` or `coder.exe` + CLI binary, or from the IP addresses of your workspace NATs. +3. Reconfigure your workspace network's NAT connection to the public internet to + be an "easy" NAT. See below for specific examples. + +### AWS NAT Gateway + +The +[AWS NAT Gateway](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-nat-gateway.html) +is a known "hard" NAT. You can use a +[NAT Instance](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_NAT_Instance.html) +instead of a NAT Gateway, and configure it to use the same port assignment for +all UDP traffic from a particular source IP:port combination (Tailscale calls +this "easy" NAT). Linux `MASQUERADE` rules work well for this. + +### AWS Elastic Kubernetes Service (EKS) + +The default configuration of AWS Elastic Kubernetes Service (EKS) includes the +[Amazon VPC CNI Driver](https://github.com/aws/amazon-vpc-cni-k8s), which by +default randomizes the public port for different outgoing UDP connections. This +makes it act as a "hard" NAT, even if the EKS nodes are on a public subnet (and +thus do not need to use the AWS NAT Gateway to reach the Internet). + +This behavior can be disabled by setting the environment variable +`AWS_VPC_K8S_CNI_RANDOMIZESNAT=none` in the `aws-node` DaemonSet. Note, however, +if your nodes are on a private subnet, they will still need NAT to reach the +public Internet, meaning that issues with the +[AWS NAT Gateway](#aws-nat-gateway) might affect you. 
From a4bd50c9858734217dbdcb60a0bcc3091fb5f9bd Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Wed, 8 May 2024 13:34:22 -0500 Subject: [PATCH 033/149] chore: enable terraform provisioners in e2e by default (#13134) * skip docker test for now, it leaks containers --- .vscode/settings.json | 4 +- site/e2e/constants.ts | 4 ++ site/e2e/helpers.ts | 52 ++++++++++++++----- site/e2e/playwright.config.ts | 40 +++++++++++++- site/e2e/tests/createWorkspace.spec.ts | 42 +++++++++++++++ .../CreateTemplatePage/CreateTemplateForm.tsx | 14 +++++ .../DuplicateTemplateView.tsx | 1 + .../CreateTemplatePage/UploadTemplateView.tsx | 1 + site/src/pages/CreateTemplatePage/utils.ts | 15 +++--- 9 files changed, 150 insertions(+), 23 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 1f47d7ecf99d4..c95554245cab5 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -222,5 +222,7 @@ "go.testFlags": ["-short", "-coverpkg=./..."], // We often use a version of TypeScript that's ahead of the version shipped // with VS Code. - "typescript.tsdk": "./site/node_modules/typescript/lib" + "typescript.tsdk": "./site/node_modules/typescript/lib", + // Playwright tests in VSCode will open a browser to live "view" the test. + "playwright.reuseBrowser": true } diff --git a/site/e2e/constants.ts b/site/e2e/constants.ts index 3e1283e5491c4..850df331a6adb 100644 --- a/site/e2e/constants.ts +++ b/site/e2e/constants.ts @@ -39,6 +39,10 @@ export const requireEnterpriseTests = Boolean( ); export const enterpriseLicense = process.env.CODER_E2E_ENTERPRISE_LICENSE ?? ""; +// Disabling terraform tests is optional for environments without Docker + Terraform. +// By default, we opt into these tests. +export const requireTerraformTests = !process.env.CODER_E2E_DISABLE_TERRAFORM; + // Fake experiments to verify that site presents them as enabled. 
export const e2eFakeExperiment1 = "e2e-fake-experiment-1"; export const e2eFakeExperiment2 = "e2e-fake-experiment-2"; diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index 1c5349fbf5e5b..3f58184b1c1ac 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -19,6 +19,7 @@ import { enterpriseLicense, prometheusPort, requireEnterpriseTests, + requireTerraformTests, } from "./constants"; import { expectUrl } from "./expectUrl"; import { @@ -43,6 +44,11 @@ export function requiresEnterpriseLicense() { test.skip(!enterpriseLicense); } +// requireTerraformProvisioner by default is enabled. +export function requireTerraformProvisioner() { + test.skip(!requireTerraformTests); +} + // createWorkspace creates a workspace for a template. // It does not wait for it to be running, but it does navigate to the page. export const createWorkspace = async ( @@ -149,25 +155,46 @@ export const verifyParameters = async ( } }; +// StarterTemplates are ids of starter templates that can be used in place of +// the responses payload. These starter templates will require real provisioners. +export enum StarterTemplates { + STARTER_DOCKER = "docker", +} + +function isStarterTemplate( + input: EchoProvisionerResponses | StarterTemplates | undefined, +): input is StarterTemplates { + if (!input) { + return false; + } + return typeof input === "string"; +} + // createTemplate navigates to the /templates/new page and uploads a template // with the resources provided in the responses argument. export const createTemplate = async ( page: Page, - responses?: EchoProvisionerResponses, + responses?: EchoProvisionerResponses | StarterTemplates, ): Promise => { - // Required to have templates submit their provisioner type as echo! - await page.addInitScript({ - content: "window.playwright = true", - }); + let path = "/templates/new"; + if (isStarterTemplate(responses)) { + path += `?exampleId=${responses}`; + } else { + // The form page will read this value and use it as the default type. 
+ path += "?provisioner_type=echo"; + } - await page.goto("/templates/new", { waitUntil: "domcontentloaded" }); + await page.goto(path, { waitUntil: "domcontentloaded" }); await expectUrl(page).toHavePathName("/templates/new"); - await page.getByTestId("file-upload").setInputFiles({ - buffer: await createTemplateVersionTar(responses), - mimeType: "application/x-tar", - name: "template.tar", - }); + if (!isStarterTemplate(responses)) { + await page.getByTestId("file-upload").setInputFiles({ + buffer: await createTemplateVersionTar(responses), + mimeType: "application/x-tar", + name: "template.tar", + }); + } + const name = randomName(); await page.getByLabel("Name *").fill(name); await page.getByTestId("form-submit").click(); @@ -868,6 +895,7 @@ export async function openTerminalWindow( page: Page, context: BrowserContext, workspaceName: string, + agentName: string = "dev", ): Promise { // Wait for the web terminal to open in a new tab const pagePromise = context.waitForEvent("page"); @@ -879,7 +907,7 @@ export async function openTerminalWindow( // isn't POSIX compatible, such as Fish. 
const commandQuery = `?command=${encodeURIComponent("/usr/bin/env bash")}`; await expectUrl(terminal).toHavePathName( - `/@admin/${workspaceName}.dev/terminal`, + `/@admin/${workspaceName}.${agentName}/terminal`, ); await terminal.goto(`/@admin/${workspaceName}.dev/terminal${commandQuery}`); diff --git a/site/e2e/playwright.config.ts b/site/e2e/playwright.config.ts index 0e10c1ff34b0a..889976fe4615b 100644 --- a/site/e2e/playwright.config.ts +++ b/site/e2e/playwright.config.ts @@ -1,4 +1,5 @@ import { defineConfig } from "@playwright/test"; +import { execSync } from "child_process"; import * as path from "path"; import { coderMain, @@ -7,6 +8,7 @@ import { e2eFakeExperiment1, e2eFakeExperiment2, gitAuth, + requireTerraformTests, } from "./constants"; export const wsEndpoint = process.env.CODER_E2E_WS_ENDPOINT; @@ -14,6 +16,39 @@ export const wsEndpoint = process.env.CODER_E2E_WS_ENDPOINT; // This is where auth cookies are stored! export const storageState = path.join(__dirname, ".auth.json"); +// If running terraform tests, verify the requirements exist in the +// environment. +// +// These execs will throw an error if the status code is non-zero. +// So if both these work, then we can launch terraform provisioners. +let hasTerraform = false; +let hasDocker = false; +try { + execSync("terraform --version"); + hasTerraform = true; +} catch { + /* empty */ +} + +try { + execSync("docker --version"); + hasDocker = true; +} catch { + /* empty */ +} + +if (!hasTerraform || !hasDocker) { + const msg = + "Terraform provisioners require docker & terraform binaries to function. \n" + + (hasTerraform + ? "" + : "\tThe `terraform` executable is not present in the runtime environment.\n") + + (hasDocker + ? 
"" + : "\tThe `docker` executable is not present in the runtime environment.\n"); + throw new Error(msg); +} + const localURL = (port: number, path: string): string => { return `http://localhost:${port}${path}`; }; @@ -54,13 +89,14 @@ export default defineConfig({ `go run -tags embed ${coderMain} server`, "--global-config $(mktemp -d -t e2e-XXXXXXXXXX)", `--access-url=http://localhost:${coderPort}`, - `--http-address=localhost:${coderPort}`, + `--http-address=0.0.0.0:${coderPort}`, "--in-memory", "--telemetry=false", "--dangerous-disable-rate-limits", "--provisioner-daemons 10", // TODO: Enable some terraform provisioners - "--provisioner-types=echo", + `--provisioner-types=echo${requireTerraformTests ? ",terraform" : ""}`, + `--provisioner-daemons=10`, "--web-terminal-renderer=dom", "--pprof-enable", ] diff --git a/site/e2e/tests/createWorkspace.spec.ts b/site/e2e/tests/createWorkspace.spec.ts index 1fa770c5d3614..5f1713b60aaa7 100644 --- a/site/e2e/tests/createWorkspace.spec.ts +++ b/site/e2e/tests/createWorkspace.spec.ts @@ -1,8 +1,11 @@ import { test, expect } from "@playwright/test"; import { + StarterTemplates, createTemplate, createWorkspace, echoResponsesWithParameters, + openTerminalWindow, + requireTerraformProvisioner, verifyParameters, } from "../helpers"; import { beforeCoderTest } from "../hooks"; @@ -147,3 +150,42 @@ test("create workspace with disable_param search params", async ({ page }) => { await expect(page.getByLabel(/First parameter/i)).toBeDisabled(); await expect(page.getByLabel(/Second parameter/i)).toBeDisabled(); }); + +test("create docker workspace", async ({ context, page }) => { + test.skip( + true, + "creating docker containers is currently leaky. 
They are not cleaned up when the tests are over.", + ); + requireTerraformProvisioner(); + const template = await createTemplate(page, StarterTemplates.STARTER_DOCKER); + + const workspaceName = await createWorkspace(page, template); + + // The workspace agents must be ready before we try to interact with the workspace. + await page.waitForSelector( + `//div[@role="status"][@data-testid="agent-status-ready"]`, + { + state: "visible", + }, + ); + + // Wait for the terminal button to be visible, and click it. + const terminalButton = + "//a[@data-testid='terminal'][normalize-space()='Terminal']"; + await page.waitForSelector(terminalButton, { + state: "visible", + }); + + const terminal = await openTerminalWindow( + page, + context, + workspaceName, + "main", + ); + await terminal.waitForSelector( + `//textarea[contains(@class,"xterm-helper-textarea")]`, + { + state: "visible", + }, + ); +}); diff --git a/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx b/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx index 0ba63ce7de2c1..8370be000e9c1 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplateForm.tsx @@ -3,9 +3,11 @@ import { useFormik } from "formik"; import camelCase from "lodash/camelCase"; import capitalize from "lodash/capitalize"; import type { FC } from "react"; +import { useSearchParams } from "react-router-dom"; import * as Yup from "yup"; import type { ProvisionerJobLog, + ProvisionerType, Template, TemplateExample, TemplateVersionVariable, @@ -50,6 +52,7 @@ export interface CreateTemplateData { parameter_values_by_name?: Record; user_variable_values?: VariableValue[]; allow_everyone_group_access: boolean; + provisioner_type: ProvisionerType; } const validationSchema = Yup.object({ @@ -81,6 +84,7 @@ const defaultInitialValues: CreateTemplateData = { allow_user_autostart: false, allow_user_autostop: false, allow_everyone_group_access: true, + provisioner_type: "terraform", 
}; type GetInitialValuesParams = { @@ -88,6 +92,7 @@ type GetInitialValuesParams = { fromCopy?: Template; variables?: TemplateVersionVariable[]; allowAdvancedScheduling: boolean; + searchParams: URLSearchParams; }; const getInitialValues = ({ @@ -95,9 +100,15 @@ const getInitialValues = ({ fromCopy, allowAdvancedScheduling, variables, + searchParams, }: GetInitialValuesParams) => { let initialValues = defaultInitialValues; + // Will assume the query param has a valid ProvisionerType, as this query param is only used + // in testing. + defaultInitialValues.provisioner_type = + (searchParams.get("provisioner_type") as ProvisionerType) || "terraform"; + if (!allowAdvancedScheduling) { initialValues = { ...initialValues, @@ -164,6 +175,7 @@ export type CreateTemplateFormProps = ( }; export const CreateTemplateForm: FC = (props) => { + const [searchParams] = useSearchParams(); const { onCancel, onSubmit, @@ -176,6 +188,7 @@ export const CreateTemplateForm: FC = (props) => { allowAdvancedScheduling, variablesSectionRef, } = props; + const form = useFormik({ initialValues: getInitialValues({ allowAdvancedScheduling, @@ -183,6 +196,7 @@ export const CreateTemplateForm: FC = (props) => { "starterTemplate" in props ? props.starterTemplate : undefined, fromCopy: "copiedTemplate" in props ? 
props.copiedTemplate : undefined, variables, + searchParams, }), validationSchema, onSubmit, diff --git a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx index fd87e3b586c22..91ac28acc9127 100644 --- a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx @@ -82,6 +82,7 @@ export const DuplicateTemplateView: FC = ({ version: firstVersionFromFile( templateVersionQuery.data!.job.file_id, formData.user_variable_values, + formData.provisioner_type, ), template: newTemplate(formData), }); diff --git a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx index c6cc5fccac8e3..ac650baff112b 100644 --- a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx @@ -64,6 +64,7 @@ export const UploadTemplateView: FC = ({ version: firstVersionFromFile( uploadedFile!.hash, formData.user_variable_values, + formData.provisioner_type, ), template: newTemplate(formData), }); diff --git a/site/src/pages/CreateTemplatePage/utils.ts b/site/src/pages/CreateTemplatePage/utils.ts index cc7266d1de664..a1536b8a4ce5c 100644 --- a/site/src/pages/CreateTemplatePage/utils.ts +++ b/site/src/pages/CreateTemplatePage/utils.ts @@ -1,4 +1,5 @@ import type { + CreateTemplateVersionRequest, Entitlements, ProvisionerType, TemplateExample, @@ -7,10 +8,6 @@ import type { import { calculateAutostopRequirementDaysValue } from "utils/schedule"; import type { CreateTemplateData } from "./CreateTemplateForm"; -const provisioner: ProvisionerType = - // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Playwright needs to use a different provisioner type! - typeof (window as any).playwright !== "undefined" ? 
"echo" : "terraform"; - export const newTemplate = (formData: CreateTemplateData) => { const { autostop_requirement_days_of_week, autostop_requirement_weeks } = formData; @@ -56,10 +53,11 @@ export const getFormPermissions = (entitlements: Entitlements) => { export const firstVersionFromFile = ( fileId: string, variables: VariableValue[] | undefined, -) => { + provisionerType: ProvisionerType, +): CreateTemplateVersionRequest => { return { storage_method: "file" as const, - provisioner: provisioner, + provisioner: provisionerType, user_variable_values: variables, file_id: fileId, tags: {}, @@ -69,10 +67,11 @@ export const firstVersionFromFile = ( export const firstVersionFromExample = ( example: TemplateExample, variables: VariableValue[] | undefined, -) => { +): CreateTemplateVersionRequest => { return { storage_method: "file" as const, - provisioner: provisioner, + // All starter templates are for the terraform provisioner type. + provisioner: "terraform", user_variable_values: variables, example_id: example.id, tags: {}, From d8e0be6ee637d09d208590608c017405c0ad43d4 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Wed, 8 May 2024 15:40:43 -0600 Subject: [PATCH 034/149] feat: add support for multiple banners (#13081) --- agent/agent.go | 110 ++-- agent/agent_test.go | 10 +- agent/agentssh/agentssh.go | 25 +- agent/agenttest/client.go | 30 +- agent/proto/agent.pb.go | 471 +++++++++++++----- agent/proto/agent.proto | 13 + agent/proto/agent_drpc.pb.go | 42 +- coderd/agentapi/api.go | 4 +- coderd/agentapi/notification_banners.go | 39 ++ ... 
=> notification_banners_internal_test.go} | 35 +- coderd/agentapi/servicebanner.go | 24 - coderd/apidoc/docs.go | 54 +- coderd/apidoc/swagger.json | 54 +- coderd/appearance/appearance.go | 3 +- coderd/database/dbauthz/dbauthz.go | 24 +- coderd/database/dbauthz/dbauthz_test.go | 6 +- coderd/database/dbmem/dbmem.go | 40 +- coderd/database/dbmetrics/dbmetrics.go | 28 +- coderd/database/dbmock/dbmock.go | 58 +-- .../000208_notification_banners.down.sql | 1 + .../000208_notification_banners.up.sql | 4 + coderd/database/querier.go | 4 +- coderd/database/queries.sql.go | 36 +- coderd/database/queries/siteconfig.sql | 10 +- codersdk/agentsdk/convert.go | 22 +- codersdk/deployment.go | 23 +- docs/api/enterprise.md | 21 + docs/api/schemas.md | 74 +-- enterprise/coderd/appearance.go | 54 +- enterprise/coderd/appearance_test.go | 70 +-- site/src/api/api.ts | 1 + site/src/api/queries/appearance.ts | 4 +- site/src/api/typesGenerated.ts | 13 +- .../src/modules/dashboard/DashboardLayout.tsx | 4 +- .../modules/dashboard/DashboardProvider.tsx | 52 +- site/src/modules/dashboard/Navbar/Navbar.tsx | 4 +- .../NotificationBannerView.stories.tsx | 24 + .../NotificationBannerView.tsx} | 28 +- .../NotificationBanners.tsx | 28 ++ .../dashboard/ServiceBanner/ServiceBanner.tsx | 21 - .../ServiceBannerView.stories.tsx | 25 - .../WorkspaceStatusBadge.stories.tsx | 8 +- .../AppearanceSettingsPage.tsx | 12 +- .../AppearanceSettingsPageView.stories.tsx | 13 +- .../AppearanceSettingsPageView.tsx | 166 +----- .../NotificationBannerDialog.stories.tsx | 24 + .../NotificationBannerDialog.tsx | 138 +++++ .../NotificationBannerItem.tsx | 77 +++ .../NotificationBannerSettings.tsx | 202 ++++++++ .../src/pages/DeploySettingsPage/Fieldset.tsx | 20 +- .../pages/WorkspacePage/Workspace.stories.tsx | 8 +- .../src/pages/WorkspacePage/WorkspacePage.tsx | 4 +- .../WorkspacesPageView.stories.tsx | 8 +- site/src/testHelpers/entities.ts | 1 + site/src/testHelpers/storybook.tsx | 6 +- tailnet/proto/version.go | 2 +- 
tailnet/test/integration/integration_test.go | 1 - 57 files changed, 1473 insertions(+), 810 deletions(-) create mode 100644 coderd/agentapi/notification_banners.go rename coderd/agentapi/{servicebanner_internal_test.go => notification_banners_internal_test.go} (57%) delete mode 100644 coderd/agentapi/servicebanner.go create mode 100644 coderd/database/migrations/000208_notification_banners.down.sql create mode 100644 coderd/database/migrations/000208_notification_banners.up.sql create mode 100644 site/src/modules/dashboard/NotificationBanners/NotificationBannerView.stories.tsx rename site/src/modules/dashboard/{ServiceBanner/ServiceBannerView.tsx => NotificationBanners/NotificationBannerView.tsx} (57%) create mode 100644 site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx delete mode 100644 site/src/modules/dashboard/ServiceBanner/ServiceBanner.tsx delete mode 100644 site/src/modules/dashboard/ServiceBanner/ServiceBannerView.stories.tsx create mode 100644 site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.stories.tsx create mode 100644 site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx create mode 100644 site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx create mode 100644 site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx diff --git a/agent/agent.go b/agent/agent.go index 8125bbc5f70d6..e3bbe7f07c984 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -155,35 +155,35 @@ func New(options Options) Agent { hardCtx, hardCancel := context.WithCancel(context.Background()) gracefulCtx, gracefulCancel := context.WithCancel(hardCtx) a := &agent{ - tailnetListenPort: options.TailnetListenPort, - reconnectingPTYTimeout: options.ReconnectingPTYTimeout, - logger: options.Logger, - gracefulCtx: gracefulCtx, - gracefulCancel: gracefulCancel, - hardCtx: hardCtx, - hardCancel: hardCancel, - coordDisconnected: make(chan 
struct{}), - environmentVariables: options.EnvironmentVariables, - client: options.Client, - exchangeToken: options.ExchangeToken, - filesystem: options.Filesystem, - logDir: options.LogDir, - tempDir: options.TempDir, - scriptDataDir: options.ScriptDataDir, - lifecycleUpdate: make(chan struct{}, 1), - lifecycleReported: make(chan codersdk.WorkspaceAgentLifecycle, 1), - lifecycleStates: []agentsdk.PostLifecycleRequest{{State: codersdk.WorkspaceAgentLifecycleCreated}}, - ignorePorts: options.IgnorePorts, - portCacheDuration: options.PortCacheDuration, - reportMetadataInterval: options.ReportMetadataInterval, - serviceBannerRefreshInterval: options.ServiceBannerRefreshInterval, - sshMaxTimeout: options.SSHMaxTimeout, - subsystems: options.Subsystems, - addresses: options.Addresses, - syscaller: options.Syscaller, - modifiedProcs: options.ModifiedProcesses, - processManagementTick: options.ProcessManagementTick, - logSender: agentsdk.NewLogSender(options.Logger), + tailnetListenPort: options.TailnetListenPort, + reconnectingPTYTimeout: options.ReconnectingPTYTimeout, + logger: options.Logger, + gracefulCtx: gracefulCtx, + gracefulCancel: gracefulCancel, + hardCtx: hardCtx, + hardCancel: hardCancel, + coordDisconnected: make(chan struct{}), + environmentVariables: options.EnvironmentVariables, + client: options.Client, + exchangeToken: options.ExchangeToken, + filesystem: options.Filesystem, + logDir: options.LogDir, + tempDir: options.TempDir, + scriptDataDir: options.ScriptDataDir, + lifecycleUpdate: make(chan struct{}, 1), + lifecycleReported: make(chan codersdk.WorkspaceAgentLifecycle, 1), + lifecycleStates: []agentsdk.PostLifecycleRequest{{State: codersdk.WorkspaceAgentLifecycleCreated}}, + ignorePorts: options.IgnorePorts, + portCacheDuration: options.PortCacheDuration, + reportMetadataInterval: options.ReportMetadataInterval, + notificationBannersRefreshInterval: options.ServiceBannerRefreshInterval, + sshMaxTimeout: options.SSHMaxTimeout, + subsystems: 
options.Subsystems, + addresses: options.Addresses, + syscaller: options.Syscaller, + modifiedProcs: options.ModifiedProcesses, + processManagementTick: options.ProcessManagementTick, + logSender: agentsdk.NewLogSender(options.Logger), prometheusRegistry: prometheusRegistry, metrics: newAgentMetrics(prometheusRegistry), @@ -193,7 +193,7 @@ func New(options Options) Agent { // that gets closed on disconnection. This is used to wait for graceful disconnection from the // coordinator during shut down. close(a.coordDisconnected) - a.serviceBanner.Store(new(codersdk.ServiceBannerConfig)) + a.notificationBanners.Store(new([]codersdk.BannerConfig)) a.sessionToken.Store(new(string)) a.init() return a @@ -231,14 +231,14 @@ type agent struct { environmentVariables map[string]string - manifest atomic.Pointer[agentsdk.Manifest] // manifest is atomic because values can change after reconnection. - reportMetadataInterval time.Duration - scriptRunner *agentscripts.Runner - serviceBanner atomic.Pointer[codersdk.ServiceBannerConfig] // serviceBanner is atomic because it is periodically updated. - serviceBannerRefreshInterval time.Duration - sessionToken atomic.Pointer[string] - sshServer *agentssh.Server - sshMaxTimeout time.Duration + manifest atomic.Pointer[agentsdk.Manifest] // manifest is atomic because values can change after reconnection. + reportMetadataInterval time.Duration + scriptRunner *agentscripts.Runner + notificationBanners atomic.Pointer[[]codersdk.BannerConfig] // notificationBanners is atomic because it is periodically updated. + notificationBannersRefreshInterval time.Duration + sessionToken atomic.Pointer[string] + sshServer *agentssh.Server + sshMaxTimeout time.Duration lifecycleUpdate chan struct{} lifecycleReported chan codersdk.WorkspaceAgentLifecycle @@ -272,11 +272,11 @@ func (a *agent) TailnetConn() *tailnet.Conn { func (a *agent) init() { // pass the "hard" context because we explicitly close the SSH server as part of graceful shutdown. 
sshSrv, err := agentssh.NewServer(a.hardCtx, a.logger.Named("ssh-server"), a.prometheusRegistry, a.filesystem, &agentssh.Config{ - MaxTimeout: a.sshMaxTimeout, - MOTDFile: func() string { return a.manifest.Load().MOTDFile }, - ServiceBanner: func() *codersdk.ServiceBannerConfig { return a.serviceBanner.Load() }, - UpdateEnv: a.updateCommandEnv, - WorkingDirectory: func() string { return a.manifest.Load().Directory }, + MaxTimeout: a.sshMaxTimeout, + MOTDFile: func() string { return a.manifest.Load().MOTDFile }, + NotificationBanners: func() *[]codersdk.BannerConfig { return a.notificationBanners.Load() }, + UpdateEnv: a.updateCommandEnv, + WorkingDirectory: func() string { return a.manifest.Load().Directory }, }) if err != nil { panic(err) @@ -709,23 +709,26 @@ func (a *agent) setLifecycle(state codersdk.WorkspaceAgentLifecycle) { // (and must be done before the session actually starts). func (a *agent) fetchServiceBannerLoop(ctx context.Context, conn drpc.Conn) error { aAPI := proto.NewDRPCAgentClient(conn) - ticker := time.NewTicker(a.serviceBannerRefreshInterval) + ticker := time.NewTicker(a.notificationBannersRefreshInterval) defer ticker.Stop() for { select { case <-ctx.Done(): return ctx.Err() case <-ticker.C: - sbp, err := aAPI.GetServiceBanner(ctx, &proto.GetServiceBannerRequest{}) + bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) if err != nil { if ctx.Err() != nil { return ctx.Err() } - a.logger.Error(ctx, "failed to update service banner", slog.Error(err)) + a.logger.Error(ctx, "failed to update notification banners", slog.Error(err)) return err } - serviceBanner := agentsdk.ServiceBannerFromProto(sbp) - a.serviceBanner.Store(&serviceBanner) + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) + for _, bannerProto := range bannersProto.NotificationBanners { + banners = append(banners, agentsdk.BannerConfigFromProto(bannerProto)) + } + a.notificationBanners.Store(&banners) } } } 
@@ -757,15 +760,18 @@ func (a *agent) run() (retErr error) { // redial the coder server and retry. connMan := newAPIConnRoutineManager(a.gracefulCtx, a.hardCtx, a.logger, conn) - connMan.start("init service banner", gracefulShutdownBehaviorStop, + connMan.start("init notification banners", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { aAPI := proto.NewDRPCAgentClient(conn) - sbp, err := aAPI.GetServiceBanner(ctx, &proto.GetServiceBannerRequest{}) + bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) if err != nil { return xerrors.Errorf("fetch service banner: %w", err) } - serviceBanner := agentsdk.ServiceBannerFromProto(sbp) - a.serviceBanner.Store(&serviceBanner) + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) + for _, bannerProto := range bannersProto.NotificationBanners { + banners = append(banners, agentsdk.BannerConfigFromProto(bannerProto)) + } + a.notificationBanners.Store(&banners) return nil }, ) diff --git a/agent/agent_test.go b/agent/agent_test.go index 45ebf7b709199..c674a29ec35f6 100644 --- a/agent/agent_test.go +++ b/agent/agent_test.go @@ -614,12 +614,12 @@ func TestAgent_Session_TTY_MOTD_Update(t *testing.T) { // Set new banner func and wait for the agent to call it to update the // banner. ready := make(chan struct{}, 2) - client.SetServiceBannerFunc(func() (codersdk.ServiceBannerConfig, error) { + client.SetNotificationBannersFunc(func() ([]codersdk.BannerConfig, error) { select { case ready <- struct{}{}: default: } - return test.banner, nil + return []codersdk.BannerConfig{test.banner}, nil }) <-ready <-ready // Wait for two updates to ensure the value has propagated. 
@@ -2193,15 +2193,15 @@ func setupAgentSSHClient(ctx context.Context, t *testing.T) *ssh.Client { func setupSSHSession( t *testing.T, manifest agentsdk.Manifest, - serviceBanner codersdk.ServiceBannerConfig, + banner codersdk.BannerConfig, prepareFS func(fs afero.Fs), opts ...func(*agenttest.Client, *agent.Options), ) *ssh.Session { ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() opts = append(opts, func(c *agenttest.Client, o *agent.Options) { - c.SetServiceBannerFunc(func() (codersdk.ServiceBannerConfig, error) { - return serviceBanner, nil + c.SetNotificationBannersFunc(func() ([]codersdk.BannerConfig, error) { + return []codersdk.BannerConfig{banner}, nil }) }) //nolint:dogsled diff --git a/agent/agentssh/agentssh.go b/agent/agentssh/agentssh.go index 48da6aa0296ca..4fcc6ab869c5b 100644 --- a/agent/agentssh/agentssh.go +++ b/agent/agentssh/agentssh.go @@ -63,7 +63,7 @@ type Config struct { // file will be displayed to the user upon login. MOTDFile func() string // ServiceBanner returns the configuration for the Coder service banner. - ServiceBanner func() *codersdk.ServiceBannerConfig + NotificationBanners func() *[]codersdk.BannerConfig // UpdateEnv updates the environment variables for the command to be // executed. It can be used to add, modify or replace environment variables. 
UpdateEnv func(current []string) (updated []string, err error) @@ -123,8 +123,8 @@ func NewServer(ctx context.Context, logger slog.Logger, prometheusRegistry *prom if config.MOTDFile == nil { config.MOTDFile = func() string { return "" } } - if config.ServiceBanner == nil { - config.ServiceBanner = func() *codersdk.ServiceBannerConfig { return &codersdk.ServiceBannerConfig{} } + if config.NotificationBanners == nil { + config.NotificationBanners = func() *[]codersdk.BannerConfig { return &[]codersdk.BannerConfig{} } } if config.WorkingDirectory == nil { config.WorkingDirectory = func() string { @@ -441,12 +441,15 @@ func (s *Server) startPTYSession(logger slog.Logger, session ptySession, magicTy session.DisablePTYEmulation() if isLoginShell(session.RawCommand()) { - serviceBanner := s.config.ServiceBanner() - if serviceBanner != nil { - err := showServiceBanner(session, serviceBanner) - if err != nil { - logger.Error(ctx, "agent failed to show service banner", slog.Error(err)) - s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, "yes", "service_banner").Add(1) + banners := s.config.NotificationBanners() + if banners != nil { + for _, banner := range *banners { + err := showNotificationBanner(session, banner) + if err != nil { + logger.Error(ctx, "agent failed to show service banner", slog.Error(err)) + s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, "yes", "notification_banner").Add(1) + break + } } } } @@ -891,9 +894,9 @@ func isQuietLogin(fs afero.Fs, rawCommand string) bool { return err == nil } -// showServiceBanner will write the service banner if enabled and not blank +// showNotificationBanner will write the service banner if enabled and not blank // along with a blank line for spacing. 
-func showServiceBanner(session io.Writer, banner *codersdk.ServiceBannerConfig) error { +func showNotificationBanner(session io.Writer, banner codersdk.BannerConfig) error { if banner.Enabled && banner.Message != "" { // The banner supports Markdown so we might want to parse it but Markdown is // still fairly readable in its raw form. diff --git a/agent/agenttest/client.go b/agent/agenttest/client.go index 22eba14483f92..b21a7444c6084 100644 --- a/agent/agenttest/client.go +++ b/agent/agenttest/client.go @@ -138,8 +138,8 @@ func (c *Client) GetStartupLogs() []agentsdk.Log { return c.logs } -func (c *Client) SetServiceBannerFunc(f func() (codersdk.ServiceBannerConfig, error)) { - c.fakeAgentAPI.SetServiceBannerFunc(f) +func (c *Client) SetNotificationBannersFunc(f func() ([]codersdk.ServiceBannerConfig, error)) { + c.fakeAgentAPI.SetNotificationBannersFunc(f) } func (c *Client) PushDERPMapUpdate(update *tailcfg.DERPMap) error { @@ -171,31 +171,39 @@ type FakeAgentAPI struct { lifecycleStates []codersdk.WorkspaceAgentLifecycle metadata map[string]agentsdk.Metadata - getServiceBannerFunc func() (codersdk.ServiceBannerConfig, error) + getNotificationBannersFunc func() ([]codersdk.BannerConfig, error) } func (f *FakeAgentAPI) GetManifest(context.Context, *agentproto.GetManifestRequest) (*agentproto.Manifest, error) { return f.manifest, nil } -func (f *FakeAgentAPI) SetServiceBannerFunc(fn func() (codersdk.ServiceBannerConfig, error)) { +func (*FakeAgentAPI) GetServiceBanner(context.Context, *agentproto.GetServiceBannerRequest) (*agentproto.ServiceBanner, error) { + return &agentproto.ServiceBanner{}, nil +} + +func (f *FakeAgentAPI) SetNotificationBannersFunc(fn func() ([]codersdk.BannerConfig, error)) { f.Lock() defer f.Unlock() - f.getServiceBannerFunc = fn - f.logger.Info(context.Background(), "updated ServiceBannerFunc") + f.getNotificationBannersFunc = fn + f.logger.Info(context.Background(), "updated notification banners") } -func (f *FakeAgentAPI) 
GetServiceBanner(context.Context, *agentproto.GetServiceBannerRequest) (*agentproto.ServiceBanner, error) { +func (f *FakeAgentAPI) GetNotificationBanners(context.Context, *agentproto.GetNotificationBannersRequest) (*agentproto.GetNotificationBannersResponse, error) { f.Lock() defer f.Unlock() - if f.getServiceBannerFunc == nil { - return &agentproto.ServiceBanner{}, nil + if f.getNotificationBannersFunc == nil { + return &agentproto.GetNotificationBannersResponse{NotificationBanners: []*agentproto.BannerConfig{}}, nil } - sb, err := f.getServiceBannerFunc() + banners, err := f.getNotificationBannersFunc() if err != nil { return nil, err } - return agentsdk.ProtoFromServiceBanner(sb), nil + bannersProto := make([]*agentproto.BannerConfig, 0, len(banners)) + for _, banner := range banners { + bannersProto = append(bannersProto, agentsdk.ProtoFromBannerConfig(banner)) + } + return &agentproto.GetNotificationBannersResponse{NotificationBanners: bannersProto}, nil } func (f *FakeAgentAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsRequest) (*agentproto.UpdateStatsResponse, error) { diff --git a/agent/proto/agent.pb.go b/agent/proto/agent.pb.go index 20bd20460275f..41e8d061054a5 100644 --- a/agent/proto/agent.pb.go +++ b/agent/proto/agent.pb.go @@ -1859,6 +1859,154 @@ func (x *BatchCreateLogsResponse) GetLogLimitExceeded() bool { return false } +type GetNotificationBannersRequest struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *GetNotificationBannersRequest) Reset() { + *x = GetNotificationBannersRequest{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_proto_agent_proto_msgTypes[22] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetNotificationBannersRequest) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetNotificationBannersRequest) ProtoMessage() {} + +func (x *GetNotificationBannersRequest) 
ProtoReflect() protoreflect.Message { + mi := &file_agent_proto_agent_proto_msgTypes[22] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetNotificationBannersRequest.ProtoReflect.Descriptor instead. +func (*GetNotificationBannersRequest) Descriptor() ([]byte, []int) { + return file_agent_proto_agent_proto_rawDescGZIP(), []int{22} +} + +type GetNotificationBannersResponse struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + NotificationBanners []*BannerConfig `protobuf:"bytes,1,rep,name=notification_banners,json=notificationBanners,proto3" json:"notification_banners,omitempty"` +} + +func (x *GetNotificationBannersResponse) Reset() { + *x = GetNotificationBannersResponse{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_proto_agent_proto_msgTypes[23] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *GetNotificationBannersResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*GetNotificationBannersResponse) ProtoMessage() {} + +func (x *GetNotificationBannersResponse) ProtoReflect() protoreflect.Message { + mi := &file_agent_proto_agent_proto_msgTypes[23] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use GetNotificationBannersResponse.ProtoReflect.Descriptor instead. 
+func (*GetNotificationBannersResponse) Descriptor() ([]byte, []int) { + return file_agent_proto_agent_proto_rawDescGZIP(), []int{23} +} + +func (x *GetNotificationBannersResponse) GetNotificationBanners() []*BannerConfig { + if x != nil { + return x.NotificationBanners + } + return nil +} + +type BannerConfig struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Enabled bool `protobuf:"varint,1,opt,name=enabled,proto3" json:"enabled,omitempty"` + Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` + BackgroundColor string `protobuf:"bytes,3,opt,name=background_color,json=backgroundColor,proto3" json:"background_color,omitempty"` +} + +func (x *BannerConfig) Reset() { + *x = BannerConfig{} + if protoimpl.UnsafeEnabled { + mi := &file_agent_proto_agent_proto_msgTypes[24] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *BannerConfig) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*BannerConfig) ProtoMessage() {} + +func (x *BannerConfig) ProtoReflect() protoreflect.Message { + mi := &file_agent_proto_agent_proto_msgTypes[24] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use BannerConfig.ProtoReflect.Descriptor instead. 
+func (*BannerConfig) Descriptor() ([]byte, []int) { + return file_agent_proto_agent_proto_rawDescGZIP(), []int{24} +} + +func (x *BannerConfig) GetEnabled() bool { + if x != nil { + return x.Enabled + } + return false +} + +func (x *BannerConfig) GetMessage() string { + if x != nil { + return x.Message + } + return "" +} + +func (x *BannerConfig) GetBackgroundColor() string { + if x != nil { + return x.BackgroundColor + } + return "" +} + type WorkspaceApp_Healthcheck struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1872,7 +2020,7 @@ type WorkspaceApp_Healthcheck struct { func (x *WorkspaceApp_Healthcheck) Reset() { *x = WorkspaceApp_Healthcheck{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[22] + mi := &file_agent_proto_agent_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1885,7 +2033,7 @@ func (x *WorkspaceApp_Healthcheck) String() string { func (*WorkspaceApp_Healthcheck) ProtoMessage() {} func (x *WorkspaceApp_Healthcheck) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[22] + mi := &file_agent_proto_agent_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1936,7 +2084,7 @@ type WorkspaceAgentMetadata_Result struct { func (x *WorkspaceAgentMetadata_Result) Reset() { *x = WorkspaceAgentMetadata_Result{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[23] + mi := &file_agent_proto_agent_proto_msgTypes[26] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1949,7 +2097,7 @@ func (x *WorkspaceAgentMetadata_Result) String() string { func (*WorkspaceAgentMetadata_Result) ProtoMessage() {} func (x *WorkspaceAgentMetadata_Result) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[23] + mi := 
&file_agent_proto_agent_proto_msgTypes[26] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2008,7 +2156,7 @@ type WorkspaceAgentMetadata_Description struct { func (x *WorkspaceAgentMetadata_Description) Reset() { *x = WorkspaceAgentMetadata_Description{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[24] + mi := &file_agent_proto_agent_proto_msgTypes[27] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2021,7 +2169,7 @@ func (x *WorkspaceAgentMetadata_Description) String() string { func (*WorkspaceAgentMetadata_Description) ProtoMessage() {} func (x *WorkspaceAgentMetadata_Description) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[24] + mi := &file_agent_proto_agent_proto_msgTypes[27] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2086,7 +2234,7 @@ type Stats_Metric struct { func (x *Stats_Metric) Reset() { *x = Stats_Metric{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[27] + mi := &file_agent_proto_agent_proto_msgTypes[30] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2099,7 +2247,7 @@ func (x *Stats_Metric) String() string { func (*Stats_Metric) ProtoMessage() {} func (x *Stats_Metric) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[27] + mi := &file_agent_proto_agent_proto_msgTypes[30] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2155,7 +2303,7 @@ type Stats_Metric_Label struct { func (x *Stats_Metric_Label) Reset() { *x = Stats_Metric_Label{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[28] + mi := &file_agent_proto_agent_proto_msgTypes[31] ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2168,7 +2316,7 @@ func (x *Stats_Metric_Label) String() string { func (*Stats_Metric_Label) ProtoMessage() {} func (x *Stats_Metric_Label) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[28] + mi := &file_agent_proto_agent_proto_msgTypes[31] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2210,7 +2358,7 @@ type BatchUpdateAppHealthRequest_HealthUpdate struct { func (x *BatchUpdateAppHealthRequest_HealthUpdate) Reset() { *x = BatchUpdateAppHealthRequest_HealthUpdate{} if protoimpl.UnsafeEnabled { - mi := &file_agent_proto_agent_proto_msgTypes[29] + mi := &file_agent_proto_agent_proto_msgTypes[32] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -2223,7 +2371,7 @@ func (x *BatchUpdateAppHealthRequest_HealthUpdate) String() string { func (*BatchUpdateAppHealthRequest_HealthUpdate) ProtoMessage() {} func (x *BatchUpdateAppHealthRequest_HealthUpdate) ProtoReflect() protoreflect.Message { - mi := &file_agent_proto_agent_proto_msgTypes[29] + mi := &file_agent_proto_agent_proto_msgTypes[32] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -2594,64 +2742,87 @@ var file_agent_proto_agent_proto_rawDesc = []byte{ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x65, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x6f, 0x67, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x45, 0x78, 0x63, 0x65, - 0x65, 0x64, 0x65, 0x64, 0x2a, 0x63, 0x0a, 0x09, 0x41, 0x70, 0x70, 0x48, 0x65, 0x61, 0x6c, 0x74, - 0x68, 0x12, 0x1a, 0x0a, 0x16, 0x41, 0x50, 0x50, 0x5f, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x5f, - 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 
0x00, 0x12, 0x0c, 0x0a, - 0x08, 0x44, 0x49, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x12, 0x10, 0x0a, 0x0c, 0x49, - 0x4e, 0x49, 0x54, 0x49, 0x41, 0x4c, 0x49, 0x5a, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x12, 0x0b, 0x0a, - 0x07, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x03, 0x12, 0x0d, 0x0a, 0x09, 0x55, 0x4e, - 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x04, 0x32, 0xf6, 0x05, 0x0a, 0x05, 0x41, 0x67, - 0x65, 0x6e, 0x74, 0x12, 0x4b, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, - 0x73, 0x74, 0x12, 0x22, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, - 0x2e, 0x76, 0x32, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, - 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, - 0x12, 0x5a, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x61, - 0x6e, 0x6e, 0x65, 0x72, 0x12, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, - 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, - 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, - 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x53, - 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x12, 0x56, 0x0a, 0x0b, - 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x22, 0x2e, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x55, 0x70, 0x64, - 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x23, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, - 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 
0x73, 0x65, 0x12, 0x54, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4c, 0x69, - 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, - 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4c, - 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x19, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, - 0x2e, 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x72, 0x0a, 0x15, 0x42, 0x61, - 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x48, 0x65, 0x61, 0x6c, - 0x74, 0x68, 0x73, 0x12, 0x2b, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, + 0x65, 0x64, 0x65, 0x64, 0x22, 0x1f, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x71, 0x0a, 0x1e, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, + 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x14, 0x6e, 0x6f, 0x74, 0x69, 0x66, + 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x52, 0x13, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x22, 0x6d, 0x0a, 0x0c, 0x42, 0x61, 0x6e, 0x6e, + 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, + 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, + 0x65, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 
0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x29, 0x0a, 0x10, + 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, 0x6e, 0x64, 0x5f, 0x63, 0x6f, 0x6c, 0x6f, 0x72, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x62, 0x61, 0x63, 0x6b, 0x67, 0x72, 0x6f, 0x75, + 0x6e, 0x64, 0x43, 0x6f, 0x6c, 0x6f, 0x72, 0x2a, 0x63, 0x0a, 0x09, 0x41, 0x70, 0x70, 0x48, 0x65, + 0x61, 0x6c, 0x74, 0x68, 0x12, 0x1a, 0x0a, 0x16, 0x41, 0x50, 0x50, 0x5f, 0x48, 0x45, 0x41, 0x4c, + 0x54, 0x48, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, + 0x12, 0x0c, 0x0a, 0x08, 0x44, 0x49, 0x53, 0x41, 0x42, 0x4c, 0x45, 0x44, 0x10, 0x01, 0x12, 0x10, + 0x0a, 0x0c, 0x49, 0x4e, 0x49, 0x54, 0x49, 0x41, 0x4c, 0x49, 0x5a, 0x49, 0x4e, 0x47, 0x10, 0x02, + 0x12, 0x0b, 0x0a, 0x07, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x03, 0x12, 0x0d, 0x0a, + 0x09, 0x55, 0x4e, 0x48, 0x45, 0x41, 0x4c, 0x54, 0x48, 0x59, 0x10, 0x04, 0x32, 0xef, 0x06, 0x0a, + 0x05, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x12, 0x4b, 0x0a, 0x0b, 0x47, 0x65, 0x74, 0x4d, 0x61, 0x6e, + 0x69, 0x66, 0x65, 0x73, 0x74, 0x12, 0x22, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, + 0x73, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x18, 0x2e, 0x63, 0x6f, 0x64, 0x65, + 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x4d, 0x61, 0x6e, 0x69, 0x66, + 0x65, 0x73, 0x74, 0x12, 0x5a, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, + 0x65, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x12, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, + 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x47, 0x65, 0x74, 0x53, 0x65, 0x72, 0x76, + 0x69, 0x63, 0x65, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x1a, 0x1d, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 
0x2e, 0x76, + 0x32, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x12, + 0x56, 0x0a, 0x0b, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x12, 0x22, + 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x23, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x2e, 0x76, 0x32, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x74, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x0f, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x26, 0x2e, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x19, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x2e, 0x76, 0x32, 0x2e, 0x4c, 0x69, 0x66, 0x65, 0x63, 0x79, 0x63, 0x6c, 0x65, 0x12, 0x72, 0x0a, + 0x15, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x48, + 0x65, 0x61, 0x6c, 0x74, 0x68, 0x73, 0x12, 0x2b, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, + 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, + 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x2c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, - 0x41, 0x70, 0x70, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 0x2c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, - 0x32, 0x2e, 0x42, 0x61, 
0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x41, 0x70, 0x70, - 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4e, - 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x75, 0x70, 0x12, - 0x24, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, - 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x75, 0x70, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, - 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x75, 0x70, 0x12, 0x6e, - 0x0a, 0x13, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2a, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, - 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x2b, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, - 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x62, - 0x0a, 0x0f, 0x42, 0x61, 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x67, - 0x73, 0x12, 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, - 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4c, 0x6f, - 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, - 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, - 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 
0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, - 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, - 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x41, 0x70, 0x70, 0x48, 0x65, 0x61, 0x6c, 0x74, 0x68, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x4e, 0x0a, 0x0d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, + 0x75, 0x70, 0x12, 0x24, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x2e, 0x76, 0x32, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x75, + 0x70, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, + 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x53, 0x74, 0x61, 0x72, 0x74, 0x75, + 0x70, 0x12, 0x6e, 0x0a, 0x13, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2a, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, + 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2b, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, + 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x55, 0x70, 0x64, 0x61, 0x74, + 0x65, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x62, 0x0a, 0x0f, 0x42, 0x61, 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x26, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, + 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 
0x2e, 0x42, 0x61, + 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x77, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, + 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x12, + 0x2d, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, + 0x2e, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, + 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, + 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x27, + 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, + 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x67, 0x65, 0x6e, + 0x74, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -2667,7 +2838,7 @@ func file_agent_proto_agent_proto_rawDescGZIP() []byte { } var file_agent_proto_agent_proto_enumTypes = make([]protoimpl.EnumInfo, 7) -var file_agent_proto_agent_proto_msgTypes = make([]protoimpl.MessageInfo, 30) +var file_agent_proto_agent_proto_msgTypes = make([]protoimpl.MessageInfo, 33) var file_agent_proto_agent_proto_goTypes = []interface{}{ (AppHealth)(0), // 0: coder.agent.v2.AppHealth (WorkspaceApp_SharingLevel)(0), // 1: coder.agent.v2.WorkspaceApp.SharingLevel @@ -2698,73 +2869,79 @@ var file_agent_proto_agent_proto_goTypes = []interface{}{ (*Log)(nil), // 26: coder.agent.v2.Log (*BatchCreateLogsRequest)(nil), // 27: coder.agent.v2.BatchCreateLogsRequest (*BatchCreateLogsResponse)(nil), // 28: coder.agent.v2.BatchCreateLogsResponse - 
(*WorkspaceApp_Healthcheck)(nil), // 29: coder.agent.v2.WorkspaceApp.Healthcheck - (*WorkspaceAgentMetadata_Result)(nil), // 30: coder.agent.v2.WorkspaceAgentMetadata.Result - (*WorkspaceAgentMetadata_Description)(nil), // 31: coder.agent.v2.WorkspaceAgentMetadata.Description - nil, // 32: coder.agent.v2.Manifest.EnvironmentVariablesEntry - nil, // 33: coder.agent.v2.Stats.ConnectionsByProtoEntry - (*Stats_Metric)(nil), // 34: coder.agent.v2.Stats.Metric - (*Stats_Metric_Label)(nil), // 35: coder.agent.v2.Stats.Metric.Label - (*BatchUpdateAppHealthRequest_HealthUpdate)(nil), // 36: coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate - (*durationpb.Duration)(nil), // 37: google.protobuf.Duration - (*proto.DERPMap)(nil), // 38: coder.tailnet.v2.DERPMap - (*timestamppb.Timestamp)(nil), // 39: google.protobuf.Timestamp + (*GetNotificationBannersRequest)(nil), // 29: coder.agent.v2.GetNotificationBannersRequest + (*GetNotificationBannersResponse)(nil), // 30: coder.agent.v2.GetNotificationBannersResponse + (*BannerConfig)(nil), // 31: coder.agent.v2.BannerConfig + (*WorkspaceApp_Healthcheck)(nil), // 32: coder.agent.v2.WorkspaceApp.Healthcheck + (*WorkspaceAgentMetadata_Result)(nil), // 33: coder.agent.v2.WorkspaceAgentMetadata.Result + (*WorkspaceAgentMetadata_Description)(nil), // 34: coder.agent.v2.WorkspaceAgentMetadata.Description + nil, // 35: coder.agent.v2.Manifest.EnvironmentVariablesEntry + nil, // 36: coder.agent.v2.Stats.ConnectionsByProtoEntry + (*Stats_Metric)(nil), // 37: coder.agent.v2.Stats.Metric + (*Stats_Metric_Label)(nil), // 38: coder.agent.v2.Stats.Metric.Label + (*BatchUpdateAppHealthRequest_HealthUpdate)(nil), // 39: coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate + (*durationpb.Duration)(nil), // 40: google.protobuf.Duration + (*proto.DERPMap)(nil), // 41: coder.tailnet.v2.DERPMap + (*timestamppb.Timestamp)(nil), // 42: google.protobuf.Timestamp } var file_agent_proto_agent_proto_depIdxs = []int32{ 1, // 0: 
coder.agent.v2.WorkspaceApp.sharing_level:type_name -> coder.agent.v2.WorkspaceApp.SharingLevel - 29, // 1: coder.agent.v2.WorkspaceApp.healthcheck:type_name -> coder.agent.v2.WorkspaceApp.Healthcheck + 32, // 1: coder.agent.v2.WorkspaceApp.healthcheck:type_name -> coder.agent.v2.WorkspaceApp.Healthcheck 2, // 2: coder.agent.v2.WorkspaceApp.health:type_name -> coder.agent.v2.WorkspaceApp.Health - 37, // 3: coder.agent.v2.WorkspaceAgentScript.timeout:type_name -> google.protobuf.Duration - 30, // 4: coder.agent.v2.WorkspaceAgentMetadata.result:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Result - 31, // 5: coder.agent.v2.WorkspaceAgentMetadata.description:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Description - 32, // 6: coder.agent.v2.Manifest.environment_variables:type_name -> coder.agent.v2.Manifest.EnvironmentVariablesEntry - 38, // 7: coder.agent.v2.Manifest.derp_map:type_name -> coder.tailnet.v2.DERPMap + 40, // 3: coder.agent.v2.WorkspaceAgentScript.timeout:type_name -> google.protobuf.Duration + 33, // 4: coder.agent.v2.WorkspaceAgentMetadata.result:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Result + 34, // 5: coder.agent.v2.WorkspaceAgentMetadata.description:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Description + 35, // 6: coder.agent.v2.Manifest.environment_variables:type_name -> coder.agent.v2.Manifest.EnvironmentVariablesEntry + 41, // 7: coder.agent.v2.Manifest.derp_map:type_name -> coder.tailnet.v2.DERPMap 8, // 8: coder.agent.v2.Manifest.scripts:type_name -> coder.agent.v2.WorkspaceAgentScript 7, // 9: coder.agent.v2.Manifest.apps:type_name -> coder.agent.v2.WorkspaceApp - 31, // 10: coder.agent.v2.Manifest.metadata:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Description - 33, // 11: coder.agent.v2.Stats.connections_by_proto:type_name -> coder.agent.v2.Stats.ConnectionsByProtoEntry - 34, // 12: coder.agent.v2.Stats.metrics:type_name -> coder.agent.v2.Stats.Metric + 34, // 10: 
coder.agent.v2.Manifest.metadata:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Description + 36, // 11: coder.agent.v2.Stats.connections_by_proto:type_name -> coder.agent.v2.Stats.ConnectionsByProtoEntry + 37, // 12: coder.agent.v2.Stats.metrics:type_name -> coder.agent.v2.Stats.Metric 14, // 13: coder.agent.v2.UpdateStatsRequest.stats:type_name -> coder.agent.v2.Stats - 37, // 14: coder.agent.v2.UpdateStatsResponse.report_interval:type_name -> google.protobuf.Duration + 40, // 14: coder.agent.v2.UpdateStatsResponse.report_interval:type_name -> google.protobuf.Duration 4, // 15: coder.agent.v2.Lifecycle.state:type_name -> coder.agent.v2.Lifecycle.State - 39, // 16: coder.agent.v2.Lifecycle.changed_at:type_name -> google.protobuf.Timestamp + 42, // 16: coder.agent.v2.Lifecycle.changed_at:type_name -> google.protobuf.Timestamp 17, // 17: coder.agent.v2.UpdateLifecycleRequest.lifecycle:type_name -> coder.agent.v2.Lifecycle - 36, // 18: coder.agent.v2.BatchUpdateAppHealthRequest.updates:type_name -> coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate + 39, // 18: coder.agent.v2.BatchUpdateAppHealthRequest.updates:type_name -> coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate 5, // 19: coder.agent.v2.Startup.subsystems:type_name -> coder.agent.v2.Startup.Subsystem 21, // 20: coder.agent.v2.UpdateStartupRequest.startup:type_name -> coder.agent.v2.Startup - 30, // 21: coder.agent.v2.Metadata.result:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Result + 33, // 21: coder.agent.v2.Metadata.result:type_name -> coder.agent.v2.WorkspaceAgentMetadata.Result 23, // 22: coder.agent.v2.BatchUpdateMetadataRequest.metadata:type_name -> coder.agent.v2.Metadata - 39, // 23: coder.agent.v2.Log.created_at:type_name -> google.protobuf.Timestamp + 42, // 23: coder.agent.v2.Log.created_at:type_name -> google.protobuf.Timestamp 6, // 24: coder.agent.v2.Log.level:type_name -> coder.agent.v2.Log.Level 26, // 25: coder.agent.v2.BatchCreateLogsRequest.logs:type_name -> 
coder.agent.v2.Log - 37, // 26: coder.agent.v2.WorkspaceApp.Healthcheck.interval:type_name -> google.protobuf.Duration - 39, // 27: coder.agent.v2.WorkspaceAgentMetadata.Result.collected_at:type_name -> google.protobuf.Timestamp - 37, // 28: coder.agent.v2.WorkspaceAgentMetadata.Description.interval:type_name -> google.protobuf.Duration - 37, // 29: coder.agent.v2.WorkspaceAgentMetadata.Description.timeout:type_name -> google.protobuf.Duration - 3, // 30: coder.agent.v2.Stats.Metric.type:type_name -> coder.agent.v2.Stats.Metric.Type - 35, // 31: coder.agent.v2.Stats.Metric.labels:type_name -> coder.agent.v2.Stats.Metric.Label - 0, // 32: coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate.health:type_name -> coder.agent.v2.AppHealth - 11, // 33: coder.agent.v2.Agent.GetManifest:input_type -> coder.agent.v2.GetManifestRequest - 13, // 34: coder.agent.v2.Agent.GetServiceBanner:input_type -> coder.agent.v2.GetServiceBannerRequest - 15, // 35: coder.agent.v2.Agent.UpdateStats:input_type -> coder.agent.v2.UpdateStatsRequest - 18, // 36: coder.agent.v2.Agent.UpdateLifecycle:input_type -> coder.agent.v2.UpdateLifecycleRequest - 19, // 37: coder.agent.v2.Agent.BatchUpdateAppHealths:input_type -> coder.agent.v2.BatchUpdateAppHealthRequest - 22, // 38: coder.agent.v2.Agent.UpdateStartup:input_type -> coder.agent.v2.UpdateStartupRequest - 24, // 39: coder.agent.v2.Agent.BatchUpdateMetadata:input_type -> coder.agent.v2.BatchUpdateMetadataRequest - 27, // 40: coder.agent.v2.Agent.BatchCreateLogs:input_type -> coder.agent.v2.BatchCreateLogsRequest - 10, // 41: coder.agent.v2.Agent.GetManifest:output_type -> coder.agent.v2.Manifest - 12, // 42: coder.agent.v2.Agent.GetServiceBanner:output_type -> coder.agent.v2.ServiceBanner - 16, // 43: coder.agent.v2.Agent.UpdateStats:output_type -> coder.agent.v2.UpdateStatsResponse - 17, // 44: coder.agent.v2.Agent.UpdateLifecycle:output_type -> coder.agent.v2.Lifecycle - 20, // 45: coder.agent.v2.Agent.BatchUpdateAppHealths:output_type 
-> coder.agent.v2.BatchUpdateAppHealthResponse - 21, // 46: coder.agent.v2.Agent.UpdateStartup:output_type -> coder.agent.v2.Startup - 25, // 47: coder.agent.v2.Agent.BatchUpdateMetadata:output_type -> coder.agent.v2.BatchUpdateMetadataResponse - 28, // 48: coder.agent.v2.Agent.BatchCreateLogs:output_type -> coder.agent.v2.BatchCreateLogsResponse - 41, // [41:49] is the sub-list for method output_type - 33, // [33:41] is the sub-list for method input_type - 33, // [33:33] is the sub-list for extension type_name - 33, // [33:33] is the sub-list for extension extendee - 0, // [0:33] is the sub-list for field type_name + 31, // 26: coder.agent.v2.GetNotificationBannersResponse.notification_banners:type_name -> coder.agent.v2.BannerConfig + 40, // 27: coder.agent.v2.WorkspaceApp.Healthcheck.interval:type_name -> google.protobuf.Duration + 42, // 28: coder.agent.v2.WorkspaceAgentMetadata.Result.collected_at:type_name -> google.protobuf.Timestamp + 40, // 29: coder.agent.v2.WorkspaceAgentMetadata.Description.interval:type_name -> google.protobuf.Duration + 40, // 30: coder.agent.v2.WorkspaceAgentMetadata.Description.timeout:type_name -> google.protobuf.Duration + 3, // 31: coder.agent.v2.Stats.Metric.type:type_name -> coder.agent.v2.Stats.Metric.Type + 38, // 32: coder.agent.v2.Stats.Metric.labels:type_name -> coder.agent.v2.Stats.Metric.Label + 0, // 33: coder.agent.v2.BatchUpdateAppHealthRequest.HealthUpdate.health:type_name -> coder.agent.v2.AppHealth + 11, // 34: coder.agent.v2.Agent.GetManifest:input_type -> coder.agent.v2.GetManifestRequest + 13, // 35: coder.agent.v2.Agent.GetServiceBanner:input_type -> coder.agent.v2.GetServiceBannerRequest + 15, // 36: coder.agent.v2.Agent.UpdateStats:input_type -> coder.agent.v2.UpdateStatsRequest + 18, // 37: coder.agent.v2.Agent.UpdateLifecycle:input_type -> coder.agent.v2.UpdateLifecycleRequest + 19, // 38: coder.agent.v2.Agent.BatchUpdateAppHealths:input_type -> coder.agent.v2.BatchUpdateAppHealthRequest + 22, // 39: 
coder.agent.v2.Agent.UpdateStartup:input_type -> coder.agent.v2.UpdateStartupRequest + 24, // 40: coder.agent.v2.Agent.BatchUpdateMetadata:input_type -> coder.agent.v2.BatchUpdateMetadataRequest + 27, // 41: coder.agent.v2.Agent.BatchCreateLogs:input_type -> coder.agent.v2.BatchCreateLogsRequest + 29, // 42: coder.agent.v2.Agent.GetNotificationBanners:input_type -> coder.agent.v2.GetNotificationBannersRequest + 10, // 43: coder.agent.v2.Agent.GetManifest:output_type -> coder.agent.v2.Manifest + 12, // 44: coder.agent.v2.Agent.GetServiceBanner:output_type -> coder.agent.v2.ServiceBanner + 16, // 45: coder.agent.v2.Agent.UpdateStats:output_type -> coder.agent.v2.UpdateStatsResponse + 17, // 46: coder.agent.v2.Agent.UpdateLifecycle:output_type -> coder.agent.v2.Lifecycle + 20, // 47: coder.agent.v2.Agent.BatchUpdateAppHealths:output_type -> coder.agent.v2.BatchUpdateAppHealthResponse + 21, // 48: coder.agent.v2.Agent.UpdateStartup:output_type -> coder.agent.v2.Startup + 25, // 49: coder.agent.v2.Agent.BatchUpdateMetadata:output_type -> coder.agent.v2.BatchUpdateMetadataResponse + 28, // 50: coder.agent.v2.Agent.BatchCreateLogs:output_type -> coder.agent.v2.BatchCreateLogsResponse + 30, // 51: coder.agent.v2.Agent.GetNotificationBanners:output_type -> coder.agent.v2.GetNotificationBannersResponse + 43, // [43:52] is the sub-list for method output_type + 34, // [34:43] is the sub-list for method input_type + 34, // [34:34] is the sub-list for extension type_name + 34, // [34:34] is the sub-list for extension extendee + 0, // [0:34] is the sub-list for field type_name } func init() { file_agent_proto_agent_proto_init() } @@ -3038,7 +3215,7 @@ func file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WorkspaceApp_Healthcheck); i { + switch v := v.(*GetNotificationBannersRequest); i { case 0: return &v.state case 1: @@ -3050,7 +3227,7 @@ func 
file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WorkspaceAgentMetadata_Result); i { + switch v := v.(*GetNotificationBannersResponse); i { case 0: return &v.state case 1: @@ -3062,7 +3239,31 @@ func file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*WorkspaceAgentMetadata_Description); i { + switch v := v.(*BannerConfig); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_proto_agent_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WorkspaceApp_Healthcheck); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_proto_agent_proto_msgTypes[26].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WorkspaceAgentMetadata_Result); i { case 0: return &v.state case 1: @@ -3074,6 +3275,18 @@ func file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[27].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*WorkspaceAgentMetadata_Description); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_agent_proto_agent_proto_msgTypes[30].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Stats_Metric); i { case 0: return &v.state @@ -3085,7 +3298,7 @@ func file_agent_proto_agent_proto_init() { return nil } } - file_agent_proto_agent_proto_msgTypes[28].Exporter = func(v interface{}, i int) interface{} { + file_agent_proto_agent_proto_msgTypes[31].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Stats_Metric_Label); i { case 0: return &v.state @@ -3097,7 +3310,7 @@ func 
file_agent_proto_agent_proto_init() { return nil } } - file_agent_proto_agent_proto_msgTypes[29].Exporter = func(v interface{}, i int) interface{} { + file_agent_proto_agent_proto_msgTypes[32].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*BatchUpdateAppHealthRequest_HealthUpdate); i { case 0: return &v.state @@ -3116,7 +3329,7 @@ func file_agent_proto_agent_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_agent_proto_agent_proto_rawDesc, NumEnums: 7, - NumMessages: 30, + NumMessages: 33, NumExtensions: 0, NumServices: 1, }, diff --git a/agent/proto/agent.proto b/agent/proto/agent.proto index f09c836446a04..8432fe8ef7f2b 100644 --- a/agent/proto/agent.proto +++ b/agent/proto/agent.proto @@ -251,6 +251,18 @@ message BatchCreateLogsResponse { bool log_limit_exceeded = 1; } +message GetNotificationBannersRequest {} + +message GetNotificationBannersResponse { + repeated BannerConfig notification_banners = 1; +} + +message BannerConfig { + bool enabled = 1; + string message = 2; + string background_color = 3; +} + service Agent { rpc GetManifest(GetManifestRequest) returns (Manifest); rpc GetServiceBanner(GetServiceBannerRequest) returns (ServiceBanner); @@ -260,4 +272,5 @@ service Agent { rpc UpdateStartup(UpdateStartupRequest) returns (Startup); rpc BatchUpdateMetadata(BatchUpdateMetadataRequest) returns (BatchUpdateMetadataResponse); rpc BatchCreateLogs(BatchCreateLogsRequest) returns (BatchCreateLogsResponse); + rpc GetNotificationBanners(GetNotificationBannersRequest) returns (GetNotificationBannersResponse); } diff --git a/agent/proto/agent_drpc.pb.go b/agent/proto/agent_drpc.pb.go index 4bbf980522dd1..0003a1fa4568a 100644 --- a/agent/proto/agent_drpc.pb.go +++ b/agent/proto/agent_drpc.pb.go @@ -46,6 +46,7 @@ type DRPCAgentClient interface { UpdateStartup(ctx context.Context, in *UpdateStartupRequest) (*Startup, error) BatchUpdateMetadata(ctx context.Context, in *BatchUpdateMetadataRequest) 
(*BatchUpdateMetadataResponse, error) BatchCreateLogs(ctx context.Context, in *BatchCreateLogsRequest) (*BatchCreateLogsResponse, error) + GetNotificationBanners(ctx context.Context, in *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) } type drpcAgentClient struct { @@ -130,6 +131,15 @@ func (c *drpcAgentClient) BatchCreateLogs(ctx context.Context, in *BatchCreateLo return out, nil } +func (c *drpcAgentClient) GetNotificationBanners(ctx context.Context, in *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) { + out := new(GetNotificationBannersResponse) + err := c.cc.Invoke(ctx, "/coder.agent.v2.Agent/GetNotificationBanners", drpcEncoding_File_agent_proto_agent_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + type DRPCAgentServer interface { GetManifest(context.Context, *GetManifestRequest) (*Manifest, error) GetServiceBanner(context.Context, *GetServiceBannerRequest) (*ServiceBanner, error) @@ -139,6 +149,7 @@ type DRPCAgentServer interface { UpdateStartup(context.Context, *UpdateStartupRequest) (*Startup, error) BatchUpdateMetadata(context.Context, *BatchUpdateMetadataRequest) (*BatchUpdateMetadataResponse, error) BatchCreateLogs(context.Context, *BatchCreateLogsRequest) (*BatchCreateLogsResponse, error) + GetNotificationBanners(context.Context, *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) } type DRPCAgentUnimplementedServer struct{} @@ -175,9 +186,13 @@ func (s *DRPCAgentUnimplementedServer) BatchCreateLogs(context.Context, *BatchCr return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } +func (s *DRPCAgentUnimplementedServer) GetNotificationBanners(context.Context, *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + type DRPCAgentDescription struct{} -func (DRPCAgentDescription) NumMethods() int { return 8 } +func 
(DRPCAgentDescription) NumMethods() int { return 9 } func (DRPCAgentDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { @@ -253,6 +268,15 @@ func (DRPCAgentDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, in1.(*BatchCreateLogsRequest), ) }, DRPCAgentServer.BatchCreateLogs, true + case 8: + return "/coder.agent.v2.Agent/GetNotificationBanners", drpcEncoding_File_agent_proto_agent_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCAgentServer). + GetNotificationBanners( + ctx, + in1.(*GetNotificationBannersRequest), + ) + }, DRPCAgentServer.GetNotificationBanners, true default: return "", nil, nil, nil, false } @@ -389,3 +413,19 @@ func (x *drpcAgent_BatchCreateLogsStream) SendAndClose(m *BatchCreateLogsRespons } return x.CloseSend() } + +type DRPCAgent_GetNotificationBannersStream interface { + drpc.Stream + SendAndClose(*GetNotificationBannersResponse) error +} + +type drpcAgent_GetNotificationBannersStream struct { + drpc.Stream +} + +func (x *drpcAgent_GetNotificationBannersStream) SendAndClose(m *GetNotificationBannersResponse) error { + if err := x.MsgSend(m, drpcEncoding_File_agent_proto_agent_proto{}); err != nil { + return err + } + return x.CloseSend() +} diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go index acfe9145b2ad0..fa8563a141a45 100644 --- a/coderd/agentapi/api.go +++ b/coderd/agentapi/api.go @@ -35,7 +35,7 @@ import ( type API struct { opts Options *ManifestAPI - *ServiceBannerAPI + *NotificationBannerAPI *StatsAPI *LifecycleAPI *AppsAPI @@ -107,7 +107,7 @@ func New(opts Options) *API { }, } - api.ServiceBannerAPI = &ServiceBannerAPI{ + api.NotificationBannerAPI = &NotificationBannerAPI{ appearanceFetcher: opts.AppearanceFetcher, } diff --git a/coderd/agentapi/notification_banners.go b/coderd/agentapi/notification_banners.go new file mode 100644 index 0000000000000..ab4e7dda96741 --- /dev/null +++ 
b/coderd/agentapi/notification_banners.go @@ -0,0 +1,39 @@ +package agentapi + +import ( + "context" + "sync/atomic" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/agent/proto" + "github.com/coder/coder/v2/coderd/appearance" + "github.com/coder/coder/v2/codersdk/agentsdk" +) + +type NotificationBannerAPI struct { + appearanceFetcher *atomic.Pointer[appearance.Fetcher] +} + +// Deprecated: GetServiceBanner has been deprecated in favor of GetNotificationBanners. +func (a *NotificationBannerAPI) GetServiceBanner(ctx context.Context, _ *proto.GetServiceBannerRequest) (*proto.ServiceBanner, error) { + cfg, err := (*a.appearanceFetcher.Load()).Fetch(ctx) + if err != nil { + return nil, xerrors.Errorf("fetch appearance: %w", err) + } + return agentsdk.ProtoFromServiceBanner(cfg.ServiceBanner), nil +} + +func (a *NotificationBannerAPI) GetNotificationBanners(ctx context.Context, _ *proto.GetNotificationBannersRequest) (*proto.GetNotificationBannersResponse, error) { + cfg, err := (*a.appearanceFetcher.Load()).Fetch(ctx) + if err != nil { + return nil, xerrors.Errorf("fetch appearance: %w", err) + } + banners := make([]*proto.BannerConfig, 0, len(cfg.NotificationBanners)) + for _, banner := range cfg.NotificationBanners { + banners = append(banners, agentsdk.ProtoFromBannerConfig(banner)) + } + return &proto.GetNotificationBannersResponse{ + NotificationBanners: banners, + }, nil +} diff --git a/coderd/agentapi/servicebanner_internal_test.go b/coderd/agentapi/notification_banners_internal_test.go similarity index 57% rename from coderd/agentapi/servicebanner_internal_test.go rename to coderd/agentapi/notification_banners_internal_test.go index 6098d7df5f3d9..87f4df2d21764 100644 --- a/coderd/agentapi/servicebanner_internal_test.go +++ b/coderd/agentapi/notification_banners_internal_test.go @@ -11,36 +11,30 @@ import ( agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/appearance" "github.com/coder/coder/v2/codersdk" + 
"github.com/coder/coder/v2/codersdk/agentsdk" ) -func TestGetServiceBanner(t *testing.T) { +func TestGetNotificationBanners(t *testing.T) { t.Parallel() t.Run("OK", func(t *testing.T) { t.Parallel() - cfg := codersdk.ServiceBannerConfig{ + cfg := []codersdk.BannerConfig{{ Enabled: true, - Message: "hello world", - BackgroundColor: "#000000", - } + Message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", + BackgroundColor: "#00FF00", + }} - var ff appearance.Fetcher = fakeFetcher{cfg: codersdk.AppearanceConfig{ServiceBanner: cfg}} + var ff appearance.Fetcher = fakeFetcher{cfg: codersdk.AppearanceConfig{NotificationBanners: cfg}} ptr := atomic.Pointer[appearance.Fetcher]{} ptr.Store(&ff) - api := &ServiceBannerAPI{ - appearanceFetcher: &ptr, - } - - resp, err := api.GetServiceBanner(context.Background(), &agentproto.GetServiceBannerRequest{}) + api := &NotificationBannerAPI{appearanceFetcher: &ptr} + resp, err := api.GetNotificationBanners(context.Background(), &agentproto.GetNotificationBannersRequest{}) require.NoError(t, err) - - require.Equal(t, &agentproto.ServiceBanner{ - Enabled: cfg.Enabled, - Message: cfg.Message, - BackgroundColor: cfg.BackgroundColor, - }, resp) + require.Len(t, resp.NotificationBanners, 1) + require.Equal(t, cfg[0], agentsdk.BannerConfigFromProto(resp.NotificationBanners[0])) }) t.Run("FetchError", func(t *testing.T) { @@ -51,11 +45,8 @@ func TestGetServiceBanner(t *testing.T) { ptr := atomic.Pointer[appearance.Fetcher]{} ptr.Store(&ff) - api := &ServiceBannerAPI{ - appearanceFetcher: &ptr, - } - - resp, err := api.GetServiceBanner(context.Background(), &agentproto.GetServiceBannerRequest{}) + api := &NotificationBannerAPI{appearanceFetcher: &ptr} + resp, err := api.GetNotificationBanners(context.Background(), &agentproto.GetNotificationBannersRequest{}) require.Error(t, err) require.ErrorIs(t, err, expectedErr) require.Nil(t, resp) diff --git a/coderd/agentapi/servicebanner.go b/coderd/agentapi/servicebanner.go deleted file 
mode 100644 index 2e835003c79a4..0000000000000 --- a/coderd/agentapi/servicebanner.go +++ /dev/null @@ -1,24 +0,0 @@ -package agentapi - -import ( - "context" - "sync/atomic" - - "golang.org/x/xerrors" - - "github.com/coder/coder/v2/agent/proto" - "github.com/coder/coder/v2/coderd/appearance" - "github.com/coder/coder/v2/codersdk/agentsdk" -) - -type ServiceBannerAPI struct { - appearanceFetcher *atomic.Pointer[appearance.Fetcher] -} - -func (a *ServiceBannerAPI) GetServiceBanner(ctx context.Context, _ *proto.GetServiceBannerRequest) (*proto.ServiceBanner, error) { - cfg, err := (*a.appearanceFetcher.Load()).Fetch(ctx) - if err != nil { - return nil, xerrors.Errorf("fetch appearance: %w", err) - } - return agentsdk.ProtoFromServiceBanner(cfg.ServiceBanner), nil -} diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 9e746d2df6abf..3d14f4ec72726 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8272,8 +8272,19 @@ const docTemplate = `{ "logo_url": { "type": "string" }, + "notification_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "service_banner": { - "$ref": "#/definitions/codersdk.ServiceBannerConfig" + "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.BannerConfig" + } + ] }, "support_links": { "type": "array", @@ -8530,6 +8541,20 @@ const docTemplate = `{ "AutomaticUpdatesNever" ] }, + "codersdk.BannerConfig": { + "type": "object", + "properties": { + "background_color": { + "type": "string" + }, + "enabled": { + "type": "boolean" + }, + "message": { + "type": "string" + } + } + }, "codersdk.BuildInfoResponse": { "type": "object", "properties": { @@ -11060,20 +11085,6 @@ const docTemplate = `{ } } }, - "codersdk.ServiceBannerConfig": { - "type": "object", - "properties": { - "background_color": { - "type": "string" - }, - "enabled": { - "type": "boolean" - }, - "message": { - "type": "string" - 
} - } - }, "codersdk.SessionCountDeploymentStats": { "type": "object", "properties": { @@ -11906,8 +11917,19 @@ const docTemplate = `{ "logo_url": { "type": "string" }, + "notification_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "service_banner": { - "$ref": "#/definitions/codersdk.ServiceBannerConfig" + "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.BannerConfig" + } + ] } } }, diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index d0e60d65aabfe..9f6a1833e995d 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7341,8 +7341,19 @@ "logo_url": { "type": "string" }, + "notification_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "service_banner": { - "$ref": "#/definitions/codersdk.ServiceBannerConfig" + "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.BannerConfig" + } + ] }, "support_links": { "type": "array", @@ -7588,6 +7599,20 @@ "enum": ["always", "never"], "x-enum-varnames": ["AutomaticUpdatesAlways", "AutomaticUpdatesNever"] }, + "codersdk.BannerConfig": { + "type": "object", + "properties": { + "background_color": { + "type": "string" + }, + "enabled": { + "type": "boolean" + }, + "message": { + "type": "string" + } + } + }, "codersdk.BuildInfoResponse": { "type": "object", "properties": { @@ -9960,20 +9985,6 @@ } } }, - "codersdk.ServiceBannerConfig": { - "type": "object", - "properties": { - "background_color": { - "type": "string" - }, - "enabled": { - "type": "boolean" - }, - "message": { - "type": "string" - } - } - }, "codersdk.SessionCountDeploymentStats": { "type": "object", "properties": { @@ -10763,8 +10774,19 @@ "logo_url": { "type": "string" }, + "notification_banners": { + "type": "array", + "items": { + "$ref": 
"#/definitions/codersdk.BannerConfig" + } + }, "service_banner": { - "$ref": "#/definitions/codersdk.ServiceBannerConfig" + "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "allOf": [ + { + "$ref": "#/definitions/codersdk.BannerConfig" + } + ] } } }, diff --git a/coderd/appearance/appearance.go b/coderd/appearance/appearance.go index 1ac61dea21fe3..f9809036ec84b 100644 --- a/coderd/appearance/appearance.go +++ b/coderd/appearance/appearance.go @@ -32,7 +32,8 @@ type AGPLFetcher struct{} func (AGPLFetcher) Fetch(context.Context) (codersdk.AppearanceConfig, error) { return codersdk.AppearanceConfig{ - SupportLinks: DefaultSupportLinks, + NotificationBanners: []codersdk.BannerConfig{}, + SupportLinks: DefaultSupportLinks, }, nil } diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index a638b705a54f0..aaf623c7a70b5 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -1220,6 +1220,11 @@ func (q *querier) GetLogoURL(ctx context.Context) (string, error) { return q.db.GetLogoURL(ctx) } +func (q *querier) GetNotificationBanners(ctx context.Context) (string, error) { + // No authz checks + return q.db.GetNotificationBanners(ctx) +} + func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { return database.OAuth2ProviderApp{}, err @@ -1454,11 +1459,6 @@ func (q *querier) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Ti return q.db.GetReplicasUpdatedAfter(ctx, updatedAt) } -func (q *querier) GetServiceBanner(ctx context.Context) (string, error) { - // No authz checks - return q.db.GetServiceBanner(ctx) -} - func (q *querier) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err 
!= nil { return nil, err @@ -3364,6 +3364,13 @@ func (q *querier) UpsertLogoURL(ctx context.Context, value string) error { return q.db.UpsertLogoURL(ctx, value) } +func (q *querier) UpsertNotificationBanners(ctx context.Context, value string) error { + if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + return err + } + return q.db.UpsertNotificationBanners(ctx, value) +} + func (q *querier) UpsertOAuthSigningKey(ctx context.Context, value string) error { if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { return err @@ -3382,13 +3389,6 @@ func (q *querier) UpsertProvisionerDaemon(ctx context.Context, arg database.Upse return q.db.UpsertProvisionerDaemon(ctx, arg) } -func (q *querier) UpsertServiceBanner(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { - return err - } - return q.db.UpsertServiceBanner(ctx, value) -} - func (q *querier) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetAgent{}, err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 7be33d58c8dda..48435a0141c64 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -525,7 +525,7 @@ func (s *MethodTestSuite) TestLicense() { s.Run("UpsertLogoURL", s.Subtest(func(db database.Store, check *expects) { check.Args("value").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) })) - s.Run("UpsertServiceBanner", s.Subtest(func(db database.Store, check *expects) { + s.Run("UpsertNotificationBanners", s.Subtest(func(db database.Store, check *expects) { check.Args("value").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) })) 
s.Run("GetLicenseByID", s.Subtest(func(db database.Store, check *expects) { @@ -556,8 +556,8 @@ func (s *MethodTestSuite) TestLicense() { require.NoError(s.T(), err) check.Args().Asserts().Returns("value") })) - s.Run("GetServiceBanner", s.Subtest(func(db database.Store, check *expects) { - err := db.UpsertServiceBanner(context.Background(), "value") + s.Run("GetNotificationBanners", s.Subtest(func(db database.Store, check *expects) { + err := db.UpsertNotificationBanners(context.Background(), "value") require.NoError(s.T(), err) check.Args().Asserts().Returns("value") })) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index fcc3140133c42..8a2ce25b34367 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -185,7 +185,7 @@ type data struct { deploymentID string derpMeshKey string lastUpdateCheck []byte - serviceBanner []byte + notificationBanners []byte healthSettings []byte applicationName string logoURL string @@ -2488,6 +2488,17 @@ func (q *FakeQuerier) GetLogoURL(_ context.Context) (string, error) { return q.logoURL, nil } +func (q *FakeQuerier) GetNotificationBanners(_ context.Context) (string, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + if q.notificationBanners == nil { + return "", sql.ErrNoRows + } + + return string(q.notificationBanners), nil +} + func (q *FakeQuerier) GetOAuth2ProviderAppByID(_ context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { q.mutex.Lock() defer q.mutex.Unlock() @@ -3027,17 +3038,6 @@ func (q *FakeQuerier) GetReplicasUpdatedAfter(_ context.Context, updatedAt time. 
return replicas, nil } -func (q *FakeQuerier) GetServiceBanner(_ context.Context) (string, error) { - q.mutex.RLock() - defer q.mutex.RUnlock() - - if q.serviceBanner == nil { - return "", sql.ErrNoRows - } - - return string(q.serviceBanner), nil -} - func (*FakeQuerier) GetTailnetAgents(context.Context, uuid.UUID) ([]database.TailnetAgent, error) { return nil, ErrUnimplemented } @@ -8251,6 +8251,14 @@ func (q *FakeQuerier) UpsertLogoURL(_ context.Context, data string) error { return nil } +func (q *FakeQuerier) UpsertNotificationBanners(_ context.Context, data string) error { + q.mutex.RLock() + defer q.mutex.RUnlock() + + q.notificationBanners = []byte(data) + return nil +} + func (q *FakeQuerier) UpsertOAuthSigningKey(_ context.Context, value string) error { q.mutex.Lock() defer q.mutex.Unlock() @@ -8298,14 +8306,6 @@ func (q *FakeQuerier) UpsertProvisionerDaemon(_ context.Context, arg database.Up return d, nil } -func (q *FakeQuerier) UpsertServiceBanner(_ context.Context, data string) error { - q.mutex.RLock() - defer q.mutex.RUnlock() - - q.serviceBanner = []byte(data) - return nil -} - func (*FakeQuerier) UpsertTailnetAgent(context.Context, database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { return database.TailnetAgent{}, ErrUnimplemented } diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 4cb81c1eded86..d92c60e8db09a 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -646,6 +646,13 @@ func (m metricsStore) GetLogoURL(ctx context.Context) (string, error) { return url, err } +func (m metricsStore) GetNotificationBanners(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetNotificationBanners(ctx) + m.queryLatencies.WithLabelValues("GetNotificationBanners").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) 
(database.OAuth2ProviderApp, error) { start := time.Now() r0, r1 := m.s.GetOAuth2ProviderAppByID(ctx, id) @@ -849,13 +856,6 @@ func (m metricsStore) GetReplicasUpdatedAfter(ctx context.Context, updatedAt tim return replicas, err } -func (m metricsStore) GetServiceBanner(ctx context.Context) (string, error) { - start := time.Now() - banner, err := m.s.GetServiceBanner(ctx) - m.queryLatencies.WithLabelValues("GetServiceBanner").Observe(time.Since(start).Seconds()) - return banner, err -} - func (m metricsStore) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { start := time.Now() r0, r1 := m.s.GetTailnetAgents(ctx, id) @@ -2186,6 +2186,13 @@ func (m metricsStore) UpsertLogoURL(ctx context.Context, value string) error { return r0 } +func (m metricsStore) UpsertNotificationBanners(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertNotificationBanners(ctx, value) + m.queryLatencies.WithLabelValues("UpsertNotificationBanners").Observe(time.Since(start).Seconds()) + return r0 +} + func (m metricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { start := time.Now() r0 := m.s.UpsertOAuthSigningKey(ctx, value) @@ -2200,13 +2207,6 @@ func (m metricsStore) UpsertProvisionerDaemon(ctx context.Context, arg database. 
return r0, r1 } -func (m metricsStore) UpsertServiceBanner(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertServiceBanner(ctx, value) - m.queryLatencies.WithLabelValues("UpsertServiceBanner").Observe(time.Since(start).Seconds()) - return r0 -} - func (m metricsStore) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { start := time.Now() r0, r1 := m.s.UpsertTailnetAgent(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 2bb62e8c92e84..e651c8301c933 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -1275,6 +1275,21 @@ func (mr *MockStoreMockRecorder) GetLogoURL(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLogoURL", reflect.TypeOf((*MockStore)(nil).GetLogoURL), arg0) } +// GetNotificationBanners mocks base method. +func (m *MockStore) GetNotificationBanners(arg0 context.Context) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetNotificationBanners", arg0) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetNotificationBanners indicates an expected call of GetNotificationBanners. +func (mr *MockStoreMockRecorder) GetNotificationBanners(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNotificationBanners", reflect.TypeOf((*MockStore)(nil).GetNotificationBanners), arg0) +} + // GetOAuth2ProviderAppByID mocks base method. 
func (m *MockStore) GetOAuth2ProviderAppByID(arg0 context.Context, arg1 uuid.UUID) (database.OAuth2ProviderApp, error) { m.ctrl.T.Helper() @@ -1710,21 +1725,6 @@ func (mr *MockStoreMockRecorder) GetReplicasUpdatedAfter(arg0, arg1 any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetReplicasUpdatedAfter", reflect.TypeOf((*MockStore)(nil).GetReplicasUpdatedAfter), arg0, arg1) } -// GetServiceBanner mocks base method. -func (m *MockStore) GetServiceBanner(arg0 context.Context) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetServiceBanner", arg0) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetServiceBanner indicates an expected call of GetServiceBanner. -func (mr *MockStoreMockRecorder) GetServiceBanner(arg0 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetServiceBanner", reflect.TypeOf((*MockStore)(nil).GetServiceBanner), arg0) -} - // GetTailnetAgents mocks base method. func (m *MockStore) GetTailnetAgents(arg0 context.Context, arg1 uuid.UUID) ([]database.TailnetAgent, error) { m.ctrl.T.Helper() @@ -4577,6 +4577,20 @@ func (mr *MockStoreMockRecorder) UpsertLogoURL(arg0, arg1 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertLogoURL", reflect.TypeOf((*MockStore)(nil).UpsertLogoURL), arg0, arg1) } +// UpsertNotificationBanners mocks base method. +func (m *MockStore) UpsertNotificationBanners(arg0 context.Context, arg1 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertNotificationBanners", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpsertNotificationBanners indicates an expected call of UpsertNotificationBanners. 
+func (mr *MockStoreMockRecorder) UpsertNotificationBanners(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertNotificationBanners", reflect.TypeOf((*MockStore)(nil).UpsertNotificationBanners), arg0, arg1) +} + // UpsertOAuthSigningKey mocks base method. func (m *MockStore) UpsertOAuthSigningKey(arg0 context.Context, arg1 string) error { m.ctrl.T.Helper() @@ -4606,20 +4620,6 @@ func (mr *MockStoreMockRecorder) UpsertProvisionerDaemon(arg0, arg1 any) *gomock return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertProvisionerDaemon", reflect.TypeOf((*MockStore)(nil).UpsertProvisionerDaemon), arg0, arg1) } -// UpsertServiceBanner mocks base method. -func (m *MockStore) UpsertServiceBanner(arg0 context.Context, arg1 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpsertServiceBanner", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpsertServiceBanner indicates an expected call of UpsertServiceBanner. -func (mr *MockStoreMockRecorder) UpsertServiceBanner(arg0, arg1 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertServiceBanner", reflect.TypeOf((*MockStore)(nil).UpsertServiceBanner), arg0, arg1) -} - // UpsertTailnetAgent mocks base method. 
func (m *MockStore) UpsertTailnetAgent(arg0 context.Context, arg1 database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { m.ctrl.T.Helper() diff --git a/coderd/database/migrations/000208_notification_banners.down.sql b/coderd/database/migrations/000208_notification_banners.down.sql new file mode 100644 index 0000000000000..30d149cb016b6 --- /dev/null +++ b/coderd/database/migrations/000208_notification_banners.down.sql @@ -0,0 +1 @@ +delete from site_configs where key = 'notification_banners'; diff --git a/coderd/database/migrations/000208_notification_banners.up.sql b/coderd/database/migrations/000208_notification_banners.up.sql new file mode 100644 index 0000000000000..8f846b16dd509 --- /dev/null +++ b/coderd/database/migrations/000208_notification_banners.up.sql @@ -0,0 +1,4 @@ +update site_configs SET + key = 'notification_banners', + value = concat('[', value, ']') +where key = 'service_banner'; diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 7d8f504cb50e7..405f86bf47688 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -135,6 +135,7 @@ type sqlcQuerier interface { GetLicenseByID(ctx context.Context, id int32) (License, error) GetLicenses(ctx context.Context) ([]License, error) GetLogoURL(ctx context.Context) (string, error) + GetNotificationBanners(ctx context.Context) (string, error) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderApp, error) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderAppCode, error) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (OAuth2ProviderAppCode, error) @@ -164,7 +165,6 @@ type sqlcQuerier interface { GetQuotaConsumedForUser(ctx context.Context, ownerID uuid.UUID) (int64, error) GetReplicaByID(ctx context.Context, id uuid.UUID) (Replica, error) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]Replica, error) - GetServiceBanner(ctx context.Context) (string, 
error) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]TailnetAgent, error) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]TailnetClient, error) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]TailnetPeer, error) @@ -421,9 +421,9 @@ type sqlcQuerier interface { UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error UpsertLastUpdateCheck(ctx context.Context, value string) error UpsertLogoURL(ctx context.Context, value string) error + UpsertNotificationBanners(ctx context.Context, value string) error UpsertOAuthSigningKey(ctx context.Context, value string) error UpsertProvisionerDaemon(ctx context.Context, arg UpsertProvisionerDaemonParams) (ProvisionerDaemon, error) - UpsertServiceBanner(ctx context.Context, value string) error UpsertTailnetAgent(ctx context.Context, arg UpsertTailnetAgentParams) (TailnetAgent, error) UpsertTailnetClient(ctx context.Context, arg UpsertTailnetClientParams) (TailnetClient, error) UpsertTailnetClientSubscription(ctx context.Context, arg UpsertTailnetClientSubscriptionParams) error diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 41171a7473cab..e0fba2dad35bd 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5615,23 +5615,23 @@ func (q *sqlQuerier) GetLogoURL(ctx context.Context) (string, error) { return value, err } -const getOAuthSigningKey = `-- name: GetOAuthSigningKey :one -SELECT value FROM site_configs WHERE key = 'oauth_signing_key' +const getNotificationBanners = `-- name: GetNotificationBanners :one +SELECT value FROM site_configs WHERE key = 'notification_banners' ` -func (q *sqlQuerier) GetOAuthSigningKey(ctx context.Context) (string, error) { - row := q.db.QueryRowContext(ctx, getOAuthSigningKey) +func (q *sqlQuerier) GetNotificationBanners(ctx context.Context) (string, error) { + row := q.db.QueryRowContext(ctx, getNotificationBanners) var value string err 
:= row.Scan(&value) return value, err } -const getServiceBanner = `-- name: GetServiceBanner :one -SELECT value FROM site_configs WHERE key = 'service_banner' +const getOAuthSigningKey = `-- name: GetOAuthSigningKey :one +SELECT value FROM site_configs WHERE key = 'oauth_signing_key' ` -func (q *sqlQuerier) GetServiceBanner(ctx context.Context) (string, error) { - row := q.db.QueryRowContext(ctx, getServiceBanner) +func (q *sqlQuerier) GetOAuthSigningKey(ctx context.Context) (string, error) { + row := q.db.QueryRowContext(ctx, getOAuthSigningKey) var value string err := row.Scan(&value) return value, err @@ -5728,23 +5728,23 @@ func (q *sqlQuerier) UpsertLogoURL(ctx context.Context, value string) error { return err } -const upsertOAuthSigningKey = `-- name: UpsertOAuthSigningKey :exec -INSERT INTO site_configs (key, value) VALUES ('oauth_signing_key', $1) -ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'oauth_signing_key' +const upsertNotificationBanners = `-- name: UpsertNotificationBanners :exec +INSERT INTO site_configs (key, value) VALUES ('notification_banners', $1) +ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'notification_banners' ` -func (q *sqlQuerier) UpsertOAuthSigningKey(ctx context.Context, value string) error { - _, err := q.db.ExecContext(ctx, upsertOAuthSigningKey, value) +func (q *sqlQuerier) UpsertNotificationBanners(ctx context.Context, value string) error { + _, err := q.db.ExecContext(ctx, upsertNotificationBanners, value) return err } -const upsertServiceBanner = `-- name: UpsertServiceBanner :exec -INSERT INTO site_configs (key, value) VALUES ('service_banner', $1) -ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'service_banner' +const upsertOAuthSigningKey = `-- name: UpsertOAuthSigningKey :exec +INSERT INTO site_configs (key, value) VALUES ('oauth_signing_key', $1) +ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'oauth_signing_key' ` -func (q *sqlQuerier) 
UpsertServiceBanner(ctx context.Context, value string) error { - _, err := q.db.ExecContext(ctx, upsertServiceBanner, value) +func (q *sqlQuerier) UpsertOAuthSigningKey(ctx context.Context, value string) error { + _, err := q.db.ExecContext(ctx, upsertOAuthSigningKey, value) return err } diff --git a/coderd/database/queries/siteconfig.sql b/coderd/database/queries/siteconfig.sql index a432b71e3a91d..b827c6e19e959 100644 --- a/coderd/database/queries/siteconfig.sql +++ b/coderd/database/queries/siteconfig.sql @@ -36,12 +36,12 @@ ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'last_update -- name: GetLastUpdateCheck :one SELECT value FROM site_configs WHERE key = 'last_update_check'; --- name: UpsertServiceBanner :exec -INSERT INTO site_configs (key, value) VALUES ('service_banner', $1) -ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'service_banner'; +-- name: UpsertNotificationBanners :exec +INSERT INTO site_configs (key, value) VALUES ('notification_banners', $1) +ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'notification_banners'; --- name: GetServiceBanner :one -SELECT value FROM site_configs WHERE key = 'service_banner'; +-- name: GetNotificationBanners :one +SELECT value FROM site_configs WHERE key = 'notification_banners'; -- name: UpsertLogoURL :exec INSERT INTO site_configs (key, value) VALUES ('logo_url', $1) diff --git a/codersdk/agentsdk/convert.go b/codersdk/agentsdk/convert.go index 8671d9e0b51e3..adfabd1510768 100644 --- a/codersdk/agentsdk/convert.go +++ b/codersdk/agentsdk/convert.go @@ -277,15 +277,15 @@ func ProtoFromApp(a codersdk.WorkspaceApp) (*proto.WorkspaceApp, error) { }, nil } -func ServiceBannerFromProto(sbp *proto.ServiceBanner) codersdk.ServiceBannerConfig { - return codersdk.ServiceBannerConfig{ +func ServiceBannerFromProto(sbp *proto.ServiceBanner) codersdk.BannerConfig { + return codersdk.BannerConfig{ Enabled: sbp.GetEnabled(), Message: sbp.GetMessage(), 
BackgroundColor: sbp.GetBackgroundColor(), } } -func ProtoFromServiceBanner(sb codersdk.ServiceBannerConfig) *proto.ServiceBanner { +func ProtoFromServiceBanner(sb codersdk.BannerConfig) *proto.ServiceBanner { return &proto.ServiceBanner{ Enabled: sb.Enabled, Message: sb.Message, @@ -293,6 +293,22 @@ func ProtoFromServiceBanner(sb codersdk.ServiceBannerConfig) *proto.ServiceBanne } } +func BannerConfigFromProto(sbp *proto.BannerConfig) codersdk.BannerConfig { + return codersdk.BannerConfig{ + Enabled: sbp.GetEnabled(), + Message: sbp.GetMessage(), + BackgroundColor: sbp.GetBackgroundColor(), + } +} + +func ProtoFromBannerConfig(sb codersdk.BannerConfig) *proto.BannerConfig { + return &proto.BannerConfig{ + Enabled: sb.Enabled, + Message: sb.Message, + BackgroundColor: sb.BackgroundColor, + } +} + func ProtoFromSubsystems(ss []codersdk.AgentSubsystem) ([]proto.Startup_Subsystem, error) { ret := make([]proto.Startup_Subsystem, len(ss)) for i, s := range ss { diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 2aa675727b72b..087ad660cbc68 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -2100,19 +2100,26 @@ func (c *Client) DeploymentStats(ctx context.Context) (DeploymentStats, error) { } type AppearanceConfig struct { - ApplicationName string `json:"application_name"` - LogoURL string `json:"logo_url"` - ServiceBanner ServiceBannerConfig `json:"service_banner"` - SupportLinks []LinkConfig `json:"support_links,omitempty"` + ApplicationName string `json:"application_name"` + LogoURL string `json:"logo_url"` + // Deprecated: ServiceBanner has been replaced by NotificationBanners. 
+ ServiceBanner BannerConfig `json:"service_banner"` + NotificationBanners []BannerConfig `json:"notification_banners"` + SupportLinks []LinkConfig `json:"support_links,omitempty"` } type UpdateAppearanceConfig struct { - ApplicationName string `json:"application_name"` - LogoURL string `json:"logo_url"` - ServiceBanner ServiceBannerConfig `json:"service_banner"` + ApplicationName string `json:"application_name"` + LogoURL string `json:"logo_url"` + // Deprecated: ServiceBanner has been replaced by NotificationBanners. + ServiceBanner BannerConfig `json:"service_banner"` + NotificationBanners []BannerConfig `json:"notification_banners"` } -type ServiceBannerConfig struct { +// Deprecated: ServiceBannerConfig has been renamed to BannerConfig. +type ServiceBannerConfig = BannerConfig + +type BannerConfig struct { Enabled bool `json:"enabled"` Message string `json:"message,omitempty"` BackgroundColor string `json:"background_color,omitempty"` diff --git a/docs/api/enterprise.md b/docs/api/enterprise.md index 0b05a9fffeee6..800e9e517196d 100644 --- a/docs/api/enterprise.md +++ b/docs/api/enterprise.md @@ -21,6 +21,13 @@ curl -X GET http://coder-server:8080/api/v2/appearance \ { "application_name": "string", "logo_url": "string", + "notification_banners": [ + { + "background_color": "string", + "enabled": true, + "message": "string" + } + ], "service_banner": { "background_color": "string", "enabled": true, @@ -64,6 +71,13 @@ curl -X PUT http://coder-server:8080/api/v2/appearance \ { "application_name": "string", "logo_url": "string", + "notification_banners": [ + { + "background_color": "string", + "enabled": true, + "message": "string" + } + ], "service_banner": { "background_color": "string", "enabled": true, @@ -86,6 +100,13 @@ curl -X PUT http://coder-server:8080/api/v2/appearance \ { "application_name": "string", "logo_url": "string", + "notification_banners": [ + { + "background_color": "string", + "enabled": true, + "message": "string" + } + ], "service_banner": 
{ "background_color": "string", "enabled": true, diff --git a/docs/api/schemas.md b/docs/api/schemas.md index 26b38a7c1ec78..a6462a14ca29c 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -751,6 +751,13 @@ { "application_name": "string", "logo_url": "string", + "notification_banners": [ + { + "background_color": "string", + "enabled": true, + "message": "string" + } + ], "service_banner": { "background_color": "string", "enabled": true, @@ -768,12 +775,13 @@ ### Properties -| Name | Type | Required | Restrictions | Description | -| ------------------ | ------------------------------------------------------------ | -------- | ------------ | ----------- | -| `application_name` | string | false | | | -| `logo_url` | string | false | | | -| `service_banner` | [codersdk.ServiceBannerConfig](#codersdkservicebannerconfig) | false | | | -| `support_links` | array of [codersdk.LinkConfig](#codersdklinkconfig) | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------- | ------------------------------------------------------- | -------- | ------------ | ------------------------------------------------------------------- | +| `application_name` | string | false | | | +| `logo_url` | string | false | | | +| `notification_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | +| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by NotificationBanners. 
| +| `support_links` | array of [codersdk.LinkConfig](#codersdklinkconfig) | false | | | ## codersdk.ArchiveTemplateVersionsRequest @@ -1172,6 +1180,24 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in | `always` | | `never` | +## codersdk.BannerConfig + +```json +{ + "background_color": "string", + "enabled": true, + "message": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| ------------------ | ------- | -------- | ------------ | ----------- | +| `background_color` | string | false | | | +| `enabled` | boolean | false | | | +| `message` | string | false | | | + ## codersdk.BuildInfoResponse ```json @@ -4264,24 +4290,6 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `ssh_config_options` | object | false | | | | » `[any property]` | string | false | | | -## codersdk.ServiceBannerConfig - -```json -{ - "background_color": "string", - "enabled": true, - "message": "string" -} -``` - -### Properties - -| Name | Type | Required | Restrictions | Description | -| ------------------ | ------- | -------- | ------------ | ----------- | -| `background_color` | string | false | | | -| `enabled` | boolean | false | | | -| `message` | string | false | | | - ## codersdk.SessionCountDeploymentStats ```json @@ -5174,6 +5182,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o { "application_name": "string", "logo_url": "string", + "notification_banners": [ + { + "background_color": "string", + "enabled": true, + "message": "string" + } + ], "service_banner": { "background_color": "string", "enabled": true, @@ -5184,11 +5199,12 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o ### Properties -| Name | Type | Required | Restrictions | Description | -| ------------------ | ------------------------------------------------------------ | -------- | ------------ | ----------- | -| `application_name` | string | false | | | -| `logo_url` | string | false | | | -| `service_banner` | [codersdk.ServiceBannerConfig](#codersdkservicebannerconfig) | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------- | ------------------------------------------------------- | -------- | ------------ | ------------------------------------------------------------------- | +| `application_name` | string | false | | | +| `logo_url` | string | false | | | +| `notification_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | +| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by NotificationBanners. | ## codersdk.UpdateCheckResponse diff --git a/enterprise/coderd/appearance.go b/enterprise/coderd/appearance.go index 70ef238d6056c..7029340672b6e 100644 --- a/enterprise/coderd/appearance.go +++ b/enterprise/coderd/appearance.go @@ -6,6 +6,7 @@ import ( "encoding/hex" "encoding/json" "errors" + "fmt" "net/http" "golang.org/x/sync/errgroup" @@ -53,9 +54,11 @@ func newAppearanceFetcher(store database.Store, links []codersdk.LinkConfig) agp func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfig, error) { var eg errgroup.Group - var applicationName string - var logoURL string - var serviceBannerJSON string + var ( + applicationName string + logoURL string + notificationBannersJSON string + ) eg.Go(func() (err error) { applicationName, err = f.database.GetApplicationName(ctx) if err != nil && !errors.Is(err, sql.ErrNoRows) { @@ -71,9 +74,9 @@ func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfi return nil }) eg.Go(func() (err error) { - serviceBannerJSON, err = 
f.database.GetServiceBanner(ctx) + notificationBannersJSON, err = f.database.GetNotificationBanners(ctx) if err != nil && !errors.Is(err, sql.ErrNoRows) { - return xerrors.Errorf("get service banner: %w", err) + return xerrors.Errorf("get notification banners: %w", err) } return nil }) @@ -83,21 +86,27 @@ func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfi } cfg := codersdk.AppearanceConfig{ - ApplicationName: applicationName, - LogoURL: logoURL, + ApplicationName: applicationName, + LogoURL: logoURL, + NotificationBanners: []codersdk.BannerConfig{}, + SupportLinks: agpl.DefaultSupportLinks, } - if serviceBannerJSON != "" { - err = json.Unmarshal([]byte(serviceBannerJSON), &cfg.ServiceBanner) + + if notificationBannersJSON != "" { + err = json.Unmarshal([]byte(notificationBannersJSON), &cfg.NotificationBanners) if err != nil { return codersdk.AppearanceConfig{}, xerrors.Errorf( - "unmarshal json: %w, raw: %s", err, serviceBannerJSON, + "unmarshal notification banners json: %w, raw: %s", err, notificationBannersJSON, ) } - } - if len(f.supportLinks) == 0 { - cfg.SupportLinks = agpl.DefaultSupportLinks - } else { + // Redundant, but improves compatibility with slightly mismatched agent versions. + // Maybe we can remove this after a grace period? 
-Kayla, May 6th 2024 + if len(cfg.NotificationBanners) > 0 { + cfg.ServiceBanner = cfg.NotificationBanners[0] + } + } + if len(f.supportLinks) > 0 { cfg.SupportLinks = f.supportLinks } @@ -139,29 +148,32 @@ func (api *API) putAppearance(rw http.ResponseWriter, r *http.Request) { return } - if appearance.ServiceBanner.Enabled { - if err := validateHexColor(appearance.ServiceBanner.BackgroundColor); err != nil { + for _, banner := range appearance.NotificationBanners { + if err := validateHexColor(banner.BackgroundColor); err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Invalid color format", + Message: fmt.Sprintf("Invalid color format: %q", banner.BackgroundColor), Detail: err.Error(), }) return } } - serviceBannerJSON, err := json.Marshal(appearance.ServiceBanner) + if appearance.NotificationBanners == nil { + appearance.NotificationBanners = []codersdk.BannerConfig{} + } + notificationBannersJSON, err := json.Marshal(appearance.NotificationBanners) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Unable to marshal service banner", + Message: "Unable to marshal notification banners", Detail: err.Error(), }) return } - err = api.Database.UpsertServiceBanner(ctx, string(serviceBannerJSON)) + err = api.Database.UpsertNotificationBanners(ctx, string(notificationBannersJSON)) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Unable to set service banner", + Message: "Unable to set notification banners", Detail: err.Error(), }) return diff --git a/enterprise/coderd/appearance_test.go b/enterprise/coderd/appearance_test.go index beab7c104f5e0..745f90e00d03b 100644 --- a/enterprise/coderd/appearance_test.go +++ b/enterprise/coderd/appearance_test.go @@ -6,7 +6,6 @@ import ( "net/http" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/agent/proto" @@ -56,7 +55,7 @@ func 
TestCustomLogoAndCompanyName(t *testing.T) { require.Equal(t, uac.LogoURL, got.LogoURL) } -func TestServiceBanners(t *testing.T) { +func TestNotificationBanners(t *testing.T) { t.Parallel() t.Run("User", func(t *testing.T) { @@ -68,10 +67,10 @@ func TestServiceBanners(t *testing.T) { adminClient, adminUser := coderdenttest.New(t, &coderdenttest.Options{DontAddLicense: true}) basicUserClient, _ := coderdtest.CreateAnotherUser(t, adminClient, adminUser.OrganizationID) - // Even without a license, the banner should return as disabled. + // Without a license, there should be no banners. sb, err := basicUserClient.Appearance(ctx) require.NoError(t, err) - require.False(t, sb.ServiceBanner.Enabled) + require.Empty(t, sb.NotificationBanners) coderdenttest.AddLicense(t, adminClient, coderdenttest.LicenseOptions{ Features: license.Features{ @@ -82,43 +81,42 @@ func TestServiceBanners(t *testing.T) { // Default state sb, err = basicUserClient.Appearance(ctx) require.NoError(t, err) - require.False(t, sb.ServiceBanner.Enabled) + require.Empty(t, sb.NotificationBanners) + // Regular user should be unable to set the banner uac := codersdk.UpdateAppearanceConfig{ - ServiceBanner: sb.ServiceBanner, + NotificationBanners: []codersdk.BannerConfig{{Enabled: true}}, } - // Regular user should be unable to set the banner - uac.ServiceBanner.Enabled = true - err = basicUserClient.UpdateAppearance(ctx, uac) require.Error(t, err) var sdkError *codersdk.Error require.True(t, errors.As(err, &sdkError)) + require.ErrorAs(t, err, &sdkError) require.Equal(t, http.StatusForbidden, sdkError.StatusCode()) // But an admin can - wantBanner := uac - wantBanner.ServiceBanner.Enabled = true - wantBanner.ServiceBanner.Message = "Hey" - wantBanner.ServiceBanner.BackgroundColor = "#00FF00" + wantBanner := codersdk.UpdateAppearanceConfig{ + NotificationBanners: []codersdk.BannerConfig{{ + Enabled: true, + Message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", + BackgroundColor: "#00FF00", 
+ }}, + } err = adminClient.UpdateAppearance(ctx, wantBanner) require.NoError(t, err) gotBanner, err := adminClient.Appearance(ctx) //nolint:gocritic // we should assert at least once that the owner can get the banner require.NoError(t, err) - gotBanner.SupportLinks = nil // clean "support links" before comparison - require.Equal(t, wantBanner.ServiceBanner, gotBanner.ServiceBanner) + require.Equal(t, wantBanner.NotificationBanners, gotBanner.NotificationBanners) // But even an admin can't give a bad color - wantBanner.ServiceBanner.BackgroundColor = "#bad color" + wantBanner.NotificationBanners[0].BackgroundColor = "#bad color" err = adminClient.UpdateAppearance(ctx, wantBanner) require.Error(t, err) - var sdkErr *codersdk.Error - if assert.ErrorAs(t, err, &sdkErr) { - assert.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) - assert.Contains(t, sdkErr.Message, "Invalid color format") - assert.Contains(t, sdkErr.Detail, "expected # prefix and 6 characters") - } + require.ErrorAs(t, err, &sdkErr) + require.Equal(t, http.StatusBadRequest, sdkErr.StatusCode()) + require.Contains(t, sdkErr.Message, "Invalid color format") + require.Contains(t, sdkErr.Detail, "expected # prefix and 6 characters") }) t.Run("Agent", func(t *testing.T) { @@ -141,11 +139,11 @@ func TestServiceBanners(t *testing.T) { }, }) cfg := codersdk.UpdateAppearanceConfig{ - ServiceBanner: codersdk.ServiceBannerConfig{ + NotificationBanners: []codersdk.BannerConfig{{ Enabled: true, - Message: "Hey", + Message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", BackgroundColor: "#00FF00", - }, + }}, } err := client.UpdateAppearance(ctx, cfg) require.NoError(t, err) @@ -157,34 +155,38 @@ func TestServiceBanners(t *testing.T) { agentClient := agentsdk.New(client.URL) agentClient.SetSessionToken(r.AgentToken) - banner := requireGetServiceBanner(ctx, t, agentClient) - require.Equal(t, cfg.ServiceBanner, banner) + banners := requireGetNotificationBanners(ctx, t, agentClient) + require.Equal(t, 
cfg.NotificationBanners, banners) // Create an AGPL Coderd against the same database agplClient := coderdtest.New(t, &coderdtest.Options{Database: store, Pubsub: ps}) agplAgentClient := agentsdk.New(agplClient.URL) agplAgentClient.SetSessionToken(r.AgentToken) - banner = requireGetServiceBanner(ctx, t, agplAgentClient) - require.Equal(t, codersdk.ServiceBannerConfig{}, banner) + banners = requireGetNotificationBanners(ctx, t, agplAgentClient) + require.Equal(t, []codersdk.BannerConfig{}, banners) // No license means no banner. err = client.DeleteLicense(ctx, lic.ID) require.NoError(t, err) - banner = requireGetServiceBanner(ctx, t, agentClient) - require.Equal(t, codersdk.ServiceBannerConfig{}, banner) + banners = requireGetNotificationBanners(ctx, t, agentClient) + require.Equal(t, []codersdk.BannerConfig{}, banners) }) } -func requireGetServiceBanner(ctx context.Context, t *testing.T, client *agentsdk.Client) codersdk.ServiceBannerConfig { +func requireGetNotificationBanners(ctx context.Context, t *testing.T, client *agentsdk.Client) []codersdk.BannerConfig { cc, err := client.ConnectRPC(ctx) require.NoError(t, err) defer func() { _ = cc.Close() }() aAPI := proto.NewDRPCAgentClient(cc) - sbp, err := aAPI.GetServiceBanner(ctx, &proto.GetServiceBannerRequest{}) + bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) require.NoError(t, err) - return agentsdk.ServiceBannerFromProto(sbp) + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) + for _, bannerProto := range bannersProto.NotificationBanners { + banners = append(banners, agentsdk.BannerConfigFromProto(bannerProto)) + } + return banners } func TestCustomSupportLinks(t *testing.T) { diff --git a/site/src/api/api.ts b/site/src/api/api.ts index 6b4102073b3d1..c677ffbcb1b3b 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -1357,6 +1357,7 @@ export const getAppearance = async (): Promise => { service_banner: { enabled: false, }, + 
notification_banners: [], }; } throw ex; diff --git a/site/src/api/queries/appearance.ts b/site/src/api/queries/appearance.ts index d9337bc39e79d..7fc6cd1a71b9d 100644 --- a/site/src/api/queries/appearance.ts +++ b/site/src/api/queries/appearance.ts @@ -4,12 +4,12 @@ import type { AppearanceConfig } from "api/typesGenerated"; import type { MetadataState } from "hooks/useEmbeddedMetadata"; import { cachedQuery } from "./util"; -const appearanceConfigKey = ["appearance"] as const; +export const appearanceConfigKey = ["appearance"] as const; export const appearance = (metadata: MetadataState) => { return cachedQuery({ metadata, - queryKey: ["appearance"], + queryKey: appearanceConfigKey, queryFn: () => API.getAppearance(), }); }; diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index d91a6b430de27..c2e9b51b96a11 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -48,7 +48,8 @@ export interface AppHostResponse { export interface AppearanceConfig { readonly application_name: string; readonly logo_url: string; - readonly service_banner: ServiceBannerConfig; + readonly service_banner: BannerConfig; + readonly notification_banners: readonly BannerConfig[]; readonly support_links?: readonly LinkConfig[]; } @@ -157,6 +158,13 @@ export interface AvailableExperiments { readonly safe: readonly Experiment[]; } +// From codersdk/deployment.go +export interface BannerConfig { + readonly enabled: boolean; + readonly message?: string; + readonly background_color?: string; +} + // From codersdk/deployment.go export interface BuildInfoResponse { readonly external_url: string; @@ -1281,7 +1289,8 @@ export interface UpdateActiveTemplateVersion { export interface UpdateAppearanceConfig { readonly application_name: string; readonly logo_url: string; - readonly service_banner: ServiceBannerConfig; + readonly service_banner: BannerConfig; + readonly notification_banners: readonly BannerConfig[]; } // From codersdk/updatecheck.go 
diff --git a/site/src/modules/dashboard/DashboardLayout.tsx b/site/src/modules/dashboard/DashboardLayout.tsx index d698e77f001ca..2a1f545820115 100644 --- a/site/src/modules/dashboard/DashboardLayout.tsx +++ b/site/src/modules/dashboard/DashboardLayout.tsx @@ -7,7 +7,7 @@ import { Outlet } from "react-router-dom"; import { Loader } from "components/Loader/Loader"; import { useAuthenticated } from "contexts/auth/RequireAuth"; import { LicenseBanner } from "modules/dashboard/LicenseBanner/LicenseBanner"; -import { ServiceBanner } from "modules/dashboard/ServiceBanner/ServiceBanner"; +import { NotificationBanners } from "modules/dashboard/NotificationBanners/NotificationBanners"; import { dashboardContentBottomPadding } from "theme/constants"; import { docs } from "utils/docs"; import { DeploymentBanner } from "./DeploymentBanner/DeploymentBanner"; @@ -21,8 +21,8 @@ export const DashboardLayout: FC = () => { return ( <> - {canViewDeployment && } +
void; -} export interface DashboardValue { entitlements: Entitlements; experiments: Experiments; - appearance: Appearance; + appearance: AppearanceConfig; } export const DashboardContext = createContext( @@ -44,34 +30,6 @@ export const DashboardProvider: FC = ({ children }) => { const isLoading = !entitlementsQuery.data || !appearanceQuery.data || !experimentsQuery.data; - const [configPreview, setConfigPreview] = useState(); - - // Centralizing the logic for catching malformed configs in one spot, just to - // be on the safe side; don't want to expose raw setConfigPreview outside - // the provider - const setPreview = useCallback((newConfig: AppearanceConfig) => { - // Have runtime safety nets in place, just because so much of the codebase - // relies on HSL for formatting, but server expects hex values. Can't catch - // color format mismatches at the type level - const incomingBg = newConfig.service_banner.background_color; - let configForDispatch = newConfig; - - if (typeof incomingBg === "string" && isHslColor(incomingBg)) { - configForDispatch = { - ...newConfig, - service_banner: { - ...newConfig.service_banner, - background_color: hslToHex(incomingBg), - }, - }; - } else if (typeof incomingBg === "string" && !isHexColor(incomingBg)) { - displayError(`The value ${incomingBg} is not a valid hex string`); - return; - } - - setConfigPreview(configForDispatch); - }, []); - if (isLoading) { return ; } @@ -81,11 +39,7 @@ export const DashboardProvider: FC = ({ children }) => { value={{ entitlements: entitlementsQuery.data, experiments: experimentsQuery.data, - appearance: { - config: configPreview ?? 
appearanceQuery.data, - setPreview: setPreview, - isPreview: configPreview !== undefined, - }, + appearance: appearanceQuery.data, }} > {children} diff --git a/site/src/modules/dashboard/Navbar/Navbar.tsx b/site/src/modules/dashboard/Navbar/Navbar.tsx index 388622fdf7636..8a0b473398a70 100644 --- a/site/src/modules/dashboard/Navbar/Navbar.tsx +++ b/site/src/modules/dashboard/Navbar/Navbar.tsx @@ -25,9 +25,9 @@ export const Navbar: FC = () => { return ( = { + title: "modules/dashboard/NotificationBannerView", + component: NotificationBannerView, +}; + +export default meta; +type Story = StoryObj; + +export const Production: Story = { + args: { + message: "Unfortunately, there's a radio connected to my brain.", + backgroundColor: "#ffaff3", + }, +}; + +export const Preview: Story = { + args: { + message: "バアン バン バン バン バアン ブレイバアン!", + backgroundColor: "#4cd473", + }, +}; diff --git a/site/src/modules/dashboard/ServiceBanner/ServiceBannerView.tsx b/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx similarity index 57% rename from site/src/modules/dashboard/ServiceBanner/ServiceBannerView.tsx rename to site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx index e907085cb2af4..4832ea93f6065 100644 --- a/site/src/modules/dashboard/ServiceBanner/ServiceBannerView.tsx +++ b/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx @@ -1,28 +1,30 @@ import { css, type Interpolation, type Theme } from "@emotion/react"; import type { FC } from "react"; import { InlineMarkdown } from "components/Markdown/Markdown"; -import { Pill } from "components/Pill/Pill"; import { readableForegroundColor } from "utils/colors"; -export interface ServiceBannerViewProps { - message: string; - backgroundColor: string; - isPreview: boolean; +export interface NotificationBannerViewProps { + message?: string; + backgroundColor?: string; } -export const ServiceBannerView: FC = ({ +export const NotificationBannerView: FC = ({ message, 
backgroundColor, - isPreview, }) => { + if (!message || !backgroundColor) { + return null; + } + return ( -
- {isPreview && Preview} +
{message}
diff --git a/site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx b/site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx new file mode 100644 index 0000000000000..a8ab663721a46 --- /dev/null +++ b/site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx @@ -0,0 +1,28 @@ +import type { FC } from "react"; +import { useDashboard } from "modules/dashboard/useDashboard"; +import { NotificationBannerView } from "./NotificationBannerView"; + +export const NotificationBanners: FC = () => { + const { appearance, entitlements } = useDashboard(); + const notificationBanners = appearance.notification_banners; + + const isEntitled = + entitlements.features.appearance.entitlement !== "not_entitled"; + if (!isEntitled) { + return null; + } + + return ( + <> + {notificationBanners + .filter((banner) => banner.enabled) + .map((banner) => ( + + ))} + + ); +}; diff --git a/site/src/modules/dashboard/ServiceBanner/ServiceBanner.tsx b/site/src/modules/dashboard/ServiceBanner/ServiceBanner.tsx deleted file mode 100644 index cedd4ba4e77f0..0000000000000 --- a/site/src/modules/dashboard/ServiceBanner/ServiceBanner.tsx +++ /dev/null @@ -1,21 +0,0 @@ -import type { FC } from "react"; -import { useDashboard } from "modules/dashboard/useDashboard"; -import { ServiceBannerView } from "./ServiceBannerView"; - -export const ServiceBanner: FC = () => { - const { appearance } = useDashboard(); - const { message, background_color, enabled } = - appearance.config.service_banner; - - if (!enabled || message === undefined || background_color === undefined) { - return null; - } - - return ( - - ); -}; diff --git a/site/src/modules/dashboard/ServiceBanner/ServiceBannerView.stories.tsx b/site/src/modules/dashboard/ServiceBanner/ServiceBannerView.stories.tsx deleted file mode 100644 index 1f3df18b3a42a..0000000000000 --- a/site/src/modules/dashboard/ServiceBanner/ServiceBannerView.stories.tsx +++ /dev/null @@ -1,25 +0,0 @@ -import type { Meta, StoryObj } 
from "@storybook/react"; -import { ServiceBannerView } from "./ServiceBannerView"; - -const meta: Meta = { - title: "modules/dashboard/ServiceBannerView", - component: ServiceBannerView, -}; - -export default meta; -type Story = StoryObj; - -export const Production: Story = { - args: { - message: "weeeee", - backgroundColor: "#FFFFFF", - }, -}; - -export const Preview: Story = { - args: { - message: "weeeee", - backgroundColor: "#000000", - isPreview: true, - }, -}; diff --git a/site/src/modules/workspaces/WorkspaceStatusBadge/WorkspaceStatusBadge.stories.tsx b/site/src/modules/workspaces/WorkspaceStatusBadge/WorkspaceStatusBadge.stories.tsx index a9c065c33b330..83da1063488b7 100644 --- a/site/src/modules/workspaces/WorkspaceStatusBadge/WorkspaceStatusBadge.stories.tsx +++ b/site/src/modules/workspaces/WorkspaceStatusBadge/WorkspaceStatusBadge.stories.tsx @@ -18,12 +18,6 @@ import { } from "testHelpers/entities"; import { WorkspaceStatusBadge } from "./WorkspaceStatusBadge"; -const MockedAppearance = { - config: MockAppearanceConfig, - isPreview: false, - setPreview: () => {}, -}; - const meta: Meta = { title: "modules/workspaces/WorkspaceStatusBadge", component: WorkspaceStatusBadge, @@ -41,7 +35,7 @@ const meta: Meta = { value={{ entitlements: MockEntitlementsWithScheduling, experiments: MockExperiments, - appearance: MockedAppearance, + appearance: MockAppearanceConfig, }} > diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPage.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPage.tsx index 2f05eacb9a20a..a99e04dd6b8e0 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPage.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPage.tsx @@ -2,7 +2,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; import { getErrorMessage } from 
"api/errors"; -import { updateAppearance } from "api/queries/appearance"; +import { appearanceConfigKey, updateAppearance } from "api/queries/appearance"; import type { UpdateAppearanceConfig } from "api/typesGenerated"; import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { useDashboard } from "modules/dashboard/useDashboard"; @@ -20,16 +20,12 @@ const AppearanceSettingsPage: FC = () => { const onSaveAppearance = async ( newConfig: Partial, - preview: boolean, ) => { - const newAppearance = { ...appearance.config, ...newConfig }; - if (preview) { - appearance.setPreview(newAppearance); - return; - } + const newAppearance = { ...appearance, ...newConfig }; try { await updateAppearanceMutation.mutateAsync(newAppearance); + await queryClient.invalidateQueries(appearanceConfigKey); displaySuccess("Successfully updated appearance settings!"); } catch (error) { displayError( @@ -45,7 +41,7 @@ const AppearanceSettingsPage: FC = () => { = { application_name: "Foobar", logo_url: "https://github.com/coder.png", service_banner: { - enabled: true, - message: "hello world", - background_color: "white", + enabled: false, + message: "", + background_color: "#00ff00", }, + notification_banners: [ + { + enabled: true, + message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", + background_color: "#ffaff3", + }, + ], }, isEntitled: false, }, diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx index 784ccb94ac3b3..b62a20e923c89 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx @@ -1,13 +1,8 @@ -import { useTheme } from "@emotion/react"; import Button from "@mui/material/Button"; -import FormControlLabel from "@mui/material/FormControlLabel"; import 
InputAdornment from "@mui/material/InputAdornment"; -import Link from "@mui/material/Link"; -import Switch from "@mui/material/Switch"; import TextField from "@mui/material/TextField"; import { useFormik } from "formik"; -import { type FC, useState } from "react"; -import { BlockPicker } from "react-color"; +import type { FC } from "react"; import type { UpdateAppearanceConfig } from "api/typesGenerated"; import { Badges, @@ -15,35 +10,29 @@ import { EnterpriseBadge, EntitledBadge, } from "components/Badges/Badges"; -import { Stack } from "components/Stack/Stack"; -import colors from "theme/tailwindColors"; import { getFormHelpers } from "utils/formUtils"; import { Fieldset } from "../Fieldset"; import { Header } from "../Header"; +import { NotificationBannerSettings } from "./NotificationBannerSettings"; export type AppearanceSettingsPageViewProps = { appearance: UpdateAppearanceConfig; isEntitled: boolean; onSaveAppearance: ( newConfig: Partial, - preview: boolean, - ) => void; + ) => Promise; }; -const fallbackBgColor = colors.neutral[500]; - export const AppearanceSettingsPageView: FC< AppearanceSettingsPageViewProps > = ({ appearance, isEntitled, onSaveAppearance }) => { - const theme = useTheme(); - const applicationNameForm = useFormik<{ application_name: string; }>({ initialValues: { application_name: appearance.application_name, }, - onSubmit: (values) => onSaveAppearance(values, false), + onSubmit: (values) => onSaveAppearance(values), }); const applicationNameFieldHelpers = getFormHelpers(applicationNameForm); @@ -53,33 +42,10 @@ export const AppearanceSettingsPageView: FC< initialValues: { logo_url: appearance.logo_url, }, - onSubmit: (values) => onSaveAppearance(values, false), + onSubmit: (values) => onSaveAppearance(values), }); const logoFieldHelpers = getFormHelpers(logoForm); - const serviceBannerForm = useFormik( - { - initialValues: { - message: appearance.service_banner.message, - enabled: appearance.service_banner.enabled, - background_color: 
- appearance.service_banner.background_color ?? fallbackBgColor, - }, - onSubmit: (values) => - onSaveAppearance( - { - service_banner: values, - }, - false, - ), - }, - ); - const serviceBannerFieldHelpers = getFormHelpers(serviceBannerForm); - - const [backgroundColor, setBackgroundColor] = useState( - serviceBannerForm.values.background_color, - ); - return ( <>
-
{ - onSaveAppearance( - { - service_banner: { - message: - "👋 **This** is a service banner. The banner's color and text are editable.", - background_color: "#004852", - enabled: true, - }, - }, - true, - ); - }} - > - Show Preview - - ) - } - validation={ - !isEntitled && ( -

- Your license does not include Service Banners.{" "} - Contact sales to learn - more. -

- ) + + onSaveAppearance({ notification_banners: notificationBanners }) } - > - {isEntitled && ( - - { - const newState = !serviceBannerForm.values.enabled; - const newBanner = { - ...serviceBannerForm.values, - enabled: newState, - }; - onSaveAppearance( - { - service_banner: newBanner, - }, - false, - ); - await serviceBannerForm.setFieldValue("enabled", newState); - }} - data-testid="switch-service-banner" - /> - } - label="Enabled" - /> - - - - - -

{"Background Color"}

- { - setBackgroundColor(color.hex); - await serviceBannerForm.setFieldValue( - "background_color", - color.hex, - ); - onSaveAppearance( - { - service_banner: { - ...serviceBannerForm.values, - background_color: color.hex, - }, - }, - true, - ); - }} - triangle="hide" - colors={["#004852", "#D65D0F", "#4CD473", "#D94A5D", "#5A00CF"]} - styles={{ - default: { - input: { - color: "white", - backgroundColor: theme.palette.background.default, - }, - body: { - backgroundColor: "black", - color: "white", - }, - card: { - backgroundColor: "black", - }, - }, - }} - /> -
-
- )} -
+ /> ); }; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.stories.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.stories.tsx new file mode 100644 index 0000000000000..d9ae43a6d80d0 --- /dev/null +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.stories.tsx @@ -0,0 +1,24 @@ +import { action } from "@storybook/addon-actions"; +import type { Meta, StoryObj } from "@storybook/react"; +import { NotificationBannerDialog } from "./NotificationBannerDialog"; + +const meta: Meta = { + title: "pages/DeploySettingsPage/NotificationBannerDialog", + component: NotificationBannerDialog, + args: { + banner: { + enabled: true, + message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", + background_color: "#ffaff3", + }, + onCancel: action("onCancel"), + onUpdate: () => Promise.resolve(void action("onUpdate")), + }, +}; + +export default meta; +type Story = StoryObj; + +const Example: Story = {}; + +export { Example as NotificationBannerDialog }; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx new file mode 100644 index 0000000000000..6b5ffaf6fc27b --- /dev/null +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx @@ -0,0 +1,138 @@ +import { type Interpolation, type Theme, useTheme } from "@emotion/react"; +import DialogActions from "@mui/material/DialogActions"; +import TextField from "@mui/material/TextField"; +import { useFormik } from "formik"; +import type { FC } from "react"; +import { BlockPicker } from "react-color"; +import type { BannerConfig } from "api/typesGenerated"; +import { Dialog, DialogActionButtons } from "components/Dialogs/Dialog"; +import { Stack } from "components/Stack/Stack"; +import { NotificationBannerView } from 
"modules/dashboard/NotificationBanners/NotificationBannerView"; +import { getFormHelpers } from "utils/formUtils"; + +interface NotificationBannerDialogProps { + banner: BannerConfig; + onCancel: () => void; + onUpdate: (banner: Partial) => Promise; +} + +export const NotificationBannerDialog: FC = ({ + banner, + onCancel, + onUpdate, +}) => { + const theme = useTheme(); + + const bannerForm = useFormik<{ + message: string; + background_color: string; + }>({ + initialValues: { + message: banner.message ?? "", + background_color: banner.background_color ?? "#004852", + }, + onSubmit: (banner) => onUpdate(banner), + }); + const bannerFieldHelpers = getFormHelpers(bannerForm); + + return ( + + {/* Banner preview */} +
+ +
+ +
+

Notification banner

+ +
+

Message

+ +
+
+

Background color

+ { + await bannerForm.setFieldValue("background_color", color.hex); + }} + triangle="hide" + colors={["#004852", "#D65D0F", "#4CD473", "#D94A5D", "#5A00CF"]} + styles={{ + default: { + input: { + color: "white", + backgroundColor: theme.palette.background.default, + }, + body: { + backgroundColor: "black", + color: "white", + }, + card: { + backgroundColor: "black", + }, + }, + }} + /> +
+
+
+ + + + +
+ ); +}; + +const styles = { + dialogWrapper: (theme) => ({ + "& .MuiPaper-root": { + background: theme.palette.background.paper, + border: `1px solid ${theme.palette.divider}`, + width: "100%", + maxWidth: 500, + }, + "& .MuiDialogActions-spacing": { + padding: "0 40px 40px", + }, + }), + dialogContent: (theme) => ({ + color: theme.palette.text.secondary, + padding: "40px 40px 20px", + }), + dialogTitle: (theme) => ({ + margin: 0, + marginBottom: 16, + color: theme.palette.text.primary, + fontWeight: 400, + fontSize: 20, + }), + settingName: (theme) => ({ + marginTop: 0, + marginBottom: 8, + color: theme.palette.text.primary, + fontSize: 16, + lineHeight: "150%", + fontWeight: 600, + }), +} satisfies Record>; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx new file mode 100644 index 0000000000000..76636a30c4492 --- /dev/null +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx @@ -0,0 +1,77 @@ +import type { Interpolation, Theme } from "@emotion/react"; +import Checkbox from "@mui/material/Checkbox"; +import TableCell from "@mui/material/TableCell"; +import TableRow from "@mui/material/TableRow"; +import type { FC } from "react"; +import type { BannerConfig } from "api/typesGenerated"; +import { + MoreMenu, + MoreMenuContent, + MoreMenuItem, + MoreMenuTrigger, + ThreeDotsButton, +} from "components/MoreMenu/MoreMenu"; + +interface NotificationBannerItemProps { + enabled: boolean; + backgroundColor?: string; + message?: string; + onUpdate: (banner: Partial) => Promise; + onEdit: () => void; + onDelete: () => void; +} + +export const NotificationBannerItem: FC = ({ + enabled, + backgroundColor = "#004852", + message, + onUpdate, + onEdit, + onDelete, +}) => { + return ( + + + void onUpdate({ enabled: !enabled })} + /> + + + + {message || No message} + + + +
+
+ + + + + + + + onEdit()}>Edit… + onDelete()} danger> + Delete… + + + + +
+ ); +}; + +const styles = { + disabled: (theme) => ({ + color: theme.roles.inactive.fill.outline, + }), + + colorSample: { + width: 24, + height: 24, + borderRadius: 4, + }, +} satisfies Record>; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx new file mode 100644 index 0000000000000..d5611af119614 --- /dev/null +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx @@ -0,0 +1,202 @@ +import { type CSSObject, useTheme } from "@emotion/react"; +import AddIcon from "@mui/icons-material/AddOutlined"; +import Button from "@mui/material/Button"; +import Link from "@mui/material/Link"; +import Table from "@mui/material/Table"; +import TableBody from "@mui/material/TableBody"; +import TableCell from "@mui/material/TableCell"; +import TableContainer from "@mui/material/TableContainer"; +import TableHead from "@mui/material/TableHead"; +import TableRow from "@mui/material/TableRow"; +import { type FC, useState } from "react"; +import type { BannerConfig } from "api/typesGenerated"; +import { ConfirmDialog } from "components/Dialogs/ConfirmDialog/ConfirmDialog"; +import { EmptyState } from "components/EmptyState/EmptyState"; +import { Stack } from "components/Stack/Stack"; +import { NotificationBannerDialog } from "./NotificationBannerDialog"; +import { NotificationBannerItem } from "./NotificationBannerItem"; + +interface NotificationBannerSettingsProps { + isEntitled: boolean; + notificationBanners: readonly BannerConfig[]; + onSubmit: (banners: readonly BannerConfig[]) => Promise; +} + +export const NotificationBannerSettings: FC< + NotificationBannerSettingsProps +> = ({ isEntitled, notificationBanners, onSubmit }) => { + const theme = useTheme(); + const [banners, setBanners] = useState(notificationBanners); + const [editingBannerId, setEditingBannerId] = useState(null); + const 
[deletingBannerId, setDeletingBannerId] = useState(null); + + const addBanner = () => { + setBanners([ + ...banners, + { enabled: true, message: "", background_color: "#004852" }, + ]); + setEditingBannerId(banners.length); + }; + + const updateBanner = (i: number, banner: Partial) => { + const newBanners = [...banners]; + newBanners[i] = { ...banners[i], ...banner }; + setBanners(newBanners); + return newBanners; + }; + + const removeBanner = (i: number) => { + const newBanners = [...banners]; + newBanners.splice(i, 1); + setBanners(newBanners); + return newBanners; + }; + + const editingBanner = editingBannerId !== null && banners[editingBannerId]; + const deletingBanner = deletingBannerId !== null && banners[deletingBannerId]; + + // If we're not editing a new banner, remove all empty banners. This makes canceling the + // "new" dialog more intuitive, by not persisting an empty banner. + if (editingBannerId === null && banners.some((banner) => !banner.message)) { + setBanners(banners.filter((banner) => banner.message)); + } + + return ( + <> +
+
+ +

+ Notification Banners +

+ +
+
+ Display message banners to all users. +
+ +
+ + + + + Enabled + Message + Color + + + + + {!isEntitled || banners.length < 1 ? ( + + + + ) : ( + banners.map((banner, i) => ( + setEditingBannerId(i)} + onUpdate={async (banner) => { + const newBanners = updateBanner(i, banner); + await onSubmit(newBanners); + }} + onDelete={() => setDeletingBannerId(i)} + /> + )) + )} + +
+
+
+
+ + {!isEntitled && ( +
+
+

+ Your license does not include Service Banners.{" "} + Contact sales to + learn more. +

+
+
+ )} +
+ + {editingBanner && ( + setEditingBannerId(null)} + onUpdate={async (banner) => { + const newBanners = updateBanner(editingBannerId, banner); + setEditingBannerId(null); + await onSubmit(newBanners); + }} + /> + )} + + {deletingBanner && ( + setDeletingBannerId(null)} + onConfirm={async () => { + const newBanners = removeBanner(deletingBannerId); + setDeletingBannerId(null); + await onSubmit(newBanners); + }} + /> + )} + + ); +}; diff --git a/site/src/pages/DeploySettingsPage/Fieldset.tsx b/site/src/pages/DeploySettingsPage/Fieldset.tsx index d91ab95f23bae..5ef43a9f36c10 100644 --- a/site/src/pages/DeploySettingsPage/Fieldset.tsx +++ b/site/src/pages/DeploySettingsPage/Fieldset.tsx @@ -12,16 +12,15 @@ interface FieldsetProps { isSubmitting?: boolean; } -export const Fieldset: FC = (props) => { - const { - title, - subtitle, - children, - validation, - button, - onSubmit, - isSubmitting, - } = props; +export const Fieldset: FC = ({ + title, + subtitle, + children, + validation, + button, + onSubmit, + isSubmitting, +}) => { const theme = useTheme(); return ( @@ -30,6 +29,7 @@ export const Fieldset: FC = (props) => { borderRadius: 8, border: `1px solid ${theme.palette.divider}`, marginTop: 32, + overflow: "hidden", }} onSubmit={onSubmit} > diff --git a/site/src/pages/WorkspacePage/Workspace.stories.tsx b/site/src/pages/WorkspacePage/Workspace.stories.tsx index 7f4db0efb8888..c321366862264 100644 --- a/site/src/pages/WorkspacePage/Workspace.stories.tsx +++ b/site/src/pages/WorkspacePage/Workspace.stories.tsx @@ -8,12 +8,6 @@ import type { WorkspacePermissions } from "./permissions"; import { Workspace } from "./Workspace"; import { WorkspaceBuildLogsSection } from "./WorkspaceBuildLogsSection"; -const MockedAppearance = { - config: Mocks.MockAppearanceConfig, - isPreview: false, - setPreview: () => {}, -}; - const permissions: WorkspacePermissions = { readWorkspace: true, updateWorkspace: true, @@ -43,7 +37,7 @@ const meta: Meta = { value={{ entitlements: 
Mocks.MockEntitlementsWithScheduling, experiments: Mocks.MockExperiments, - appearance: MockedAppearance, + appearance: Mocks.MockAppearanceConfig, }} > { return ( <> - +
{pageError ? ( diff --git a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx index 1c7b61558a8cf..11fc39b142448 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx @@ -91,12 +91,6 @@ const allWorkspaces = [ ...Object.values(additionalWorkspaces), ]; -const MockedAppearance = { - config: MockAppearanceConfig, - isPreview: false, - setPreview: () => {}, -}; - type FilterProps = ComponentProps["filterProps"]; const defaultFilterProps = getDefaultFilterProps({ @@ -153,7 +147,7 @@ const meta: Meta = { value={{ entitlements: MockEntitlementsWithScheduling, experiments: MockExperiments, - appearance: MockedAppearance, + appearance: MockAppearanceConfig, }} > diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 17bb48c1d00bd..6cf97131aba67 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -2355,6 +2355,7 @@ export const MockAppearanceConfig: TypesGen.AppearanceConfig = { service_banner: { enabled: false, }, + notification_banners: [], }; export const MockWorkspaceBuildParameter1: TypesGen.WorkspaceBuildParameter = { diff --git a/site/src/testHelpers/storybook.tsx b/site/src/testHelpers/storybook.tsx index 666d8b91c5c98..4d601e0dd67ef 100644 --- a/site/src/testHelpers/storybook.tsx +++ b/site/src/testHelpers/storybook.tsx @@ -28,11 +28,7 @@ export const withDashboardProvider = ( value={{ entitlements, experiments, - appearance: { - config: MockAppearanceConfig, - isPreview: false, - setPreview: () => {}, - }, + appearance: MockAppearanceConfig, }} > diff --git a/tailnet/proto/version.go b/tailnet/proto/version.go index a6040a9feae47..16f324f74fa33 100644 --- a/tailnet/proto/version.go +++ b/tailnet/proto/version.go @@ -6,7 +6,7 @@ import ( const ( CurrentMajor = 2 - CurrentMinor = 0 + CurrentMinor = 1 ) var CurrentVersion = 
apiversion.New(CurrentMajor, CurrentMinor).WithBackwardCompat(1) diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index 0b2e835afc79d..45d88145216c1 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -177,7 +177,6 @@ func handleTestSubprocess(t *testing.T) { testName += *clientName } - //nolint:parralleltest t.Run(testName, func(t *testing.T) { logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) switch *role { From 679099373bcb5fc22084afb81292f3fbd4256bf9 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Thu, 9 May 2024 21:29:53 +0300 Subject: [PATCH 035/149] docs(ides): document connection via JetBrains Fleet (#13179) * docs: add docs to connect via JetBrains Fleet * Create fleet.md * Update fleet.md * Create ssh-connect-to-coder.png * Add files via upload * `make fmt` * Update fleet.md * Update docs/ides/fleet.md Co-authored-by: Kyle Carberry * Update fleet.md --------- Co-authored-by: Kyle Carberry --- docs/ides.md | 1 + docs/ides/fleet.md | 25 +++++++++++++++++++++ docs/images/fleet/ssh-connect-to-coder.png | Bin 0 -> 32233 bytes 3 files changed, 26 insertions(+) create mode 100644 docs/ides/fleet.md create mode 100644 docs/images/fleet/ssh-connect-to-coder.png diff --git a/docs/ides.md b/docs/ides.md index c5aafcec4813a..6ec1b5287c233 100644 --- a/docs/ides.md +++ b/docs/ides.md @@ -12,6 +12,7 @@ support should work: - Rider - RubyMine - WebStorm +- [JetBrains Fleet](./ides/fleet.md) - Web IDEs (code-server, JupyterLab, JetBrains Projector) - Note: These are [configured in the template](./ides/web-ides.md) - [Emacs](./ides/emacs-tramp.md) diff --git a/docs/ides/fleet.md b/docs/ides/fleet.md new file mode 100644 index 0000000000000..a248b581a2fe2 --- /dev/null +++ b/docs/ides/fleet.md @@ -0,0 +1,25 @@ +# JetBrains Fleet + +JetBrains Fleet is a code editor and lightweight IDE designed to support various +programming languages and 
development environments. + +[See JetBrains' website to learn about Fleet](https://www.jetbrains.com/fleet/) + +Fleet can connect to a Coder workspace by following these steps. + +1. [Install Fleet](https://www.jetbrains.com/fleet/download) +2. Install Coder CLI + ```shell + curl -L https://coder.com/install.sh | sh + ``` +3. Login and configure Coder SSH. + ```shell + coder login coder.example.com + coder config-ssh + ``` +4. Connect via SSH with the Host set to `coder.workspace-name` + ![Fleet Connect to Coder](../images/fleet/ssh-connect-to-coder.png) + +> If you experience problems, please +> [create a GitHub issue](https://github.com/coder/coder/issues) or share in +> [our Discord channel](https://discord.gg/coder). diff --git a/docs/images/fleet/ssh-connect-to-coder.png b/docs/images/fleet/ssh-connect-to-coder.png new file mode 100644 index 0000000000000000000000000000000000000000..fef916363260d585eed67c44955129ebace54d9e GIT binary patch literal 32233 zcmeFZRa9I}w=N7J1QJ{V!Gi=saCZm}!4llv-5r_)cXvy$1b3GvNaGURt&!j^4GsK@ zcYoi%k6oO7ZqCi=G1h2S^;%Umt7grb^~`4y{#j8P;}!8M1Ox;OSs4js1cYZ*2ndLk zs4sw$e8z1v;0w`3Sy~LCVw7|jxOr|NsvwGhP#uc~H$n#PUpmTYxga25c0XN+eGb1& z5fJ?RWhF#aJ@pTleRS2-x*kul-p;ywl>CA!iOkijI$VJ3)+V1<6tl}q&6mMtmBN)6 zT{I@I{YgkEMN-8){hqWQZ8XW$_#L-?P1QH~wjafwqOxt2{fL_ncTm!`D#>0F?~6PC z_EEV}ZJWyWu@`&>Gg>}&KMxKLoCD6zoCgO7c~crf5BI@TCfDx%%k6<2bg|S_z(-lb zmzZb0!QBIUu7cX*f+!3mqmz}L?d0rSBV1EkJ2W&n_@0GjXEAowVmkA&&&YyfUnmp)DdTEUX_Rzq6QJK+hM( zMxX8=;-3o$k>w6=DuzdkU>HZ*s03^LQx&CGN~)v9?n_$G&AU+np0e$#TCH_83E*R| ziL7|ZdthsRcU$Q=I@eByRsrGur^h__Xch3=A<6r8YYOc5t2+Fj_tbVce<<9aIqWZ+ zGm5YfDzPj)FcE|GufM3H2RCown@lp*I1aAAK{yfRZC_NSg7 zGk{*DXuMd3&e+%(s07WtHhsR<0*TdSTb_{sO|Qk(=xlSqcCjJ4*=2{g6+2!>^ZIBR zHe~S71+*w5FTdFnj&}ot0h9at*Oz?H^AVo;I;%vdS{r6g&ZMtMC=)-#Q4rjZmIgEo z{3NZdOR9Id&H>5Sj*dp9Dg2&y{{53r&&a46mdrm{yE!YDDdyNWY)QO4k4VVl=1`;~<#&~N9q91^ zEk;qFnXIpw-)%1_Gn0f#yAI(kD{I`MeQWg7)!F$GLA%cKEAUe`zZ<(P6IEjkFj=Rm zV0f@rt$DR^?<>VDUc}CDJUY#4W8_-%kqs6Q3~eNXi^JYq;qquX6Bw~7uXFPp-!s{# 
zi`!vkIEh~6?{aHnHH>Q#nA0b{7|r5Sfyrw3T9)bNhz7o>c7jSNtEkxc($LbfyB}ym z&3S^3!H+qM4&Y@?-Qq9#Qos+yk(HX|NM%ZY$)019qbdd@a#iv3A#Q!vf3fv$2Bg$N z;Ihz~wGGQj%K8-w+BIDoBq3_WYc2w&PP4(5h(Wy!VJ+3b-)Oxnm{GSW&cGjr9PpY! zy~=i;9e4y?C$hiX_GIlhqfSGVUaLD%DZ*U64dGAl^GtGChW%#g+x_y-nSOIEYPN`w2ox77iPtVUe2sEHOJAJ?n&?R|G%l85vN1Oy23(YQc z9?Nd2K)?((zURFXBJ#YDqtdB)Q2D*wj5+O$wVW%5q3&Ma= zXExuTDF?paM3w>u+z_~dY}n)h#-P3`;-?Y?B!+Bg zuG$3XJXS|X2gKa%NX$isfgI>9hdr&vpV4dqdIpB>>FMNL(8Ii@pV~Bk=QhPd^0zYf zQPP3#d{Mu)Ok`z|_t?4H3a6MSU5l&8D$tUXz3Kj!Dki*oz6n`pNRT6misEz*i!=E7 zn8v7=kn<+0N|C(LU=n@uK2L>SYsyY8CObho6sB zG;uFb7^OBV0gP>Ny(TRK7gw_Va!Jvh9vYVL&D- zm07POIXujEp)MRS<7>yqV+ZrKT7dPewHSk}uI2)TtQMoBIeu4KM{Vbd&?1fG+*}Gk zPOb`JL)X9Klz=1^1J2q)K34^@iL~(Z+{ew~bPk9$VDGy{9$`wrAc&3S2t`jj<1{xn zbJ{P>0p{Ha3Q0=Qzb6sb!+@MAPb(_0r_ ze2cpBcDI}jY{Fpe6lT2$V0Rm`wfVi`e|I>X!G#v^q>jLb5Ny;P!Ui}A5fKsgbI5s; zn-I61Y>%^p+Oe}ihDzO9m_OhI4rlUI&ivpM5y_>?^*#G`ZpP9FhzL6yVqs~*ebz9I z`vQZwAku{853t741-;I^QU^aUF`4a76t>;&l|oAPnc3J*vreolH7fO=N|yYtmmwuX z?gNbpZq9B$dwbsa^1JpmXuO(hFrrX<7KanLD*vqa$QSdb0k`yG)-N@nI!O-AQwUFQH~ z$C4ez#1*B<0;tU1Yd)3Fno|1qnQ~ieMJBB%vDw9 z?I=6uygCyvKA@`v?*=hi}Y&euQpj|n2cV_`KZ84Ow8>`B@r%lJ-kQ) zrV`Tlk+HFzcxiGrZk*;~J@tU{wY}+t+vub2+NO6~^Tu@cch~;AhC1Po+-p zUsXL7Y<$Kylmv`o&yV+=PyTn9_`hBY%|l>#dH_=rQT@pyCXJX3;=UYe_r~cH}_30o~QlqOB`7=wR0Xr*F*| z?Gf}Q=7&z?x3dl7$Jg7CRXcQy4BkslTlbu%wM3iTnux!2urJa7k|BLN#-MdGXq+&r zTKTM#F3V{w#^P_bXqs+`#uk~P@z%Fgt*k7Ka>2dm=aBkF|Ll7A7hfuA)JiL{A8Z-_ z-20X*DOQ+3{_;&NDN9IV+%SNEzZ58<+Rglv-k$C z;G}Z;f`hRV>m8!OrXAYPuO-FPIc-RPBvZ(|j;~*BSf1gm`NS}|jOyq40xxP^LqcLN z>RH(mv0x~>Rg&H>dKnX#OE(cNR}Ni{-?!5*;%^?7oF`s=AikOK*q{+(5MV7yqaadc z!k&$>Ta?R)wc<&wbz*8fd)ZQVd6BBj|FnA$1l%*$z+N zhk)ZHww?#1;GkRC+_fxXdp}oK-HgA0yw4+Vw7WjQlZcL}&UD6>=&6c}paYw^XoBugF_(*M7*~%1!a9U|Z*q z5RKNw=It)N*%!`@q-yZubRoy=Ycz)DF)9mZcSpUWS0z_ywCd@YBQ*H1Een}|_wq&U ztJja^CDJp3%w9w`5*q}PPzKVSH->s+ZJ76j~%#U0b#3^VsZm{~m}4hmFVC)azg!4;*O)?|Ivq^)s_ z{_iKfiTWH>!n4)bYOH|@=or;N1jv+T0F&<_nZh|rDR z(pYAwZ0iEf$8R{y<+q?Q0!M4$$NS3a;(j%P(JE)7sv-{_W>?AGYP} 
z6^vG8b*e24^gYc;8#RK!ZLYW)ezF@W=%0{upvN!QLJU$_+d9H6&WQp(dPn`0V_-p0 z(so1bVPc3*652J*K|GdveJ-tcVqZ&ls%nP)v!T{?-@oyyHSp~N<2 z2VaJ+;!+CHA|XblVT8`|RkXH;@xun;bD7l}w#TeH@>NoskEQOSQy$2vw~dq4>*OgQ zhvlM~%2GXS3t=Tg=~Jzk>=UXc1QU+(@k?r$sKU29~C2wF|=$HGrNz?5Zb2>Q(PS-k!k-*BPo zonHmDG0SfJzgm>8p7e=e?(pboQa*?4#Q4WvY)<-I#bsg>Y0xp)v zDD;T?>y7vP!-qkU&fAFN=CH7-g~OwFZamKxn_VoZsHsbZV#tLC4IXbbOeVGW1sGy( z9?7gVHXu!nxH4hcQbBrF{VH-Ynyf#p^>yvsGNlfrCsBD)A_J`==I!5)7!TA{mc4%b z0;XFc=y;pG1tJ?eK#tUEqY1H!9WLhPaqgV!NM2NPaVD#!@ap3Kc0DC7crWX%rU#t~ z;;MJheP?rMOI|b5>$?&EQ_H`qWR!m+NcKj@_U7p^W7v)&x5aww(pdxS2m%AY#zA{N$D5k_|vJ%EnE9dk~L z!$F-gWlfTSy0?^yqcm*#Abr{c!tdVvO}P_=z35^?DyF$pMNE;(O`!!?|DU7JY!x10 zW1x-obx|=fvFKP0eXrgoS+0Hm6|}<^wGN_ggoVao>M^0Dst7a=TM7z`Bh8`0J_4N5 zuoyMM1AIHZk&SNmWaRxYrHYo8W&DYp4Cdnj___-2;dX9OADzl;qY=)s)5%l$4xM;S zE|C;hb=pN_0~hf{wF?|q%!>EJG;Y#H(;N48;+Fo-gm)jrz87<4w`zmi=JvI325pOk z$a~}ZDTi;CDvmz1&N)qO!`11j>;_DwgoH!426i{IIdlrFo9pkW&cblAAsr0(xm|kzlAbHqe@jK>cG&VlGl0)!JBx!|=k)Ir+T!JAqdaHvD$h?+ z^Kdqr+rodd4;|FytrpcNHQr%HP%KjL#p$cu@C2~)8Ef5a@OFLJL5g|H&z#}Zov(#U zD1C7farWV9H*0mJMe;=)!h0Y?^|qgYV>?QI!0TOya6pqU3S+UM9y{cGuW?CVJ5t854q2oZ&f1XgO=&Mp2Ukd zt74TiDto6u}Dd;yilq;%f&ENUkoYAN5{oF5@)btCKB z{KEA}YGxO)9{w}u*C|fzfISfl;NRHJ3U%tof4e;3*L}(WQfH`7*UO~$ zP3@|9IcShTz~m#JmA>!t>fcLaF%IQIcfrDa&T?1}R|f%T{=;52vRfzNB^II{7tNi` zFU>l|p>HgF&@Hjn+`nG&pziERYbK>iuXs@$rDeJxwO81HNWDT;G}}!vC&|f1Pf|;Y zRMrSSjUf`w-KD!K;}US}%%7rDaAmqByZA1nM)Qt6R)1rhxGI>w=`S%tMg>OHHHOS^ z!1ORCqH0e`GmPb~p`7+GRJBGsc?}iOkr)Z3ECu9~AyXbVVpagRntev2K1hKI)la+50 zKxgBd47rsaP007h=JsNGIa+J%$@fQt{#dYnUJVMTVg!>@-8+6@$c;3TPk; z0x5o|r|7)Fyt-6d3K8ikK*_GY<8OwGJv3hxa`a5}UrHf}P5*)rIbWrO)3wLn!LZ zerKuBXs`Bz;G3o8D7BxD^)v+8$~5QV=VkV5WCG{I6&Xa!rBqoHai@{F!Ipcqc*iY) z$ax|F@zUtQ1G@@yP`s-3NfeRTTNtFnFEmS-=I!Tq7~_aSc|7H=b~>e@>8U;NP^*P~ z^BvhFoBK44W1i43$#0FfX%mt{t*H##ChZO=T3Z!Fzhc~MSMypDVu%TPUCrog#*XBg z`rUfsp*CY`&>>euJb#Ddht8#Bg8VY=2}HB?82Vdc!w!^0Q=~p3;I_{j;B>V!Tg&nV zN9_j#iyMO*5NyG{`@mvxO(!_fbY#s7&B-2aT9s67{n{05pz-{R(wONeV2bE^ICebT 
z-IAA_z*%Mm*`<}f&g;;oq;ne)y`EI^dUhs?kV8!@_jsFewaWS0r0(Z1_b;rS_xnXULH>rO)g+eq2ZWdR34AB`92K zSDcxeQ~j^!@*SdFG48LEKmOTn980Goz4U(d zSz<#+iPd);J1-|)I!{N5b}Yj+9!<}lt^Q4=f{w_lDhoyC$leUs*UUVs2V~h5s`MO?`dntyj8Fml>xmC`=ybhNti2=s5qE$EF%OB(J?!__}M43NcA^| zGV0FmeXl@*Y+0JOBUN-#qU1+xaRD{Gn1qBAvUJLL`6K(oekM;+V7ag#lFP`-P?bUv zAZofzTO0xD$72>^8sBF*Q`$t*QOBriuIOs)^(KE7!h`dirAoub$de$>F}4k zq~E|lmvb=+(`h=`qSH)ULy#HLjT-S^3;#g(G^TaQ38%{Q>TQrLc(8fla9p-f#%$(~ z+r2UJx^h(FbEVU;uZ{5eqX#?Ef_fCUMK$xb*#~MvJA+wiwjOm~ey0Ng% z0?E5@LQ zyg91aaxRgD>?#>@g26oT@O5XuHm?{eX1n!HZRpC8PdXyW}9TxhTxD$}YBi-=%X*W=55E4G(QbtvedT)z|I;OH_;cgS)Z zK}AcMRnR}c=s+swf|+RUnP2H8;6wXufqP>r=fXNaSk3K06LZ#4vQ`P3GMb6BV|$2v znXznV;`R5nR>#uc7WAkzllvHQ zx#p@M(x(e?(hbmoVrOWK61}Ibg*<9-om{*t@ai7?SuY$)Mdhr3amoCv6=4w!0+IXQ z82S1gGJk6dC@W)pxU2+Kn~xBiOk-4h=!6iLKEa}!5mQzH?QR!l6AXv*mj{dW85w|c zOhr}y^IvGI*)NaZBaS$!PuS_wr~hqi_Wv4ASU8Gg>*&Y|x}D4gxB=YidR2-~at5_v zRsn%giwJ)!E6GFYPMVsZpOL#%sKh+CZsa5 zp2p#kf8}nKuS^&nTT)((S2jK|oXghhFo*+|Z|O}?6yNN_r39`?pfX9cfk^aBR-QFJ zP^JORB8A0d%7>2B5PDCNwm7AzOV z!NXpinU}TsZ*YlZH~R;Cop0@F!Gh_ICQyDnvCmLWbw%%QbbFj_Vt-Z6)8|RlHF9E! 
z%8EjlVVVjTL8pSZ-c$&p?fzrOKFCA>776&4PR*`%o2OkYk`<6Mo4jAY!oYy?01(`h z&=2>=pjw~nqsW=;)z3x4e?_+wq|?spgOvli@D4P6zI7UjTcv`t947eL4dvewQP5T) zJpq#(`i2XMSRA(xK23)Q%>Hw!NFV1^(87<7B=Xlm7TZu0BG+TPtYsL|-KggEHi2Lg z{o~y;(A~M^i)9}YGavu$)XBmtl;_>~;Xb><&z9j$e*KP8?u_fJ`(oiSK5+!Se(CTD z6<4MQo_&vI9QGTQVc#PyauSZHRribSlWTg@6a)adJP9UGLwf9=0trGWe0XTj7maTv z$Bdp@pJ`=F-%sGvRrZ_Z%8fzPY*vod&>GvmOq`^#AV1HcMiK z#G)5&uI|s99RR?o#O4d2TYk%RWw!D_A6b^WV!n3~TvpMM448v{jABJ3 z#?A)2#gcoS#TMe=8>kO$r1sd#0Z&klFum{X*(YV-5O4-VH|G8r`y1&)CcZEipwbAF%B_^JksLo-v@^OI3YOJ3w5TuPF!s&%TPA^OvngiS(M8B`_ytv z1PXcJPAbDV_jE8Jw(`OCI!=$BpY=x&Us%Y{@!y<84hLE=I>owk99ooaJ8UAb=Z_kw zEM8W$8|9<$OTQQ--JT=okPcC`(RXE!E_?RPP-Kj^Ha6JXJoFsVU1Gv)V<K~B3&28%X6K6lyu1J z5DyK@h&$1*O=90U$$)B!YGUGzHPjRHaNjS$x#Q#Gr^&xm8rTkH{L871U5uLNh#Yop z&67(8>JbZnqyNX@Ln#0@ohGzIZ0Lu5@#P6Ew^EmYIsG zvE;q+2b8w5*qcI^H?#9;8rE{hTvD@4oeLlpJY3wHwLq-720N1Dj=PHLUF0&;fehWQ zT<^e_*EiZbCAUGimtdud*>~a75WcF?nZ%9xz}46qJ-1`L-$BBn1@~>Yc?`$}G&J$k zP6*I%b|SvQNNKPKUs_kLhO%gUT>pNWf=5#-LBu6gubb~ zf8mf(Vx{M@J(cVYjm|vj(UQtGq8CJ`KDU;XC_u=OHqvcu{zY4eBu@5*(o`9ds)BYK zzIP>Ci#_5DZvvn1jid0*8j?wEU2!{gI-*|IoRG^-LC z32C4L-jt^d8({|AiB~8q&@IrkF~Q;g(lx5BHaaZmpy)os1DJJQf=&!69H20UQWJgi z?CSM*CA+>+4(+kmMUu6n_Inb+1(&j$?wL~X2tElS7Ma$qG*l38MsT%LPcIYY_AcT~ zm@#xCEK;w3IG(Zz2U38{@%PVMD*0m=^-(X*bSoafR`V{Go!Bpxy|h)Z zSp4a4|64pA4#C~n*a-6zX<7Jt@N}H<_k8|G4jSfjO5Z*^^zU(vB=Un>9QMnR^v?LLWULU}NMQl%-y7K%XmTh)4_i zk>wBN6aG}oa@@DNve*4%Coe4#eSNG4ciD}?0o;+6?P|Ehk!acO#*NjUAQKDfVdJx8 zVgbJyTUQrPJC%P!Kc-04P{5+pae>X~7ZdAls-JkA55#JT_wN*o4)2i>#+-%?a0XVu z(l{=r&)Lu*>krdIZ4JVBkHR36+fFfIe{1&YP|Bw$7jj{>_ivvvd7&Pw+15*albx2; z{S-*ZZ$)%dFUhMGU=Zo{Rz^M9>`h2IPY7^xk9+BF!ZrKJq6>;8GE zuM)VAYbXLg5jTnN=Q8IZEIzAF0}z@FV{WsA6iXfwe!gt^#9XiKuzBd23)?VOzf!~(A`Ga`f}SO55v-BpU*f}VDohfWjCxZ zk%a`4Bo0Qi?>hBIbv5E`N>N!S$Nj@2idNG2Mykm=*SCh>2nF@WC#F@yu_?i^w6`Aa zc7X3wGO*E_8o~jrkr3(Vop7y4*&4qO+N#fJSHD{$QJNr({`KrsrBg9%cx3GvtD%nX&%yWjaZTfMN&J2xt@7idsd z0b&j~@jF|(S)sG*T%Or;A&I4gSwfPquPjj`C8rk`G;iM`)XqoM*1k|;E+{KQkL>OG 
zlP^vG{ynRDt_ctZ1_Gl!#BQBH+k=tkd|o^F@T;~^C8=kl?_iLWr>fP|$09kt#%Eay ze1(XN^$tu&ogwwI zu&#QWcTXoF=fKbZ!CUzs2q>H0B|G0Ds}B;IKab_FsQv9@`wzm3E{gt0%ecchb_hCY z&v(kezaS+6;#MM^Y$c(+JCx7hoWo)VpQF}eo~4z}@dN8KduedT;jd+W#MzW*W&LWG z2(_v0Ly1O^GZKlp#5RFP*_Jm$k`e`qUcnc62GUqppu2acr$8L1Iez)w*B}oEX+JuX z{R~fRY~oKJd)t52xL45{Ar;SunDfy8!*q$~pAO=-8zG%P6GCy8E0-)Jm;l|z-;dg% zV$@)kW{7)_Qx{Mw@L&D_2*ww!Kgpt(5r=~$E@c?*$@JMvTS@cFTC)y zh>C`O2>;!99E^nT~yuENGRC zhx@-?SUXTbe`aUBM|?9}Cy6y)R)7+FKbE_zdd3&typDeKDibV2g@;PY9jHOKEV**M z>JpcN{;-dpLnIQhj^u;00d(7^hQ9uC@?^F_ko}xRkhhH@TaW{0B~3e{vEe?-mNnOs zI83!jWQ{YBuktWhz}Dar7e73TH)Ako&qU<~KUPVI4szj^G}~GnCx!j z-m=dk#@wg`k@U%QUw1Pc#0qj;(mx0ej_2A5Bt9N{ZtSwbD_U9lK#WCwbwo4A3LXYA zpj0uba5tV*UtKEaA>Qe2qhV#`S}01GfHi`2yF_3`!+PSRLMl8GvKSrn%? zXT)mtZLSbl+tFqrq@f(iacpilR$y}Av?JSxKaJ064Ye=E3`#H_mQ}ub<|4&hSSZYi zRj%}qW55@f&JEs`9D*Ox*ujFkE!wPB%Z14MFr90#ZNQWm@8Me({wwDUTVIV_uSSd< z%*k(7M8PKzQib`RhIVQC_Cw|tSa$VHGY>5v*|4_vVEKbu zhU&Ur%{Td1r6r_MKV!tE^z8514v`>%`$G#|tJb5c9s4F<5r%btN^foYy*254`>nE@ zi|As51$eDK)%<*h`0x3_kf3jC{mk^HWa@)Zzy1Spt%#;Z^Xtx(?!K1?*ye{r)ZZ|f z_WpeLOS{Ym#iH^r>G#BE6LAuqk=9B#8jnc>6;Dt#aJJ=IJ$eRw+$U%Ca-*#A;m&Pm z%f#YQUqt@eZGH~w)|K^yxy@LJ$A)vj0v|C6M$&#q^$MVpp5o_=f4JT4JQ3kEx`)pX z`?I{946u@jZ8+*Jzcw|Gv6q9K0j0Z43G&w*u-)HHh&LG9&06_Z^*iF`V9rcQdW>e! 
zo(uX_X^BmP2~Mp+#z?4-HkN@djgQarH*(pa z*rgB?u*se6K+nR6vbKoQ?()O1PfoyXPi-O&ym;4#^i%G=y3#_5xc#omTB$&>uc66r#pkAi$@5lmZ@9)OB zdEzB0Xq;FN-^x<7jSfHR^msHkwBXrzw%6F>gg29gHDR6Xcdd!Q<3-d3kB-H*Dwh+o zea!`a%06m$>p!!a$CSB?11qHUXT1i5bdT%V6f(U-R`zN*2boW!$dbx04uE+mdYB#& zY;fJ3S((CP6F|MC@f510=dc(#iA`%>6UxA)nc^g0m#Mw_wdrnkKwRu67F?$^!x(*X*^S5@#U&*tzos7pq3l7fn?Se>z`0Mjb zm0szf7JQ}8japVy!Q%CagK8;QPFLr>jcvnIn?|^~T3Z*XD_r@L?d$PtPyBn(yT|p^ z02EKJQ{|M7OZGANk3cv4*8$WAu!ggjk|?o>ha@r#2(g%gCl`;WM|p)nf08^v zxLMbY;l!EGOs-U6JG9@9Ip>Y&TJXEml!zjPTD;DAxbCu1;U7PVE zPK&>+qt86fav#023qr71O_V*=WJWZ5v*;k~*6>GGi)kEf%p6Evf5hMm&R?X{$Qj%{ z40pVJU&-+vA@|YlRfdE4QN&y+)eI#Y)?es7xlrrSy3`^*{jgjFm*i%C$xpjEQk2o@ z-T(7PLPKyxvscsv<%NQjP_K#J+0DiykW@7F9?}u7dfC_Wu4&CFJWTg>-+XJISS|DQ z#NZ;gUT1HlxseB+ZbRzSMG2cmYJmFl=EV+sY_XwRIzDYa$*Ev(E>NnEUZzk8G(eG_S$0IZ>#fK=jK7xgzI&_i_h{42-~#Zl8yuvf(i$ z|Hb4FS1kGgcRiA9AqB_}nw7g*kMVQa9FmFWsGDDj9N0}X-)@>$ooCWqX)ZaI3ue(S z3DGR^zuNngAaZ3bK5&Ge@S%u2i@$%gjOuM%|G0T_>q7H`bWG!3_w&UpqecSI=2q|V znW@HHvp?GW;q6HuK|{9Z^?LfO{d`zqzkrq2)a@K_mv=T)3`b-9zJ)9`yGqT@q*cpFRmzLOs~yN?PMVt4oswc zzA4|20^fLAOdsnZ493=Sz_9hRZ#mxAh#*l@qw>P22SQ7}ox(_0YXh?a9QWOk?r>Ed z%OK4yvzdS=JpY`)FHS6Qy|UW%lq!YUGo)en(MZ(y%4<(}v)pVZRPs^;&t1)WV1a`iREL-fjX7>|nIMFj!005Z!5ps3i%;{xw6f;@beGaN~J6 z$<^95%HqE61-eI$b}txQVd)n~X7Sk5+A!+`rjPentv`CVp2)(xa>ce{&=yu0nIGN{ zE41D`PmR?)>2*5FYD+4NOgVdal-#&Iq*_@-ho7+!lnv&OJpxGvd6JBJCvWVOeEV+g zUXvJP1Ts8!dyofu*_~1mF!|^9>0ar&PBlnrzyRxEt^R3rxxT39Qv3kVsk}+Q7 z4;x&HP;->DCs#b|d**mBTDE!Od%)ov(Gr?|5!EU=g~C>+~$VSdr-%0;%1G^{@p%#?3_*6U?pp|CiAPQ_Y4`E!!p`bKUgvNMi$ z(`?Guo+VksP%Z<1$~7u=Z;w1RY>CvLs4l)p1pMkFtfCNNjnLKA#q0VvuKFe+een`% z?_WmF?cfY#M!tRbu2)lcz7onCh_ z5DLwlgGbREP++E0Z}h9DvmclAOW;xGWYEHvmcDmX&czWfG0vPjzNxt@3jhkRNu}s$ zxPurKNd$lZ^=h7+$y52sXu9EKz;UZN?D$6G0&LAy3FIggF#l%?Lvp4j5yEJ%SJHIp zu0KHWrK;CZA4A*Oh|nq4Ts5&)R#y+Ht)-eNbarep$QTb*#O88^D6vYIkPX*>|7zTriFpn z-BrHaq&I#tFI0-4com+?l-^L_9e;bP7joX}u4Q}UinRNBy+M>P9a5q3Vud z$FqKY4;5_4~NR!q`$oV|F{#*g)XfD=m>ydcydk+69J%ugqh=2o!dfZ 
zaW>>ru}Lbnq29}*b07X~yS1T$e0(J4=p*A31kja#M}+E7?_N1#11JpbQW4(&bG1N4 z9s}{JW2T8Wcdf4}BF+ZAO0Q5~>5i7V=5 z=|Ry&r}-6k)LKR7VBTH?R+U#S3(oTB?3!SP2Nfp>jl}jh?svn-SehdwmSbxs7K7-! zxlTy;@1O>!@)=cNLL@4(7mzhqrKUu@&a!l5bLK9|7C}rx4(;#u@$G3_pKG~CTATe-kXfbTBSMVx4SqBUzXu7sV+5UENX1l7gqUK3@`Wv3Wf2^ZUol zLa7%;oEtxi1B>1~5NrVc9e{OI{H!YE+7`|gl%wYx&82QeW;HOTu{o=8RWo}fBVtE}H8L>RbwK#>66;Lp<(@l>-JS+Rn=RKdk}t^&-K_JG z4L%TziMPpjN94BPSyT!c7ZOBJS|k9bHSaDPINaTa?n42FEkPlN8k_kblgbQGtIG+85$+qoonXE zW*{%m;%gE}>^ghqFXWg}bbav?NSGV1FtIqYbj_fcv8B2%TOMLPg8Ke9D&_)&Iqg#a z)Ob{ryw)vyATEu}M4#$?N@kVVB&|1)D$R%to_t!?W^UMw`^FSc0Ge|C5b8gvT~xEN z|7E1~KXbn95x?}%^stxf*a2{u#HLiyfBsbPP3d{!il&&ZE~%58N!rwqs@4_|LAE;X z!6hW@OW>j9;G7##=lJ%w$r6GQYc8ux`ltJAMWSYtA-gz5MmMgGK_bcvwJWnd?wG&V zEK{@c1P};LBWDKANZ^~317Bn4ha3%em#-$cz}z&`cSazrjWzLc>>6o}x`6yLc2acy zo?f`V@O-8q&AM}WTK7MaMiKt|knEZ*#8<~4%_FYB^9NjO3` zu2%V;NT_qej^&^<^4 zM&I5Za9a`5SewQFq|0NHDbih@6G>p?{@#0bLcty|ZgM)TA1AYr!--2cY_gordW&8$ z$DKXR0;%$LA>OsHmF%VO5e6qD30cvM#?mFf)m}wB@QOVJy#c5+PvKgVK0Q(%B-zQ^B6bb39pq{yE5^(tl?43A>(;7FZ^ksn5z#5v&+}LGQe-CGf7z z1gnq7is5r2{j=yy(p5B+UxR|48OMibhR!$C0})bfi_eC?m0>iAWf6vQ*hu7= z-Ca$d8+f>y5Ar;|42y{P%9@|czkhO?P9+ricNnVbYKS-ClwCk{WMsE# ziMEc*VuM{lVc`>X=<0aYpuGHPw+Z|6`S@E)rr3D9j|V#khu2~KEU!PD*Jh?{y>?6( z$q?Y#FzU4=9WA$#@RSPWzWwGK75rojoNJ3~apr(ru(cE>$UZyz6E z+kuNqC;tq5;5g?Tu&|)3Pr$j+d$KORaJ`G}FCmUY#G6cXZ~v*OjGa)|4AfbusJQ+3 zNWA1gC7+G$x2o*@-EAV!ct;4`@pOi(srYyGPdA+A_%#&` zJr#Q&eP3>oOiYq$%)MINPwkz%s7kyodUgb>-(F3`gl>!??6ko3g^4yh{ksL5Cy6`v-Ci~66g1fxO*uLz#27ZoLh>`_9zg#|&Z#M| z=u2RAyI2h9(EI)SH~otjOM{+w>=-0mSwLG4t*&0&-j)GZf4LOG5}GveJkx$&$mZVC z`|0(gMsguJVJTMLT>1BJ5wJRjB=$0mWYE$YPG3;mYDUaqDlB;%i!XUH>o$d+Crr5T zhZy$jPa4v>$R~)BePGu(STZ1XTs_r#I#91oD?TkI5&qc< z_g@$yez}QUmao(*Yxi;5P5E)L@Xz+RwW{s;wS z5%?0kIQFfmN(SpC^sKRV z_w$TbLM*3$$o&WJxe~4E#xs2JSDqwQ0Q%zcE4;8^`WDel!!zpYGkdYDz9J9ItFA73 zy}zAu2h?i*c#R}*9Mpz)zTt?PA5_wn2(!@IcFa^Z=Bm5 zgL?5vu=w2xA(0o#wwT7U2Z{7yxB(8U{t-Rx@$2Bz$Kw3~IQXh1a;PXm1~|Vx*|1Y9 zJt$HYZh-2F&d+y+@r@^cIMy_Mk;#HAegmghad1D3WViAyzf9>7Am$AM(O2h4{(FXA 
za;F8a&!1q!_t`UF;ANt1aO(9!xHU8J*qEmwTkJ_SbW!=6)Y1)6-`$JZN@-yQHeA`0 zja3Y0TAq?`@pSExs&E;YgX`@8#(ITT9I^lI7KI;IWQ0L#fvCgxKB+sf60(BlmCqOx zYfGm#9)F6iKT~KTI{T4!pD{sz@mal47?E{sK9EO&vFEjI!!Q}VTDsE;auXzQO$LDc z2b;Aj9v3Oi9AH?;i^$tLdEzToy$t+2DfBb+ye^mJX@9@Lo>Uc6+NZyW6j@}Dj4`^v z$PWBg!Bz2<%}uG-ui*^Kh>X_xt`~&C`Ix`|tu70__?Bf)uG<+(B$6wi($&}ZjFB<) z>(?j1g((!IShJ4vKa<6+PCq=DP}Pr}VH0@u3e2b&larGxtE(UlGc<$EYU4p=W$d&u zLBQpp*l~X@#D9XX9+#l$A%)wQ#zrAPkEG+~j<2f|&~{pUkm|G)WQwH?A6{a)Em&A3 z5vidBt`By=wY5LaSTnl7%d4xupN5}5>B|>5efzL2J0#^5myMBpTp}VO)FRk4`qIeA z2muk1g)dKSow}H6+^4Lptfc@XD;SW%!osof{1FI@Ne2FZ^KXjjnJ&yw!d~V?v{QjC zEk6&?CT*a2+tyK_MsS5e+!E>KL&#x(b-QmOp}+kq*ffLu^cFEsXYg#K>ecz|If;d( zuCW8Bb5*9!yhB3RD(rfAL5|+UkVSCI=Tdw>%@Pz5L3h_1w!@6_tj?8xi=Rkiyv^s! z&|8ET%2~Zr&EI@6vJ6SSfsmjJ?{C6SDsBunH6?t`m z$vO%PI5V;7rMINr&s#}<`&i9u4sbQb2M(BV+%N;tUFZeQTMl1=&VG#KSt-o&}23aSt9z; zp8uj>ORjtBFsx#r9k%y7DPl31bf2+Qf z0v;lhG`#XksGpmoV_oR#m)vgjRV6Qu`x6xD7int6E8bP-VoO=M5BDY~KlI7N6|bx< zENS40%7lazyg);JUU)Ip9b!o4I#hK38X1FJ^q!wssRHn=s>MJ?uwrN({Tksm4A1b} z>{@#_F5L^6nO8gd5dJ;#$TZ6mh5})th0UODNM33I!-{(0u|7-sf5GyhFdMfnrcRSL zNmr{3whYWE5Hd_mOj%mo)YMcc6uPkTp`f>1?DGe0ZFv)u=MNq{0At(wY)4H$h5-j3 zzXP~Kg|?46!Ds^0dJb;!ZC*3)tPVku#&vUq(x=VRA?E$#HHX`c;hvT;d!cCUN6Z*+ zHfJ*hT~$04T!IoE+-u*ne8S)_h5Kebj^)X=?70MttN1WCg10M+E}v&2X>Q?OC25Z+ zTnF!C#_C#(N7PAe$Ev~D>eJE$0(jT3uB~rK^<+O{^B!;t2xw5w_L#oINeMcy=Yp=g+Dq7YB z<%=J6z#z_uI~L-;nc+D5TaVku>3`TOvXt;ihg1DZvk%M2l|W`;)@*uc7v} zUhXI_c$AklwLtEo(M=Q91o$gY6b7d%@Tze@E0gj&^r^b3eP%~69c=F>3r2fhfd%+SUWo9cFS!G=6CqiM)1 zX|F&`!TLk#i@kMz4Q`aA*6+boa5aRZFPc8*GgfOuItI1X)QAQKDy|d((KKqiZE`ZP zw3JQXI_w0(%>62)-A1H88_;Q@Y(X@_7&kThEmwKFsnTrnx2m05jpP16AU5^OBo?=Q zCqNxKTJ43IkRSROP`Tob~V+!n!i(^7P4XamV({k7}F(drwElSNv$$Po&%51r$$0xi(01j0bFg8 zNGIaP?S&|^y}7)*;}?W*asgTSkLFwOnunh!lh@87{2>n4Sg-()B*;hMDcnrbu+fUo z4VOSd$6ou&j>1wwgr1sbn?vs)+UWeDk+$=T&6R8}P}XUCWN5r+x4%Risl{5bm~@!-bld|cd& zeFO0%KrmB4W2-t?)krJN=J~R~Rgw3{Q76Dw3t`9m0os7GW?bo-9T9eW;5Qq~5)I_x)4& zN-(f0d;jj;8*pOq{T@5)gf6 
z{280ajT1=piLu@VD5pPm_tCd#a3UeUyiGfHUe14%=i?4;Kr>%`Hb|zeL<~FT<<~wK zG1rVt3&T^ue?xn-4Lzy(v%W2yED~qHNvnz#_GBI*&Yl$2(Wa_?znEBk)awh(C|Pa8 z!fL*jw6IU5#{%EA<&m0xT4Sx%*_EO~ZW}<<4)NVk^5m}%IAnkC~T+6 zvFtBV_KZ1vWAXL4HIP}8e!Rdrl_ou=Kr7j%Ozz(AWHriSaq@q8vPl@%){ae38@UnC z$_kGjL2L1WzT7<$3iXrSLR}E=S}^I02%~1UKw(wDq{-s)1NJcZDm626 zcs8pgt8==0Eh=pbMXK%IHf$Xaf1CVuOK!5kyCj&6 zVTO0_kFu))NJw4Kl$Ozm zL1Q~=HaDFhK9wpumsAyA2yrKT=kBO9L!D5k%lC2G=y-!6=n4Ft2r`xnW9lRt4ge6v zm%N<|`EkY5niJh_>`@yW#izU-bXP2MOhS@&r>B^LghfO+=>JYRw}p6vx$Pbt0VBbz*0A0HtHC*j8Rv! ze0uJNM)9OAro1;kweDLgtFm?O~ zE!ndxxSK<|yn>RSN*ffp?CJS_U&FdT^tE9eafq5l=o{*-{_e^ZH9_K&ucG%wWYj}Y4pA|CNCTyctqd-;#tp89f41O<)5 zY<)9K2=4i_XGYjp)7G&B7Kz9M28$ZF1J~AWV@1!in4A+MFe?pbdSg%?zQc*`+5)=s z=1P=;|B!V|h+B`FOM4B``j%)k$doh0%29n+n3L4z%Ikt~n?Moh#G)G_1$nGKM zqv@$?V|CxG9s(B)-185vY~w|=*`Gs*vv9A4SjxiTWmCz$OxCB{+`h@I)Hy8yS%nT) z;#s1pnigwt?cMtZCJd^L2^T#_;+h%*F-FgA^LO5N;+k%M2{|GYvx>$QcWYk4o?(u? zt{CL&KT`anFTO_b!uzJfa97teM zP#4B3I3|!%X;|*^x&iz&WncVcy$UF%hae>G3^K^TUxD0tFT=OuC;f5mJlUPWo~jJ( z(0O8FXx=>;ySTb#2=C2h1@tU!Ak-@sWO1-V?tnw~^~nO08mC1+f?`$1i)#(b8a2~@ z@85EQCwv{JmUS12{QA8RZlX|Uj9rI%Qv3~F=(I`12$H} zK0?9k9d_UyYrlD{i#Arat*fMpu|GvTE>e48r$8&5Q7MIV+hQD56|rKlJ^a%o5eXx? 
zxla)rJazxEx>l2{l)4*NDp>d-Ia_T3@31wJ58Nt40D@I&GE55-jv5-$+O7|@8~oU9 zS4u3LRm4`lML|3KG+#NIbsH(I-23uM*>`HTyRghW>T1j>)k!GU-zSItKGSJQDEQ;i z0v1TcLDYy{{YxvJgy>{{J`KQyN6il@J}^)KXd0$}y#VHyPQX2>U^GPq*nZtbB>H8( zCo28UQri)}Ua#*S6iyPcdQWS6^^2xlN65Fk_cP(gQxqz_At%~U3~JlvM>+0--ay=A zAV|OrBJeP^A?KstZTFo->DZj#F zRsb_z8yrlPQ>?=Xo>WGU#5|QZPjoHcfK(XRG5hcAm>nG*(K9fxbHPGqSoZRNDGba~ z=7Sh`coH1v-Qz*K;9!4j9Ew!}aNQ+n6VPAct@M5VW_K_qQ=xlW>3%UkjCc?HC_py# zR#I{hMc{Jc?HjYiSzt=gxNo?05(Gd`n;6gnlmPPr5g*L@J&HsYH6 zEN$N@ZC+_*ai96UdLEmB|J%uR%ykDXvF!#x;6;*@cPZExw4Db>jcn%TZkyM72q-8@ z5ebU1WPylmN&Nr|APNC<#0qkBe7wrGE5gX;6YtC8_2V{Ng)dCS!1y}~0PP>Sk~ZfX zg@8tn8I)@B3k!^0nV_7HGpbpV1(U_c{3L#~H~abdSpz#*{Swf5XLSFSvh$_AvD%qn zopW9%19u<@!->Dd#Nb|^@6)lf$GBggv05*Q(#j`y%~aclCFv-M(G5#ZXTlO&P=*57 zApW$##A0i}FsK-T1p5Qg2Oj`2vwcDZ<)wmKR3xg3bZl6U?tD9qJ;8@rOl1$;0RiI2 zI>Oa7w)q4BJK*i$0|Na?8rzIiiTZ+}2rv-sxs4I+!UCN zYLb!h+3AktDUOnZFkIkjBeNrC9C!piTEP1q;z9Om;(j;2t+NyNepTEd9tmfzp6oOw z^rz{P@M~jwc)T*}e`znkJ#%NA&wEl}Vn*1C8#0iy8}_}lun~j#`neFKAx`@aAnvy~ z_r?Q;)?-3A_T8(qjE_?)Zk~Bawvw1F6W!$R2|wOJzNShUZO6?fGjibK{EC?-8S`0s zBOPvG)X%js(}>bJZnjGQB_uKZ@yGJR^OkJ-07QPSvU7tVdzPoK9@);d3hzJm<#Ncr zIDxOUob8pGo{gBjox&Q+z&o9l?jD?x=^3zN%&x?qusB$*nN{7v)BxRwSRGgfXNqf0 zQC(KdireaneeRu(?=Xl|WU52>{|Yk9D_5tu008T{qQf3f_^R2{xMhCHxqVqHDQj^#|yWg`K zL$pJc9dR=5Q^?$4dkI_G+0@q23UV4}UR7zhtzTI|MSr1^67b4MUH^LqD0^+H*U|Eh zte~$-?j3sY&R|p5BLw+7$)k)Px^9A<=R1^ilruH6O$|tRqy^Rx2pHQxMSG3A<@T3) zjg{7y)*iR}eVuxs0%u$6G1DJWIIH#g6((KJV4N;gbNjpI%xuQSOan_t`z%>*^c!hr zdtih?r7~H&-_Q&5Xaps$FeOe@U~LTa_@9>Af&h{<;y}a%%KV#JpTE3H`vK=#l2`y6 zv_*6EuI#`@nwTSRc9#}ZD_{=VBwS`Ml~Y7O>0#~j1J&~{Z{J8#pBk?c!^4^%Z$iPllvlR_ z6&qW0SC^!g!JiFTDr~e6cVa_8CCdw`R^sM2AuT348&@5pqUAFz_8!$gb@K7|DSsWY z80EwD)saET9`822#Y)#<-OVTLMTJJWK1ok~5m+NsqW}P>B8>|Ud@ndfiq3kHBgZ{j zYJ-AysOtL+Fo+(u_k6TTU7c}2Zo0j&KwxSaD<;G^`Hf)RFuGVg>25%^EWp)cPz<7-zid= z2tF;hM8g_bar_>{7-lqZlHaFo@$@F2S9aS1BYAPKPT|DzsmI=joQ85znX2wSXWBZK zfK+T*S5MkSSMBA^2l+#GNVeJ(3L#&eVd?g{L&e9N?O*7pNLH<8s7A(VjCwlXdRp$4 
z_oZQt3Xuhu0Ctuj&cy<2GyFjlBIZF08VlHwB)7M*xF9|9&CJX3hgTRGxeBOh4RtW? zO_rOWPXfk}{sCM%i-=2tlp1+|Prpf)xpsHxW=znX0Uh3QK2+kRbF*Db<>1;c6`bb? zHv#CBC#nKv1ka)?>B8C`9IHCgh6?&Ha7q6wpYO>8#3k&gf^=YPaIn}4wrZFVdJ#9o z=deQvLW0#%6`fT;!Qi&g=1<*&zW1ehc)+fI#Th1AYi}L%o88*V3d%(otz(VOs4IU!5KEdDr1O=iPt}{z{L|8F`_*!9^*iL>xvZTk%M%$LDXQEFA6fvl>NSF6pHCa~ zg;a*YE&)`oW8h3nR1eM@FG`g`CAAZ1?Kn-x2n(2-;u~0|#I1zXx=sWSc1dCqZlrQ5 zYIJEVSpf2K@@7e;4VWp>?qb$;_TKj({Tr<_W4M~Ua)tZp%O6y1cys}eLwU@;N zpZ>~KInV!kcH@9R{U2;ZDi!@%B}Whe-rb!y#(-Uwh~DzsZ;47uKJoY4ks7mPmcG~IL}dd7Wq$(6U6|S!|4{xOR#>7U9Ap+JkaL<4Tt#uOg9AS^9)X~ zyGZvoPt?+vy|1-Uk^)O<`D{NZ`K+(WbuH8|o45cRn>5Q3BL;itU5?=M&yK3U)~pS z&@Lw;?<7noydS(ekY&)XC6S1?&=z$pFoK=5dJ{%X#1tJ}AKsI1bB~r=_!m+lZe(nCKveK+^!0$aW=#6zu+~8G}=3?U7zxXDlk9AU2wvFKR>WsR_YSE z_sMjuEv1Ty?w_q?Kyh$wih4#sm1@V~-Y z2=x>udKkKT=HmQvsLWUFet^!w5xYcZ{Ur?^vcKzxib>D7c@M2FmD<6hT&C;=jR-+J z`)ry%&%v7A?-Oq-7i6!;Dm`X{o%@vuuI-sa9`g_9hVUpgxFYg?K3wa6j9xL}JO);KPpaU^=XPVCma$4xJJ7NeBjXv!-o?sT&W z5RVrmQzvN|aC;{yi93w4)4y0ZUX6LjhGwR7#k07$*Hd6i@}94XHQUzx(iM&F#$5}} zh!`4QLNf}PM-+2&NFU+r2RxG%{A@Ue(rRBs=ulPsVWZE(w1I<9N-XKx*^i&%hg6mG9V=tSgMCH2o%rz0j)VM5derB{2VBSlG zY7Y+YEP007C=WWzH3(?lhF;es>!onPv~}WYc0v z8&1<{Nm1wMZcC|J-W-wpmHkiLe5h>d_yit2A!taeJ8@4F*DOcB0{&UWD?INTdOfDbZ6AxH0pNCk!WCIzH%`+ ze$SDB`rdAyCN1XpICHx=3LWK`Q(2S9NWr@X(`aX+1bzSAysGKw%dgiSLf=X<#aXu$ zGKnU0$=1HjIS|{eIC|E+7J}k9?OTaQko9lmm4x2iy8Ci*RF>G7|4Pa$A-K3K%inw{ zYQdY;SGJL;pHgz9ru!R2M? 
zP%W)*$!<9i=8V-_jGwo63hw0jDt(YXP_xjA8K<~8?Li-`F4_QnDZw(M#L&7)- zj3)1@PbcD-KTWD!AMOeIx>~YV*5I`7-0{UtV7NX9dxkvVbv=(>osW+5Pv06Z#Y-hJ ziMN;k?BJaRRTZ3%{!|D>@R;E=<8iFcDPAoufp}9mo^HQUlO~lBx(bXCyb4Aw<8tow$QM!k_eSUqsJ5)WdbwNbJ_s~YgQ3rL9kYQl;&!%^= z)g35*Tq;xVd#-fC z7-&YcVP4JQSjUE*5B&%AG> zm(F*Jo`xcw6UnHat+!3yQdO{^!468tEquMl!GB1d=9U(|ZmzX~J@*Y60&HdE^@ayB zpFoj@^;H?yMI;|MVmbv^<23Y zh-LRbW~`X0w#VV(4QfP{*f6=GF<=tB9JcPVx;bw*r&F(HMmgcCS{NPZ%ne*~Smmy36zz@7_MX8WN1Lz%;kZy`YcX6fOraeVF zmP?i-IYbmb=vR#O>Kd0n`%{ShV9@VPMf;;OfUs5Wx{#+rr}2>NYDtbE6@^!NOpVa__Wi30{_|xUI!A$5Ge31|k$<_WTHZu> z^!{K`E?NANSi8S3H^fVlqLa_`eDwGrt3PG8_@>xjNyunn>N|I<$8bXHJVbAlU_Sa+ zId|(Gitkb@DLS`}E>Ob~ZdW5M!qKj}-aDRG4A>mXrtmZCdy_CT;lv-I!ZRaK*GV z3YkFU+vyBq9cm9pR;Yx?WA|EbJO*y7ypKrr=9wvH^GR1J$m~4jU(L5wN_;Ht!`(T!*=cJ_LbJ*8q~J!10VP>QR%q>{hdf;JthK#;b1x*2cG1q#I#Nz3rI0?RPRqC_vXrvga^LlI#qwr!$f^FdZvc4exbfV4;1-&++7O`xiwsC7AJjcx7`j8YA zH3g;mRbY&erS5q?!uH`!vCqr`7m{M%oX*+%#!n1|)k6{ZOfwUnO%|${aN}ra)&~cQ zZipHSs7T02R(A8+nl}h$gNP-hQ{E{5{%!BVk8dtLtUC9<_oOcp~1*qB|g*<2O1F$5c&Mub+<9_FERT6JGO;Bhu z0;&z0~#v-FI#TV6~*D9f`Z10z|2m0X*sLdDHCl-j7`()TN>Tf=!??jc(s?|jY^XEN@|`oIX4 z{*?kHYrsh*`Sfl5=EwnuP`jrR(<3|rCAXO!zgz;O88RjN#qk5?3%>DO4|mFCxPZQ0 z#P2v9&!}G(2cA%KwyUH&v6v7w zlpB9SzA+v}cuZQH!W~19nz~D9>{z1Ss7tWLU5Hw&RD|7i5riQ@{L8jYRs8W`LFcX%W)Aaz|3P-fd@gOM)Pex3tO_}(LpW)wzr5uJT}OIPA{@{x2}Yu`|EkWv$V z93OjTx84V1dcf9V=LyBs>zjybB>Vlrg=<$FyB&>awx(#ySI{fRldS$tQ5;O7T)yOa z-dsTM7PUaee}^l$7^47QcseCqlUxB6;t(j2;1;**qz;9CN9(Y6w*O^ethCG_hck)H zU~lQQ%yx-8PN&MY=Z|C(^_b|=cW00atw`m|i{xG2*J{X(J3h)xw~x$b{ePF&kDNR` zd2rn&)O0uOaWOv^YIsRR`eI-~)eufjEqFnjQ%_KO6&#NpT>$F=|9YgthVR` z^D7&(8EZH|$Qi&TP*1X|^lF?ebO0yUQ?c zzk5i;qlHe5$v=Mhcm->w5+k*8WNFEh0P(t_09~5yY%i1JW#~{7>6x;7hxxjm`;*8+ z)8|ZlmHs;H^l*>wU827{-pxykQq-q}*j>O~_cK8YM9@$@CbdS-4z$m%%QHo&r={%9 zvb56Xu{|{ULtLsWx0?4|ke8*OxAPM13^F#YCb#PhPu9+-aD8m9yzFAW@b|+xFy#vv zWdH3h<>tNl`xl=*LcFdEI>nm{b3BS;SDcv|US@o>%azEhQ_w({sLJjoJ$&oD{oXX@ zhy@Loex#Zx8xz?!lU#$9w}iY0%+rH0XboDm!?Z>euE+3flA+hW(7~U#V2Q!gV0&uY 
z`(k@$;rQ*B*re3=o4oA%mb`oqsorTJ3*A~hns?14B*SYzu<$du`)q6tjokAL)LWsD zV6RJ}n;yHfTck9S`qZkxTTagLEiXzjl-z^BV!EM15*&M9ceJ7aDG1}MH4vQ1PC z#-y)3Am!7oIxa=`(Da0BoDbJKp^(5i?Oe38p6_kw`zG-%B;w|cxQ}a4aIl`ih%s_y z50y!8b!G)o5_j?Vw)PzsUVB%bB09Q2&y?l4y2?}No7AdV`Obk14#x`lh>3%K#cwx; z&-63J*{>9+tEb($b*9gh)Ze}q_86sZB%~k6O;3MfEZfV^hcnjUVx9PD9XZ-YgVpN9 zSUiHfziF*=@}S>hnl=8N*|>tR2fWy)x9Y08DI{*>>f$mME|ZPUG2ac{L))vKR8eo6 z9Q5mi0y#3l+KzWFf92bE8tcrBcB4?}(x$RQP^Z2X=M_|xIW?7s0Pn44k`^pO(X?wm zK(2In^45wodRq(%bzRbY*Sp+^MW7;(5>j-LP!Sq8gCjdVU!N^OnIetGYVJ7>(q$X# zPucOtJAdF2Z@EjovO666ec<8n?9wvCN{M%=%VE)u9y{`2rn zm0w_tg|_c@U1_}N;)}&sm(U&$ZRcbdRb>KIvEqO1P3ND31p_?-TmMIU-N_XE^{eHf zzxL8wvie^GBY$Xh|C7D<{(-C*y>AtF;)x}JX)?hCOHVGpnn29eQf+OeK@9y1+mM>aUvk zN2hVyC5u6-nz>280iSzVnNl7AgbXZ9OxcBnq2@YnZ Date: Thu, 9 May 2024 13:14:31 -0600 Subject: [PATCH 036/149] fix(install.sh): install from github when using `--stable` on macOS (#13216) --- install.sh | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/install.sh b/install.sh index 50c3c85a8f020..e5d0ca6fab0ae 100755 --- a/install.sh +++ b/install.sh @@ -373,14 +373,6 @@ main() { ARCH=${ARCH:-$(arch)} TERRAFORM_ARCH=${TERRAFORM_ARCH:-$(terraform_arch)} - # We can't reasonably support installing specific versions of Coder through - # Homebrew, so if we're on macOS and the `--version` flag was set, we should - # "detect" standalone to be the appropriate installation method. This check - # needs to occur before we set `VERSION` to a default of the latest release. - if [ "$OS" = "darwin" ] && [ "${VERSION-}" ]; then - METHOD=standalone - fi - # If we've been provided a flag which is specific to the standalone installation # method, we should "detect" standalone to be the appropriate installation method. # This check needs to occur before we set these variables with defaults. 
@@ -395,6 +387,15 @@ main() { exit 1 fi + # We can't reasonably support installing specific versions of Coder through + # Homebrew, so if we're on macOS and the `--version` flag or the `--stable` + # flag (our tap follows mainline) was set, we should "detect" standalone to + # be the appropriate installation method. This check needs to occur before we + # set `VERSION` to a default of the latest release. + if [ "$OS" = "darwin" ] && { [ "${VERSION-}" ] || [ "${STABLE}" = 1 ]; }; then + METHOD=standalone + fi + # These are used by the various install_* functions that make use of GitHub # releases in order to download and unpack the right release. CACHE_DIR=$(echo_cache_dir) From 4671ebb330d5507c86fe97dd82556d55b6a312e6 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Fri, 10 May 2024 14:31:49 +0200 Subject: [PATCH 037/149] feat: measure pubsub latencies and expose metrics (#13126) --- coderd/database/pubsub/latency.go | 74 +++++++++++++ coderd/database/pubsub/pubsub.go | 61 ++++++++++- coderd/database/pubsub/pubsub_linux_test.go | 111 ++++++++++++++++++++ coderd/database/pubsub/pubsub_test.go | 39 +++++-- testutil/prometheus.go | 79 ++++++++++---- 5 files changed, 326 insertions(+), 38 deletions(-) create mode 100644 coderd/database/pubsub/latency.go diff --git a/coderd/database/pubsub/latency.go b/coderd/database/pubsub/latency.go new file mode 100644 index 0000000000000..0797e6642beab --- /dev/null +++ b/coderd/database/pubsub/latency.go @@ -0,0 +1,74 @@ +package pubsub + +import ( + "bytes" + "context" + "fmt" + "time" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "cdr.dev/slog" +) + +// LatencyMeasurer is used to measure the send & receive latencies of the underlying Pubsub implementation. We use these +// measurements to export metrics which can indicate when a Pubsub implementation's queue is overloaded and/or full. 
+type LatencyMeasurer struct { + // Create unique pubsub channel names so that multiple coderd replicas do not clash when performing latency measurements. + channel uuid.UUID + logger slog.Logger +} + +// LatencyMessageLength is the length of a UUIDv4 encoded to hex. +const LatencyMessageLength = 36 + +func NewLatencyMeasurer(logger slog.Logger) *LatencyMeasurer { + return &LatencyMeasurer{ + channel: uuid.New(), + logger: logger, + } +} + +// Measure takes a given Pubsub implementation, publishes a message & immediately receives it, and returns the observed latency. +func (lm *LatencyMeasurer) Measure(ctx context.Context, p Pubsub) (send, recv time.Duration, err error) { + var ( + start time.Time + res = make(chan time.Duration, 1) + ) + + msg := []byte(uuid.New().String()) + lm.logger.Debug(ctx, "performing measurement", slog.F("msg", msg)) + + cancel, err := p.Subscribe(lm.latencyChannelName(), func(ctx context.Context, in []byte) { + if !bytes.Equal(in, msg) { + lm.logger.Warn(ctx, "received unexpected message", slog.F("got", in), slog.F("expected", msg)) + return + } + + res <- time.Since(start) + }) + if err != nil { + return -1, -1, xerrors.Errorf("failed to subscribe: %w", err) + } + defer cancel() + + start = time.Now() + err = p.Publish(lm.latencyChannelName(), msg) + if err != nil { + return -1, -1, xerrors.Errorf("failed to publish: %w", err) + } + + send = time.Since(start) + select { + case <-ctx.Done(): + lm.logger.Error(ctx, "context canceled before message could be received", slog.Error(ctx.Err()), slog.F("msg", msg)) + return send, -1, ctx.Err() + case recv = <-res: + return send, recv, nil + } +} + +func (lm *LatencyMeasurer) latencyChannelName() string { + return fmt.Sprintf("latency-measure:%s", lm.channel) +} diff --git a/coderd/database/pubsub/pubsub.go b/coderd/database/pubsub/pubsub.go index 59e5b23c34b00..c391a7c3eaf66 100644 --- a/coderd/database/pubsub/pubsub.go +++ b/coderd/database/pubsub/pubsub.go @@ -7,6 +7,7 @@ import ( "io" "net" 
"sync" + "sync/atomic" "time" "github.com/google/uuid" @@ -28,6 +29,9 @@ type ListenerWithErr func(ctx context.Context, message []byte, err error) // might have been dropped. var ErrDroppedMessages = xerrors.New("dropped messages") +// LatencyMeasureTimeout defines how often to trigger a new background latency measurement. +const LatencyMeasureTimeout = time.Second * 10 + // Pubsub is a generic interface for broadcasting and receiving messages. // Implementors should assume high-availability with the backing implementation. type Pubsub interface { @@ -205,6 +209,10 @@ type PGPubsub struct { receivedBytesTotal prometheus.Counter disconnectionsTotal prometheus.Counter connected prometheus.Gauge + + latencyMeasurer *LatencyMeasurer + latencyMeasureCounter atomic.Int64 + latencyErrCounter atomic.Int64 } // BufferSize is the maximum number of unhandled messages we will buffer @@ -478,6 +486,30 @@ var ( ) ) +// additional metrics collected out-of-band +var ( + pubsubSendLatencyDesc = prometheus.NewDesc( + "coder_pubsub_send_latency_seconds", + "The time taken to send a message into a pubsub event channel", + nil, nil, + ) + pubsubRecvLatencyDesc = prometheus.NewDesc( + "coder_pubsub_receive_latency_seconds", + "The time taken to receive a message from a pubsub event channel", + nil, nil, + ) + pubsubLatencyMeasureCountDesc = prometheus.NewDesc( + "coder_pubsub_latency_measures_total", + "The number of pubsub latency measurements", + nil, nil, + ) + pubsubLatencyMeasureErrDesc = prometheus.NewDesc( + "coder_pubsub_latency_measure_errs_total", + "The number of pubsub latency measurement failures", + nil, nil, + ) +) + // We'll track messages as size "normal" and "colossal", where the // latter are messages larger than 7600 bytes, or 95% of the postgres // notify limit. 
If we see a lot of colossal packets that's an indication that @@ -504,6 +536,12 @@ func (p *PGPubsub) Describe(descs chan<- *prometheus.Desc) { // implicit metrics descs <- currentSubscribersDesc descs <- currentEventsDesc + + // additional metrics + descs <- pubsubSendLatencyDesc + descs <- pubsubRecvLatencyDesc + descs <- pubsubLatencyMeasureCountDesc + descs <- pubsubLatencyMeasureErrDesc } // Collect implements, along with Describe, the prometheus.Collector interface @@ -528,6 +566,20 @@ func (p *PGPubsub) Collect(metrics chan<- prometheus.Metric) { p.qMu.Unlock() metrics <- prometheus.MustNewConstMetric(currentSubscribersDesc, prometheus.GaugeValue, float64(subs)) metrics <- prometheus.MustNewConstMetric(currentEventsDesc, prometheus.GaugeValue, float64(events)) + + // additional metrics + ctx, cancel := context.WithTimeout(context.Background(), LatencyMeasureTimeout) + defer cancel() + send, recv, err := p.latencyMeasurer.Measure(ctx, p) + + metrics <- prometheus.MustNewConstMetric(pubsubLatencyMeasureCountDesc, prometheus.CounterValue, float64(p.latencyMeasureCounter.Add(1))) + if err != nil { + p.logger.Warn(context.Background(), "failed to measure latency", slog.Error(err)) + metrics <- prometheus.MustNewConstMetric(pubsubLatencyMeasureErrDesc, prometheus.CounterValue, float64(p.latencyErrCounter.Add(1))) + return + } + metrics <- prometheus.MustNewConstMetric(pubsubSendLatencyDesc, prometheus.GaugeValue, send.Seconds()) + metrics <- prometheus.MustNewConstMetric(pubsubRecvLatencyDesc, prometheus.GaugeValue, recv.Seconds()) } // New creates a new Pubsub implementation using a PostgreSQL connection. @@ -544,10 +596,11 @@ func New(startCtx context.Context, logger slog.Logger, database *sql.DB, connect // newWithoutListener creates a new PGPubsub without creating the pqListener. 
func newWithoutListener(logger slog.Logger, database *sql.DB) *PGPubsub { return &PGPubsub{ - logger: logger, - listenDone: make(chan struct{}), - db: database, - queues: make(map[string]map[uuid.UUID]*msgQueue), + logger: logger, + listenDone: make(chan struct{}), + db: database, + queues: make(map[string]map[uuid.UUID]*msgQueue), + latencyMeasurer: NewLatencyMeasurer(logger.Named("latency-measurer")), publishesTotal: prometheus.NewCounterVec(prometheus.CounterOpts{ Namespace: "coder", diff --git a/coderd/database/pubsub/pubsub_linux_test.go b/coderd/database/pubsub/pubsub_linux_test.go index 4952539bc40fc..efde759096677 100644 --- a/coderd/database/pubsub/pubsub_linux_test.go +++ b/coderd/database/pubsub/pubsub_linux_test.go @@ -3,6 +3,7 @@ package pubsub_test import ( + "bytes" "context" "database/sql" "fmt" @@ -15,6 +16,8 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/xerrors" + "cdr.dev/slog/sloggers/sloghuman" + "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/database/dbtestutil" @@ -294,3 +297,111 @@ func TestPubsub_Disconnect(t *testing.T) { } require.True(t, gotDroppedErr) } + +func TestMeasureLatency(t *testing.T) { + t.Parallel() + + newPubsub := func() (pubsub.Pubsub, func()) { + ctx, cancel := context.WithCancel(context.Background()) + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + connectionURL, closePg, err := dbtestutil.Open() + require.NoError(t, err) + db, err := sql.Open("postgres", connectionURL) + require.NoError(t, err) + ps, err := pubsub.New(ctx, logger, db, connectionURL) + require.NoError(t, err) + + return ps, func() { + _ = ps.Close() + _ = db.Close() + closePg() + cancel() + } + } + + t.Run("MeasureLatency", func(t *testing.T) { + t.Parallel() + + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + ps, done := newPubsub() + defer done() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + send, recv, err := 
pubsub.NewLatencyMeasurer(logger).Measure(ctx, ps) + require.NoError(t, err) + require.Greater(t, send.Seconds(), 0.0) + require.Greater(t, recv.Seconds(), 0.0) + }) + + t.Run("MeasureLatencyRecvTimeout", func(t *testing.T) { + t.Parallel() + + logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) + ps, done := newPubsub() + defer done() + + ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(-time.Hour)) + defer cancel() + + send, recv, err := pubsub.NewLatencyMeasurer(logger).Measure(ctx, ps) + require.ErrorContains(t, err, context.DeadlineExceeded.Error()) + require.Greater(t, send.Seconds(), 0.0) + require.EqualValues(t, recv, time.Duration(-1)) + }) + + t.Run("MeasureLatencyNotifyRace", func(t *testing.T) { + t.Parallel() + + var buf bytes.Buffer + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) + logger = logger.AppendSinks(sloghuman.Sink(&buf)) + + lm := pubsub.NewLatencyMeasurer(logger) + ps, done := newPubsub() + defer done() + + racy := newRacyPubsub(ps) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + defer cancel() + + send, recv, err := lm.Measure(ctx, racy) + assert.NoError(t, err) + assert.Greater(t, send.Seconds(), 0.0) + assert.Greater(t, recv.Seconds(), 0.0) + + logger.Sync() + assert.Contains(t, buf.String(), "received unexpected message") + }) +} + +// racyPubsub simulates a race on the same channel by publishing two messages (one expected, one not). +// This is used to verify that a subscriber will only listen for the message it explicitly expects. 
+type racyPubsub struct { + pubsub.Pubsub +} + +func newRacyPubsub(ps pubsub.Pubsub) *racyPubsub { + return &racyPubsub{ps} +} + +func (s *racyPubsub) Subscribe(event string, listener pubsub.Listener) (cancel func(), err error) { + return s.Pubsub.Subscribe(event, listener) +} + +func (s *racyPubsub) SubscribeWithErr(event string, listener pubsub.ListenerWithErr) (cancel func(), err error) { + return s.Pubsub.SubscribeWithErr(event, listener) +} + +func (s *racyPubsub) Publish(event string, message []byte) error { + err := s.Pubsub.Publish(event, []byte("nonsense")) + if err != nil { + return xerrors.Errorf("failed to send simulated race: %w", err) + } + return s.Pubsub.Publish(event, message) +} + +func (s *racyPubsub) Close() error { + return s.Pubsub.Close() +} diff --git a/coderd/database/pubsub/pubsub_test.go b/coderd/database/pubsub/pubsub_test.go index 97e507412a214..d36298bb3221d 100644 --- a/coderd/database/pubsub/pubsub_test.go +++ b/coderd/database/pubsub/pubsub_test.go @@ -39,7 +39,11 @@ func TestPGPubsub_Metrics(t *testing.T) { err = registry.Register(uut) require.NoError(t, err) + // each Gather measures pubsub latency by publishing a message & subscribing to it + var gatherCount float64 + metrics, err := registry.Gather() + gatherCount++ require.NoError(t, err) require.True(t, testutil.PromGaugeHasValue(t, metrics, 0, "coder_pubsub_current_events")) require.True(t, testutil.PromGaugeHasValue(t, metrics, 0, "coder_pubsub_current_subscribers")) @@ -59,19 +63,26 @@ func TestPGPubsub_Metrics(t *testing.T) { _ = testutil.RequireRecvCtx(ctx, t, messageChannel) require.Eventually(t, func() bool { + latencyBytes := gatherCount * pubsub.LatencyMessageLength metrics, err = registry.Gather() + gatherCount++ assert.NoError(t, err) return testutil.PromGaugeHasValue(t, metrics, 1, "coder_pubsub_current_events") && testutil.PromGaugeHasValue(t, metrics, 1, "coder_pubsub_current_subscribers") && testutil.PromGaugeHasValue(t, metrics, 1, "coder_pubsub_connected") && 
- testutil.PromCounterHasValue(t, metrics, 1, "coder_pubsub_publishes_total", "true") && - testutil.PromCounterHasValue(t, metrics, 1, "coder_pubsub_subscribes_total", "true") && - testutil.PromCounterHasValue(t, metrics, 1, "coder_pubsub_messages_total", "normal") && - testutil.PromCounterHasValue(t, metrics, 7, "coder_pubsub_received_bytes_total") && - testutil.PromCounterHasValue(t, metrics, 7, "coder_pubsub_published_bytes_total") + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_publishes_total", "true") && + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_subscribes_total", "true") && + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_messages_total", "normal") && + testutil.PromCounterHasValue(t, metrics, float64(len(data))+latencyBytes, "coder_pubsub_received_bytes_total") && + testutil.PromCounterHasValue(t, metrics, float64(len(data))+latencyBytes, "coder_pubsub_published_bytes_total") && + testutil.PromGaugeAssertion(t, metrics, func(in float64) bool { return in > 0 }, "coder_pubsub_send_latency_seconds") && + testutil.PromGaugeAssertion(t, metrics, func(in float64) bool { return in > 0 }, "coder_pubsub_receive_latency_seconds") && + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_latency_measures_total") && + !testutil.PromCounterGathered(t, metrics, "coder_pubsub_latency_measure_errs_total") }, testutil.WaitShort, testutil.IntervalFast) - colossalData := make([]byte, 7600) + colossalSize := 7600 + colossalData := make([]byte, colossalSize) for i := range colossalData { colossalData[i] = 'q' } @@ -89,16 +100,22 @@ func TestPGPubsub_Metrics(t *testing.T) { _ = testutil.RequireRecvCtx(ctx, t, messageChannel) require.Eventually(t, func() bool { + latencyBytes := gatherCount * pubsub.LatencyMessageLength metrics, err = registry.Gather() + gatherCount++ assert.NoError(t, err) return testutil.PromGaugeHasValue(t, metrics, 1, "coder_pubsub_current_events") && 
testutil.PromGaugeHasValue(t, metrics, 2, "coder_pubsub_current_subscribers") && testutil.PromGaugeHasValue(t, metrics, 1, "coder_pubsub_connected") && - testutil.PromCounterHasValue(t, metrics, 2, "coder_pubsub_publishes_total", "true") && - testutil.PromCounterHasValue(t, metrics, 2, "coder_pubsub_subscribes_total", "true") && - testutil.PromCounterHasValue(t, metrics, 1, "coder_pubsub_messages_total", "normal") && + testutil.PromCounterHasValue(t, metrics, 1+gatherCount, "coder_pubsub_publishes_total", "true") && + testutil.PromCounterHasValue(t, metrics, 1+gatherCount, "coder_pubsub_subscribes_total", "true") && + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_messages_total", "normal") && testutil.PromCounterHasValue(t, metrics, 1, "coder_pubsub_messages_total", "colossal") && - testutil.PromCounterHasValue(t, metrics, 7607, "coder_pubsub_received_bytes_total") && - testutil.PromCounterHasValue(t, metrics, 7607, "coder_pubsub_published_bytes_total") + testutil.PromCounterHasValue(t, metrics, float64(colossalSize+len(data))+latencyBytes, "coder_pubsub_received_bytes_total") && + testutil.PromCounterHasValue(t, metrics, float64(colossalSize+len(data))+latencyBytes, "coder_pubsub_published_bytes_total") && + testutil.PromGaugeAssertion(t, metrics, func(in float64) bool { return in > 0 }, "coder_pubsub_send_latency_seconds") && + testutil.PromGaugeAssertion(t, metrics, func(in float64) bool { return in > 0 }, "coder_pubsub_receive_latency_seconds") && + testutil.PromCounterHasValue(t, metrics, gatherCount, "coder_pubsub_latency_measures_total") && + !testutil.PromCounterGathered(t, metrics, "coder_pubsub_latency_measure_errs_total") }, testutil.WaitShort, testutil.IntervalFast) } diff --git a/testutil/prometheus.go b/testutil/prometheus.go index 3d4879c14c324..94d350abe37e2 100644 --- a/testutil/prometheus.go +++ b/testutil/prometheus.go @@ -7,29 +7,60 @@ import ( "github.com/stretchr/testify/require" ) -func PromGaugeHasValue(t testing.TB, 
metrics []*dto.MetricFamily, value float64, name string, label ...string) bool { +type kind string + +const ( + counterKind kind = "counter" + gaugeKind kind = "gauge" +) + +func PromGaugeHasValue(t testing.TB, metrics []*dto.MetricFamily, value float64, name string, labels ...string) bool { t.Helper() - for _, family := range metrics { - if family.GetName() != name { - continue - } - ms := family.GetMetric() - metricsLoop: - for _, m := range ms { - require.Equal(t, len(label), len(m.GetLabel())) - for i, lv := range label { - if lv != m.GetLabel()[i].GetValue() { - continue metricsLoop - } - } - return value == m.GetGauge().GetValue() - } - } - return false + return value == getValue(t, metrics, gaugeKind, name, labels...) +} + +func PromCounterHasValue(t testing.TB, metrics []*dto.MetricFamily, value float64, name string, labels ...string) bool { + t.Helper() + return value == getValue(t, metrics, counterKind, name, labels...) } -func PromCounterHasValue(t testing.TB, metrics []*dto.MetricFamily, value float64, name string, label ...string) bool { +func PromGaugeAssertion(t testing.TB, metrics []*dto.MetricFamily, assert func(in float64) bool, name string, labels ...string) bool { t.Helper() + return assert(getValue(t, metrics, gaugeKind, name, labels...)) +} + +func PromCounterAssertion(t testing.TB, metrics []*dto.MetricFamily, assert func(in float64) bool, name string, labels ...string) bool { + t.Helper() + return assert(getValue(t, metrics, counterKind, name, labels...)) +} + +func PromCounterGathered(t testing.TB, metrics []*dto.MetricFamily, name string, labels ...string) bool { + t.Helper() + return getMetric(t, metrics, name, labels...) != nil +} + +func PromGaugeGathered(t testing.TB, metrics []*dto.MetricFamily, name string, labels ...string) bool { + t.Helper() + return getMetric(t, metrics, name, labels...) 
!= nil +} + +func getValue(t testing.TB, metrics []*dto.MetricFamily, kind kind, name string, labels ...string) float64 { + m := getMetric(t, metrics, name, labels...) + if m == nil { + return -1 + } + + switch kind { + case counterKind: + return m.GetCounter().GetValue() + case gaugeKind: + return m.GetGauge().GetValue() + default: + return -1 + } +} + +func getMetric(t testing.TB, metrics []*dto.MetricFamily, name string, labels ...string) *dto.Metric { for _, family := range metrics { if family.GetName() != name { continue @@ -37,14 +68,16 @@ func PromCounterHasValue(t testing.TB, metrics []*dto.MetricFamily, value float6 ms := family.GetMetric() metricsLoop: for _, m := range ms { - require.Equal(t, len(label), len(m.GetLabel())) - for i, lv := range label { + require.Equal(t, len(labels), len(m.GetLabel())) + for i, lv := range labels { if lv != m.GetLabel()[i].GetValue() { continue metricsLoop } } - return value == m.GetCounter().GetValue() + + return m } } - return false + + return nil } From 989575c5b6ad25197bab8b22bd1b7cb2f2086c97 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Fri, 10 May 2024 16:35:59 +0200 Subject: [PATCH 038/149] chore: prevent commit signing in tests (#13222) --- Makefile | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 7a04072964ee5..9a457c619ad49 100644 --- a/Makefile +++ b/Makefile @@ -56,6 +56,9 @@ GO_SRC_FILES := $(shell find . $(FIND_EXCLUSIONS) -type f -name '*.go' -not -nam # All the shell files in the repo, excluding ignored files. SHELL_SRC_FILES := $(shell find . $(FIND_EXCLUSIONS) -type f -name '*.sh') +# Ensure we don't use the user's git configs which might cause side-effects +GIT_FLAGS = GIT_CONFIG_GLOBAL=/dev/null GIT_CONFIG_SYSTEM=/dev/null + # All ${OS}_${ARCH} combos we build for. Windows binaries have the .exe suffix. 
OS_ARCHES := \ linux_amd64 linux_arm64 linux_armv7 \ @@ -739,7 +742,7 @@ site/.eslintignore site/.prettierignore: .prettierignore Makefile done < "$<" test: - gotestsum --format standard-quiet -- -v -short -count=1 ./... + $(GIT_FLAGS) gotestsum --format standard-quiet -- -v -short -count=1 ./... .PHONY: test # sqlc-cloud-is-setup will fail if no SQLc auth token is set. Use this as a @@ -775,7 +778,7 @@ sqlc-vet: test-postgres-docker test-postgres: test-postgres-docker # The postgres test is prone to failure, so we limit parallelism for # more consistent execution. - DB=ci DB_FROM=$(shell go run scripts/migrate-ci/main.go) gotestsum \ + $(GIT_FLAGS) DB=ci DB_FROM=$(shell go run scripts/migrate-ci/main.go) gotestsum \ --junitfile="gotests.xml" \ --jsonfile="gotests.json" \ --packages="./..." -- \ @@ -824,7 +827,7 @@ test-postgres-docker: # Make sure to keep this in sync with test-go-race from .github/workflows/ci.yaml. test-race: - gotestsum --junitfile="gotests.xml" -- -race -count=1 ./... + $(GIT_FLAGS) gotestsum --junitfile="gotests.xml" -- -race -count=1 ./... .PHONY: test-race test-tailnet-integration: From 6182ee90f0c84a997a8da3e0d1697f1ee3bf879f Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Fri, 10 May 2024 20:14:37 +0300 Subject: [PATCH 039/149] chore: remove dependabot config for dogfood template (#13230) --- .github/dependabot.yaml | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/.github/dependabot.yaml b/.github/dependabot.yaml index b2a815a0421a7..fecbe9ba959cb 100644 --- a/.github/dependabot.yaml +++ b/.github/dependabot.yaml @@ -112,17 +112,3 @@ updates: offlinedocs: patterns: - "*" - - # Update dogfood. - - package-ecosystem: "terraform" - directory: "/dogfood/" - schedule: - interval: "weekly" - time: "06:00" - timezone: "America/Chicago" - commit-message: - prefix: "chore" - labels: [] - ignore: - # We likely want to update this ourselves. 
- - dependency-name: "coder/coder" From 7eb228e3ff52b485318488ed760a8895b9028499 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Fri, 10 May 2024 11:21:21 -0600 Subject: [PATCH 040/149] feat: popover paywall in appearance settings (#13217) --- .../Paywall/PopoverPaywall.stories.tsx | 20 +++ .../src/components/Paywall/PopoverPaywall.tsx | 125 ++++++++++++++++++ .../AppearanceSettingsPageView.tsx | 21 ++- 3 files changed, 165 insertions(+), 1 deletion(-) create mode 100644 site/src/components/Paywall/PopoverPaywall.stories.tsx create mode 100644 site/src/components/Paywall/PopoverPaywall.tsx diff --git a/site/src/components/Paywall/PopoverPaywall.stories.tsx b/site/src/components/Paywall/PopoverPaywall.stories.tsx new file mode 100644 index 0000000000000..3f85328c1fb98 --- /dev/null +++ b/site/src/components/Paywall/PopoverPaywall.stories.tsx @@ -0,0 +1,20 @@ +import type { Meta, StoryObj } from "@storybook/react"; +import { PopoverPaywall } from "./PopoverPaywall"; + +const meta: Meta = { + title: "components/Paywall/PopoverPaywall", + component: PopoverPaywall, +}; + +export default meta; +type Story = StoryObj; + +const Example: Story = { + args: { + message: "Black Lotus", + description: + "Adds 3 mana of any single color of your choice to your mana pool, then is discarded. 
Tapping this artifact can be played as an interrupt.", + }, +}; + +export { Example as PopoverPaywall }; diff --git a/site/src/components/Paywall/PopoverPaywall.tsx b/site/src/components/Paywall/PopoverPaywall.tsx new file mode 100644 index 0000000000000..459d7c58fb22b --- /dev/null +++ b/site/src/components/Paywall/PopoverPaywall.tsx @@ -0,0 +1,125 @@ +import type { Interpolation, Theme } from "@emotion/react"; +import TaskAltIcon from "@mui/icons-material/TaskAlt"; +import Button from "@mui/material/Button"; +import Link from "@mui/material/Link"; +import type { FC, ReactNode } from "react"; +import { EnterpriseBadge } from "components/Badges/Badges"; +import { Stack } from "components/Stack/Stack"; +import { docs } from "utils/docs"; + +export interface PopoverPaywallProps { + message: string; + description?: ReactNode; + documentationLink?: string; +} + +export const PopoverPaywall: FC = ({ + message, + description, + documentationLink, +}) => { + return ( +
+
+ +
{message}
+ +
+ + {description &&

{description}

} + + Read the documentation + +
+
+ +
    +
  • + Template access control +
  • +
  • + User groups +
  • +
  • + 24 hour support +
  • +
  • + Audit logs +
  • +
+ +
+
+ ); +}; + +const FeatureIcon: FC = () => { + return ; +}; + +const styles = { + root: (theme) => ({ + display: "flex", + flexDirection: "row", + alignItems: "center", + maxWidth: 600, + padding: "24px 36px", + backgroundImage: `linear-gradient(160deg, transparent, ${theme.roles.active.background})`, + border: `1px solid ${theme.roles.active.fill.outline}`, + borderRadius: 8, + gap: 18, + }), + title: { + fontWeight: 600, + fontFamily: "inherit", + fontSize: 18, + margin: 0, + }, + description: (theme) => ({ + marginTop: 8, + fontFamily: "inherit", + maxWidth: 420, + lineHeight: "160%", + color: theme.palette.text.secondary, + fontSize: 14, + }), + separator: (theme) => ({ + width: 1, + height: 180, + backgroundColor: theme.palette.divider, + marginLeft: 8, + }), + featureList: { + listStyle: "none", + margin: 0, + marginRight: 8, + padding: "0 12px", + fontSize: 13, + fontWeight: 500, + }, + featureIcon: (theme) => ({ + color: theme.roles.active.fill.outline, + fontSize: "1.5em", + }), + feature: { + display: "flex", + alignItems: "center", + padding: 3, + gap: 8, + lineHeight: 1.2, + }, +} satisfies Record>; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx index b62a20e923c89..fb8ec4a891de8 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AppearanceSettingsPageView.tsx @@ -10,6 +10,12 @@ import { EnterpriseBadge, EntitledBadge, } from "components/Badges/Badges"; +import { PopoverPaywall } from "components/Paywall/PopoverPaywall"; +import { + Popover, + PopoverContent, + PopoverTrigger, +} from "components/Popover/Popover"; import { getFormHelpers } from "utils/formUtils"; import { Fieldset } from "../Fieldset"; import { Header } from "../Header"; @@ -55,7 +61,20 @@ export const AppearanceSettingsPageView: FC< 
{isEntitled ? : } - + + + + + + + + + +
Date: Fri, 10 May 2024 17:38:07 +0000 Subject: [PATCH 041/149] ci: bump the github-actions group with 2 updates (#13238) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/contrib.yaml | 2 +- .github/workflows/security.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/contrib.yaml b/.github/workflows/contrib.yaml index 64262c84022e2..9f398fb85ce3c 100644 --- a/.github/workflows/contrib.yaml +++ b/.github/workflows/contrib.yaml @@ -34,7 +34,7 @@ jobs: steps: - name: cla if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' - uses: contributor-assistant/github-action@v2.3.2 + uses: contributor-assistant/github-action@v2.4.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # the below token should have repo scope and must be manually added by you in the repository's secret diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index fe9727c4c2843..fb1238afec267 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -114,7 +114,7 @@ jobs: echo "image=$(cat "$image_job")" >> $GITHUB_OUTPUT - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@d710430a6722f083d3b36b8339ff66b32f22ee55 + uses: aquasecurity/trivy-action@b2933f565dbc598b29947660e66259e3c7bc8561 with: image-ref: ${{ steps.build.outputs.image }} format: sarif From 82c1562f8240b4a92b57cb29bb9fce7ed716af03 Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Fri, 10 May 2024 13:14:03 -0500 Subject: [PATCH 042/149] fix: skip license review for dependabot (#13239) --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f35be17942aa0..49ad712b7dee4 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -910,7 
+910,7 @@ jobs: # This action is not intended to do a vulnerability check since that is handled by a separate action. dependency-license-review: runs-on: ubuntu-latest - if: github.ref != 'refs/heads/main' + if: github.ref != 'refs/heads/main' && github.actor != 'dependabot[bot]' steps: - name: "Checkout Repository" uses: actions/checkout@v4 From c557c25b3df1b602bc843fc1995bddb8e990fecd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 11 May 2024 00:08:38 +0300 Subject: [PATCH 043/149] chore: bump golang.org/x/tools from 0.20.0 to 0.21.0 (#13237) Bumps [golang.org/x/tools](https://github.com/golang/tools) from 0.20.0 to 0.21.0. - [Release notes](https://github.com/golang/tools/releases) - [Commits](https://github.com/golang/tools/compare/v0.20.0...v0.21.0) --- updated-dependencies: - dependency-name: golang.org/x/tools dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 14 +++++++------- go.sum | 23 ++++++++++++----------- 2 files changed, 19 insertions(+), 18 deletions(-) diff --git a/go.mod b/go.mod index 5abab487e963f..1e268194ca384 100644 --- a/go.mod +++ b/go.mod @@ -188,16 +188,16 @@ require ( go.uber.org/atomic v1.11.0 go.uber.org/goleak v1.2.1 go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516 - golang.org/x/crypto v0.22.0 + golang.org/x/crypto v0.23.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 golang.org/x/mod v0.17.0 - golang.org/x/net v0.24.0 + golang.org/x/net v0.25.0 golang.org/x/oauth2 v0.19.0 golang.org/x/sync v0.7.0 - golang.org/x/sys v0.19.0 - golang.org/x/term v0.19.0 - golang.org/x/text v0.14.0 - golang.org/x/tools v0.20.0 + golang.org/x/sys v0.20.0 + golang.org/x/term v0.20.0 + golang.org/x/text v0.15.0 + golang.org/x/tools v0.21.0 golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 golang.zx2c4.com/wireguard 
v0.0.0-20230704135630-469159ecf7d1 google.golang.org/api v0.176.1 @@ -394,7 +394,7 @@ require ( github.com/tailscale/certstore v0.1.1-0.20220316223106-78d6e1c49d8d // indirect github.com/tailscale/golang-x-crypto v0.0.0-20230713185742-f0b76a10a08e // indirect github.com/tailscale/goupnp v1.0.1-0.20210804011211-c64d0f06ea05 // indirect - github.com/tailscale/netlink v1.1.1-0.20211101221916-cabfb018fe85 // indirect + github.com/tailscale/netlink v1.1.1-0.20211101221916-cabfb018fe85 github.com/tailscale/wireguard-go v0.0.0-20230710185534-bb2c8f22eccf // indirect github.com/tchap/go-patricia/v2 v2.3.1 // indirect github.com/tcnksm/go-httpstat v0.2.0 // indirect diff --git a/go.sum b/go.sum index be60beb05ccc8..320ed2e57d690 100644 --- a/go.sum +++ b/go.sum @@ -1004,8 +1004,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= -golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= +golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= @@ -1044,8 +1044,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.17.0/go.mod 
h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= -golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= -golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.19.0 h1:9+E/EZBCbTLNrbN35fHv/a/d/mOBatymz1zbtQrXpIg= golang.org/x/oauth2 v0.19.0/go.mod h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc8= @@ -1101,8 +1101,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= -golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210503060354-a79de5458b56/go.mod h1:tfny5GFUkzUvx4ps4ajbZsCe5lw1metzhBm9T3x7oIY= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1112,8 +1112,8 @@ golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.17.0/go.mod 
h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= -golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= +golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -1124,8 +1124,9 @@ golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= @@ -1141,8 +1142,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.20.0 h1:hz/CVckiOxybQvFw6h7b/q80NTr9IUQb4s1IIzW7KNY= -golang.org/x/tools v0.20.0/go.mod 
h1:WvitBU7JJf6A4jOdg4S1tviW9bhUxkgeCui/0JHctQg= +golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw= +golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From ee817b4d80223cc3dbcc9846b82f8abab1f5b4f6 Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Fri, 10 May 2024 17:10:19 -0500 Subject: [PATCH 044/149] fix: fix nix flake sed command (#13243) --- scripts/update-flake.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/update-flake.sh b/scripts/update-flake.sh index 4094f20032611..67aca5e79a09a 100755 --- a/scripts/update-flake.sh +++ b/scripts/update-flake.sh @@ -13,4 +13,4 @@ echo "Calculating SRI hash..." HASH=$(go run tailscale.com/cmd/nardump --sri "$OUT/pkg/mod/cache/download") sudo rm -rf "$OUT" -sed -i "s/\(vendorHash = \"\)[^\"]*/\1${HASH}/" ./flake.nix +sed -i "s#\(vendorHash = \"\)[^\"]*#\1${HASH}#" ./flake.nix From 3d707cbe5abd083700a50dcb4cca7e007e893f38 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 11 May 2024 01:10:39 +0300 Subject: [PATCH 045/149] chore: bump tar from 6.2.0 to 6.2.1 in /site (#13244) Bumps [tar](https://github.com/isaacs/node-tar) from 6.2.0 to 6.2.1. - [Release notes](https://github.com/isaacs/node-tar/releases) - [Changelog](https://github.com/isaacs/node-tar/blob/main/CHANGELOG.md) - [Commits](https://github.com/isaacs/node-tar/compare/v6.2.0...v6.2.1) --- updated-dependencies: - dependency-name: tar dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/pnpm-lock.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index 6a3297a2d5d6f..a4337b1a98b36 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -2938,7 +2938,7 @@ packages: npmlog: 5.0.1 rimraf: 3.0.2 semver: 7.5.3 - tar: 6.2.0 + tar: 6.2.1 transitivePeerDependencies: - encoding - supports-color @@ -7978,7 +7978,7 @@ packages: mri: 1.2.0 node-fetch-native: 1.4.1 pathe: 1.1.1 - tar: 6.2.0 + tar: 6.2.1 transitivePeerDependencies: - supports-color dev: true @@ -12192,8 +12192,8 @@ packages: readable-stream: 3.6.2 dev: true - /tar@6.2.0: - resolution: {integrity: sha512-/Wo7DcT0u5HUV486xg675HtjNd3BXZ6xDbzsCUZPt5iw8bTQ63bP0Raut3mvro9u+CUyq7YQd8Cx55fsZXxqLQ==} + /tar@6.2.1: + resolution: {integrity: sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==} engines: {node: '>=10'} dependencies: chownr: 2.0.0 From 5ddbeddf8520a4843e2977647186c19e7bf0a382 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 May 2024 22:19:19 +0000 Subject: [PATCH 046/149] chore: bump protobufjs from 7.2.4 to 7.2.5 in /site (#13245) Bumps [protobufjs](https://github.com/protobufjs/protobuf.js) from 7.2.4 to 7.2.5. - [Release notes](https://github.com/protobufjs/protobuf.js/releases) - [Changelog](https://github.com/protobufjs/protobuf.js/blob/master/CHANGELOG.md) - [Commits](https://github.com/protobufjs/protobuf.js/compare/protobufjs-v7.2.4...protobufjs-v7.2.5) --- updated-dependencies: - dependency-name: protobufjs dependency-type: direct:development ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- site/package.json | 2 +- site/pnpm-lock.yaml | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/site/package.json b/site/package.json index 3e582312dcfcc..2aa4c6b047c0b 100644 --- a/site/package.json +++ b/site/package.json @@ -159,7 +159,7 @@ "jest_workaround": "0.1.14", "msw": "2.2.3", "prettier": "3.1.0", - "protobufjs": "7.2.4", + "protobufjs": "7.2.5", "rxjs": "7.8.1", "ssh2": "1.14.0", "storybook": "8.0.5", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index a4337b1a98b36..f454cd2ee9a22 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -393,8 +393,8 @@ devDependencies: specifier: 3.1.0 version: 3.1.0 protobufjs: - specifier: 7.2.4 - version: 7.2.4 + specifier: 7.2.5 + version: 7.2.5 rxjs: specifier: 7.8.1 version: 7.8.1 @@ -10850,8 +10850,8 @@ packages: resolution: {integrity: sha512-9t5qARVofg2xQqKtytzt+lZ4d1Qvj8t5B8fEwXK6qOfgRLgH/b13QlgEyDh033NOS31nXeFbYv7CLUDG1CeifQ==} dev: false - /protobufjs@7.2.4: - resolution: {integrity: sha512-AT+RJgD2sH8phPmCf7OUZR8xGdcJRga4+1cOaXJ64hvcSkVhNcRHOwIxUatPH15+nj59WAGTDv3LSGZPEQbJaQ==} + /protobufjs@7.2.5: + resolution: {integrity: sha512-gGXRSXvxQ7UiPgfw8gevrfRWcTlSbOFg+p/N+JVJEK5VhueL2miT6qTymqAmjr1Q5WbOCyJbyrk6JfWKwlFn6A==} engines: {node: '>=12.0.0'} requiresBuild: true dependencies: @@ -12430,7 +12430,7 @@ packages: resolution: {integrity: sha512-TYyJ7+H+7Jsqawdv+mfsEpZPTIj9siDHS6EMCzG/z3b/PZiphsX+mWtqFfFVe5/N0Th6V3elK9lQqjnrgTOfrg==} dependencies: long: 5.2.3 - protobufjs: 7.2.4 + protobufjs: 7.2.5 dev: true /ts-proto@1.164.0: @@ -12438,7 +12438,7 @@ packages: hasBin: true dependencies: case-anything: 2.1.13 - protobufjs: 7.2.4 + protobufjs: 7.2.5 ts-poet: 6.6.0 ts-proto-descriptors: 1.15.0 dev: true From f13b1c9af6db49c75e27effe68cf8eff1dc9cda6 Mon Sep 17 00:00:00 2001 From: Michael Smith Date: Sun, 12 May 2024 15:05:22 -0400 Subject: [PATCH 
047/149] refactor: improve test isolation for Axios API logic (#13125) * wip: commit progress on code split-up * wip: commit more progress * wip: finish initial version of class implementation * chore: update all import paths to go through client instance * fix: remove temp comments * refactor: smoooooooosh the API * refactor: update import setup for tests --- site/e2e/api.ts | 17 +- site/e2e/global.setup.ts | 4 +- site/e2e/helpers.ts | 3 +- site/e2e/reporter.ts | 3 +- site/e2e/tests/deployment/general.spec.ts | 2 +- site/e2e/tests/deployment/network.spec.ts | 4 +- .../tests/deployment/observability.spec.ts | 4 +- site/e2e/tests/deployment/security.spec.ts | 7 +- site/e2e/tests/deployment/userAuth.spec.ts | 4 +- .../tests/deployment/workspaceProxies.spec.ts | 4 +- site/e2e/tests/groups/removeMember.spec.ts | 2 +- .../templates/updateTemplateSchedule.spec.ts | 8 +- site/src/api/api.test.ts | 55 +- site/src/api/api.ts | 3244 +++++++++-------- site/src/api/queries/appearance.ts | 2 +- site/src/api/queries/audits.ts | 4 +- site/src/api/queries/authCheck.ts | 2 +- site/src/api/queries/buildInfo.ts | 2 +- site/src/api/queries/debug.ts | 2 +- site/src/api/queries/deployment.ts | 2 +- site/src/api/queries/entitlements.ts | 2 +- site/src/api/queries/experiments.ts | 2 +- site/src/api/queries/externalAuth.ts | 2 +- site/src/api/queries/files.ts | 2 +- site/src/api/queries/groups.ts | 5 +- site/src/api/queries/insights.ts | 8 +- site/src/api/queries/integrations.ts | 2 +- site/src/api/queries/oauth2.ts | 2 +- site/src/api/queries/roles.ts | 2 +- site/src/api/queries/settings.ts | 2 +- site/src/api/queries/sshKeys.ts | 2 +- site/src/api/queries/templates.ts | 2 +- site/src/api/queries/updateCheck.ts | 2 +- site/src/api/queries/users.ts | 2 +- site/src/api/queries/workspaceBuilds.ts | 2 +- site/src/api/queries/workspaceQuota.ts | 2 +- site/src/api/queries/workspaceportsharing.ts | 12 +- site/src/api/queries/workspaces.ts | 11 +- site/src/components/Filter/UserFilter.tsx | 6 +- 
site/src/contexts/ProxyContext.tsx | 11 +- site/src/contexts/auth/RequireAuth.tsx | 3 +- site/src/contexts/useProxyLatency.ts | 3 +- .../modules/resources/AgentLogs/AgentLogs.tsx | 4 +- .../src/modules/resources/AppLink/AppLink.tsx | 4 +- .../modules/resources/PortForwardButton.tsx | 4 +- .../VSCodeDesktopButton.tsx | 6 +- site/src/pages/AuditPage/AuditPage.test.tsx | 2 +- .../CreateTemplatePage.test.tsx | 2 +- .../CreateTokenPage/CreateTokenPage.test.tsx | 2 +- .../pages/CreateTokenPage/CreateTokenPage.tsx | 6 +- .../CreateWorkspacePage.test.tsx | 2 +- .../CreateWorkspacePage.tsx | 4 +- .../AddNewLicensePage.tsx | 4 +- .../LicensesSettingsPage.tsx | 6 +- .../TemplateEmbedPage.test.tsx | 2 +- .../TemplateEmbedPage/TemplateEmbedPage.tsx | 5 +- .../src/pages/TemplatePage/TemplateLayout.tsx | 13 +- .../TemplateSummaryPage.tsx | 4 +- .../TemplateVersionsPage.tsx | 12 +- .../useDeletionDialogState.test.ts | 2 +- .../TemplatePage/useDeletionDialogState.ts | 4 +- .../TemplateSettingsPage.test.tsx | 6 +- .../TemplateSettingsPage.tsx | 6 +- .../TemplateSchedulePage.test.tsx | 2 +- .../TemplateSchedulePage.tsx | 4 +- .../TemplateVariablesPage.test.tsx | 2 +- .../TemplateVersionEditorPage.test.tsx | 22 +- .../TemplateVersionEditorPage.tsx | 6 +- .../pages/TerminalPage/TerminalPage.test.tsx | 2 +- .../AccountPage/AccountPage.test.tsx | 2 +- .../AppearancePage/AppearancePage.test.tsx | 2 +- .../SSHKeysPage/SSHKeysPage.test.tsx | 2 +- .../SecurityPage/SecurityPage.test.tsx | 2 +- .../SecurityPage/SecurityPage.tsx | 4 +- .../SecurityPage/SingleSignOnSection.tsx | 4 +- .../UserSettingsPage/TokensPage/hooks.ts | 9 +- site/src/pages/UsersPage/UsersPage.test.tsx | 2 +- .../WorkspaceBuildPage.test.tsx | 2 +- .../WorkspaceBuildPage/WorkspaceBuildPage.tsx | 4 +- .../BuildParametersPopover.tsx | 4 +- .../WorkspacePage/WorkspacePage.test.tsx | 54 +- .../WorkspacePage/WorkspaceReadyPage.tsx | 4 +- .../WorkspaceScheduleControls.test.tsx | 2 +- .../WorkspaceParametersPage.test.tsx | 10 +- 
.../WorkspaceParametersPage.tsx | 8 +- .../WorkspaceScheduleForm.test.tsx | 2 +- .../WorkspaceSchedulePage.tsx | 12 +- .../WorkspaceSettingsPage.test.tsx | 8 +- .../WorkspaceSettingsPage.tsx | 6 +- .../BatchUpdateConfirmation.tsx | 4 +- .../WorkspacesPage/WorkspacesPage.test.tsx | 2 +- .../src/pages/WorkspacesPage/batchActions.tsx | 21 +- site/src/pages/WorkspacesPage/data.ts | 6 +- site/src/pages/WorkspacesPage/filter/menus.ts | 6 +- site/src/utils/terminal.ts | 2 +- 95 files changed, 1926 insertions(+), 1858 deletions(-) diff --git a/site/e2e/api.ts b/site/e2e/api.ts index 65a4aaa40a937..08a25543b0fb6 100644 --- a/site/e2e/api.ts +++ b/site/e2e/api.ts @@ -1,7 +1,7 @@ import type { Page } from "@playwright/test"; import { expect } from "@playwright/test"; import { formatDuration, intervalToDuration } from "date-fns"; -import * as API from "api/api"; +import { type DeploymentConfig, API } from "api/api"; import type { SerpentOption } from "api/typesGenerated"; import { coderPort } from "./constants"; import { findSessionToken, randomName } from "./helpers"; @@ -15,6 +15,7 @@ export const setupApiCalls = async (page: Page) => { } catch { // If this fails, we have an unauthenticated client. 
} + API.setHost(`http://127.0.0.1:${coderPort}`); }; @@ -53,7 +54,7 @@ export const createGroup = async (orgId: string) => { export async function verifyConfigFlagBoolean( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -68,7 +69,7 @@ export async function verifyConfigFlagBoolean( export async function verifyConfigFlagNumber( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -80,7 +81,7 @@ export async function verifyConfigFlagNumber( export async function verifyConfigFlagString( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -100,7 +101,7 @@ export async function verifyConfigFlagEmpty(page: Page, flag: string) { export async function verifyConfigFlagArray( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -116,7 +117,7 @@ export async function verifyConfigFlagArray( export async function verifyConfigFlagEntries( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -138,7 +139,7 @@ export async function verifyConfigFlagEntries( export async function verifyConfigFlagDuration( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ) { const opt = findConfigOption(config, flag); @@ -157,7 +158,7 @@ export async function verifyConfigFlagDuration( } export function findConfigOption( - config: API.DeploymentConfig, + config: DeploymentConfig, flag: string, ): SerpentOption { const opt = config.options.find((option) => option.flag === flag); diff --git a/site/e2e/global.setup.ts b/site/e2e/global.setup.ts index 8c8526af9acc1..b23f6bbaa1cd3 100644 --- a/site/e2e/global.setup.ts +++ b/site/e2e/global.setup.ts @@ 
-1,5 +1,5 @@ import { expect, test } from "@playwright/test"; -import { hasFirstUser } from "api/api"; +import { API } from "api/api"; import { Language } from "pages/CreateUserPage/CreateUserForm"; import { setupApiCalls } from "./api"; import * as constants from "./constants"; @@ -9,7 +9,7 @@ import { storageState } from "./playwright.config"; test("setup deployment", async ({ page }) => { await page.goto("/", { waitUntil: "domcontentloaded" }); await setupApiCalls(page); - const exists = await hasFirstUser(); + const exists = await API.hasFirstUser(); // First user already exists, abort early. All tests execute this as a dependency, // if you run multiple tests in the UI, this will fail unless we check this. if (exists) { diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index 3f58184b1c1ac..2aa45193806d0 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -6,7 +6,7 @@ import capitalize from "lodash/capitalize"; import path from "path"; import * as ssh from "ssh2"; import { Duplex } from "stream"; -import { axiosInstance } from "api/api"; +import { API } from "api/api"; import type { WorkspaceBuildParameter, UpdateTemplateMeta, @@ -423,6 +423,7 @@ export const waitUntilUrlIsNotResponding = async (url: string) => { const retryIntervalMs = 1000; let retries = 0; + const axiosInstance = API.getAxiosInstance(); while (retries < maxRetries) { try { await axiosInstance.get(url); diff --git a/site/e2e/reporter.ts b/site/e2e/reporter.ts index 466bc564d238a..8c9a0d163acc0 100644 --- a/site/e2e/reporter.ts +++ b/site/e2e/reporter.ts @@ -10,7 +10,7 @@ import type { } from "@playwright/test/reporter"; import * as fs from "fs/promises"; import type { Writable } from "stream"; -import { axiosInstance } from "api/api"; +import { API } from "api/api"; import { coderdPProfPort, enterpriseLicense } from "./constants"; class CoderReporter implements Reporter { @@ -143,6 +143,7 @@ const logLines = (chunk: string | Buffer): string[] => { }; const exportDebugPprof = 
async (outputFile: string) => { + const axiosInstance = API.getAxiosInstance(); const response = await axiosInstance.get( `http://127.0.0.1:${coderdPProfPort}/debug/pprof/goroutine?debug=1`, ); diff --git a/site/e2e/tests/deployment/general.spec.ts b/site/e2e/tests/deployment/general.spec.ts index de334a95b05e3..47e9a22e1a67f 100644 --- a/site/e2e/tests/deployment/general.spec.ts +++ b/site/e2e/tests/deployment/general.spec.ts @@ -1,5 +1,5 @@ import { expect, test } from "@playwright/test"; -import * as API from "api/api"; +import { API } from "api/api"; import { setupApiCalls } from "../../api"; import { e2eFakeExperiment1, e2eFakeExperiment2 } from "../../constants"; diff --git a/site/e2e/tests/deployment/network.spec.ts b/site/e2e/tests/deployment/network.spec.ts index c979bb8e1022f..d125a100d30bb 100644 --- a/site/e2e/tests/deployment/network.spec.ts +++ b/site/e2e/tests/deployment/network.spec.ts @@ -1,5 +1,5 @@ import { test } from "@playwright/test"; -import { getDeploymentConfig } from "api/api"; +import { API } from "api/api"; import { setupApiCalls, verifyConfigFlagArray, @@ -11,7 +11,7 @@ import { test("enabled network settings", async ({ page }) => { await setupApiCalls(page); - const config = await getDeploymentConfig(); + const config = await API.getDeploymentConfig(); await page.goto("/deployment/network", { waitUntil: "domcontentloaded" }); diff --git a/site/e2e/tests/deployment/observability.spec.ts b/site/e2e/tests/deployment/observability.spec.ts index e94f14b6ceebc..7030ea35081a3 100644 --- a/site/e2e/tests/deployment/observability.spec.ts +++ b/site/e2e/tests/deployment/observability.spec.ts @@ -1,5 +1,5 @@ import { test } from "@playwright/test"; -import { getDeploymentConfig } from "api/api"; +import { API } from "api/api"; import { setupApiCalls, verifyConfigFlagArray, @@ -11,7 +11,7 @@ import { test("enabled observability settings", async ({ page }) => { await setupApiCalls(page); - const config = await getDeploymentConfig(); + const config 
= await API.getDeploymentConfig(); await page.goto("/deployment/observability", { waitUntil: "domcontentloaded", diff --git a/site/e2e/tests/deployment/security.spec.ts b/site/e2e/tests/deployment/security.spec.ts index ede966260ca44..45675089852e1 100644 --- a/site/e2e/tests/deployment/security.spec.ts +++ b/site/e2e/tests/deployment/security.spec.ts @@ -1,7 +1,6 @@ import type { Page } from "@playwright/test"; import { expect, test } from "@playwright/test"; -import type * as API from "api/api"; -import { getDeploymentConfig } from "api/api"; +import { type DeploymentConfig, API } from "api/api"; import { findConfigOption, setupApiCalls, @@ -12,7 +11,7 @@ import { test("enabled security settings", async ({ page }) => { await setupApiCalls(page); - const config = await getDeploymentConfig(); + const config = await API.getDeploymentConfig(); await page.goto("/deployment/security", { waitUntil: "domcontentloaded" }); @@ -31,7 +30,7 @@ test("enabled security settings", async ({ page }) => { async function verifyStrictTransportSecurity( page: Page, - config: API.DeploymentConfig, + config: DeploymentConfig, ) { const flag = "strict-transport-security"; const opt = findConfigOption(config, flag); diff --git a/site/e2e/tests/deployment/userAuth.spec.ts b/site/e2e/tests/deployment/userAuth.spec.ts index cf656c99fae3f..8dd8a3af49af7 100644 --- a/site/e2e/tests/deployment/userAuth.spec.ts +++ b/site/e2e/tests/deployment/userAuth.spec.ts @@ -1,5 +1,5 @@ import { test } from "@playwright/test"; -import { getDeploymentConfig } from "api/api"; +import { API } from "api/api"; import { setupApiCalls, verifyConfigFlagArray, @@ -10,7 +10,7 @@ import { test("login with OIDC", async ({ page }) => { await setupApiCalls(page); - const config = await getDeploymentConfig(); + const config = await API.getDeploymentConfig(); await page.goto("/deployment/userauth", { waitUntil: "domcontentloaded" }); diff --git a/site/e2e/tests/deployment/workspaceProxies.spec.ts 
b/site/e2e/tests/deployment/workspaceProxies.spec.ts index 5f67bda7d7ad4..47f8d48895466 100644 --- a/site/e2e/tests/deployment/workspaceProxies.spec.ts +++ b/site/e2e/tests/deployment/workspaceProxies.spec.ts @@ -1,5 +1,5 @@ import { test, expect, type Page } from "@playwright/test"; -import { createWorkspaceProxy } from "api/api"; +import { API } from "api/api"; import { setupApiCalls } from "../../api"; import { coderPort, workspaceProxyPort } from "../../constants"; import { randomName, requiresEnterpriseLicense } from "../../helpers"; @@ -34,7 +34,7 @@ test("custom proxy is online", async ({ page }) => { const proxyName = randomName(); // Register workspace proxy - const proxyResponse = await createWorkspaceProxy({ + const proxyResponse = await API.createWorkspaceProxy({ name: proxyName, display_name: "", icon: "/emojis/1f1e7-1f1f7.png", diff --git a/site/e2e/tests/groups/removeMember.spec.ts b/site/e2e/tests/groups/removeMember.spec.ts index 716c86af84a8d..468d9d4851441 100644 --- a/site/e2e/tests/groups/removeMember.spec.ts +++ b/site/e2e/tests/groups/removeMember.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import * as API from "api/api"; +import { API } from "api/api"; import { createGroup, createUser, diff --git a/site/e2e/tests/templates/updateTemplateSchedule.spec.ts b/site/e2e/tests/templates/updateTemplateSchedule.spec.ts index 1eb272a665edb..5678f015c917c 100644 --- a/site/e2e/tests/templates/updateTemplateSchedule.spec.ts +++ b/site/e2e/tests/templates/updateTemplateSchedule.spec.ts @@ -1,5 +1,5 @@ import { expect, test } from "@playwright/test"; -import { createTemplate, createTemplateVersion, getTemplate } from "api/api"; +import { API } from "api/api"; import { getCurrentOrgId, setupApiCalls } from "../../api"; import { beforeCoderTest } from "../../hooks"; @@ -11,14 +11,14 @@ test("update template schedule settings without override other settings", async }) => { await setupApiCalls(page); const orgId = await 
getCurrentOrgId(); - const templateVersion = await createTemplateVersion(orgId, { + const templateVersion = await API.createTemplateVersion(orgId, { storage_method: "file" as const, provisioner: "echo", user_variable_values: [], example_id: "docker", tags: {}, }); - const template = await createTemplate(orgId, { + const template = await API.createTemplate(orgId, { name: "test-template", display_name: "Test Template", template_version_id: templateVersion.id, @@ -33,7 +33,7 @@ test("update template schedule settings without override other settings", async await page.getByRole("button", { name: "Submit" }).click(); await expect(page.getByText("Template updated successfully")).toBeVisible(); - const updatedTemplate = await getTemplate(template.id); + const updatedTemplate = await API.getTemplate(template.id); // Validate that the template data remains consistent, with the exception of // the 'default_ttl_ms' field (updated during the test) and the 'updated at' // field (automatically updated by the backend). 
diff --git a/site/src/api/api.test.ts b/site/src/api/api.test.ts index 18615306683c4..af5f5e22d61ba 100644 --- a/site/src/api/api.test.ts +++ b/site/src/api/api.test.ts @@ -6,10 +6,11 @@ import { MockWorkspaceBuild, MockWorkspaceBuildParameter1, } from "testHelpers/entities"; -import * as api from "./api"; -import { axiosInstance } from "./api"; +import { API, getURLWithSearchParams, MissingBuildParameters } from "./api"; import type * as TypesGen from "./typesGenerated"; +const axiosInstance = API.getAxiosInstance(); + describe("api.ts", () => { describe("login", () => { it("should return LoginResponse", async () => { @@ -23,7 +24,7 @@ describe("api.ts", () => { .mockResolvedValueOnce({ data: loginResponse }); // when - const result = await api.login("test", "123"); + const result = await API.login("test", "123"); // then expect(axiosInstance.post).toHaveBeenCalled(); @@ -44,7 +45,7 @@ describe("api.ts", () => { axiosInstance.post = axiosMockPost; try { - await api.login("test", "123"); + await API.login("test", "123"); } catch (error) { expect(error).toStrictEqual(expectedError); } @@ -60,7 +61,7 @@ describe("api.ts", () => { axiosInstance.post = axiosMockPost; // when - await api.logout(); + await API.logout(); // then expect(axiosMockPost).toHaveBeenCalled(); @@ -80,7 +81,7 @@ describe("api.ts", () => { axiosInstance.post = axiosMockPost; try { - await api.logout(); + await API.logout(); } catch (error) { expect(error).toStrictEqual(expectedError); } @@ -100,7 +101,7 @@ describe("api.ts", () => { axiosInstance.post = axiosMockPost; // when - const result = await api.getApiKey(); + const result = await API.getApiKey(); // then expect(axiosMockPost).toHaveBeenCalled(); @@ -121,7 +122,7 @@ describe("api.ts", () => { axiosInstance.post = axiosMockPost; try { - await api.getApiKey(); + await API.getApiKey(); } catch (error) { expect(error).toStrictEqual(expectedError); } @@ -147,7 +148,7 @@ describe("api.ts", () => { ])( `Workspaces - getURLWithSearchParams(%p, %p) 
returns %p`, (basePath, filter, expected) => { - expect(api.getURLWithSearchParams(basePath, filter)).toBe(expected); + expect(getURLWithSearchParams(basePath, filter)).toBe(expected); }, ); }); @@ -164,7 +165,7 @@ describe("api.ts", () => { ])( `Users - getURLWithSearchParams(%p, %p) returns %p`, (basePath, filter, expected) => { - expect(api.getURLWithSearchParams(basePath, filter)).toBe(expected); + expect(getURLWithSearchParams(basePath, filter)).toBe(expected); }, ); }); @@ -172,11 +173,11 @@ describe("api.ts", () => { describe("update", () => { it("creates a build with start and the latest template", async () => { jest - .spyOn(api, "postWorkspaceBuild") + .spyOn(API, "postWorkspaceBuild") .mockResolvedValueOnce(MockWorkspaceBuild); - jest.spyOn(api, "getTemplate").mockResolvedValueOnce(MockTemplate); - await api.updateWorkspace(MockWorkspace); - expect(api.postWorkspaceBuild).toHaveBeenCalledWith(MockWorkspace.id, { + jest.spyOn(API, "getTemplate").mockResolvedValueOnce(MockTemplate); + await API.updateWorkspace(MockWorkspace); + expect(API.postWorkspaceBuild).toHaveBeenCalledWith(MockWorkspace.id, { transition: "start", template_version_id: MockTemplate.active_version_id, rich_parameter_values: [], @@ -185,12 +186,12 @@ describe("api.ts", () => { it("fails when having missing parameters", async () => { jest - .spyOn(api, "postWorkspaceBuild") + .spyOn(API, "postWorkspaceBuild") .mockResolvedValue(MockWorkspaceBuild); - jest.spyOn(api, "getTemplate").mockResolvedValue(MockTemplate); - jest.spyOn(api, "getWorkspaceBuildParameters").mockResolvedValue([]); + jest.spyOn(API, "getTemplate").mockResolvedValue(MockTemplate); + jest.spyOn(API, "getWorkspaceBuildParameters").mockResolvedValue([]); jest - .spyOn(api, "getTemplateVersionRichParameters") + .spyOn(API, "getTemplateVersionRichParameters") .mockResolvedValue([ MockTemplateVersionParameter1, { ...MockTemplateVersionParameter2, mutable: false }, @@ -198,14 +199,14 @@ describe("api.ts", () => { let error = 
new Error(); try { - await api.updateWorkspace(MockWorkspace); + await API.updateWorkspace(MockWorkspace); } catch (e) { error = e as Error; } - expect(error).toBeInstanceOf(api.MissingBuildParameters); + expect(error).toBeInstanceOf(MissingBuildParameters); // Verify if the correct missing parameters are being passed - expect((error as api.MissingBuildParameters).parameters).toEqual([ + expect((error as MissingBuildParameters).parameters).toEqual([ MockTemplateVersionParameter1, { ...MockTemplateVersionParameter2, mutable: false }, ]); @@ -213,19 +214,19 @@ describe("api.ts", () => { it("creates a build with the no parameters if it is already filled", async () => { jest - .spyOn(api, "postWorkspaceBuild") + .spyOn(API, "postWorkspaceBuild") .mockResolvedValueOnce(MockWorkspaceBuild); - jest.spyOn(api, "getTemplate").mockResolvedValueOnce(MockTemplate); + jest.spyOn(API, "getTemplate").mockResolvedValueOnce(MockTemplate); jest - .spyOn(api, "getWorkspaceBuildParameters") + .spyOn(API, "getWorkspaceBuildParameters") .mockResolvedValue([MockWorkspaceBuildParameter1]); jest - .spyOn(api, "getTemplateVersionRichParameters") + .spyOn(API, "getTemplateVersionRichParameters") .mockResolvedValue([ { ...MockTemplateVersionParameter1, required: true, mutable: false }, ]); - await api.updateWorkspace(MockWorkspace); - expect(api.postWorkspaceBuild).toHaveBeenCalledWith(MockWorkspace.id, { + await API.updateWorkspace(MockWorkspace); + expect(API.postWorkspaceBuild).toHaveBeenCalledWith(MockWorkspace.id, { transition: "start", template_version_id: MockTemplate.active_version_id, rich_parameter_values: [], diff --git a/site/src/api/api.ts b/site/src/api/api.ts index c677ffbcb1b3b..ed7f18ef1472c 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -19,32 +19,128 @@ * * For example, `utils/delay` must be imported using `../utils/delay` instead. 
*/ -import globalAxios, { isAxiosError } from "axios"; +import globalAxios, { type AxiosInstance, isAxiosError } from "axios"; import type dayjs from "dayjs"; import userAgentParser from "ua-parser-js"; import { delay } from "../utils/delay"; import * as TypesGen from "./typesGenerated"; -export const axiosInstance = globalAxios.create(); +const getMissingParameters = ( + oldBuildParameters: TypesGen.WorkspaceBuildParameter[], + newBuildParameters: TypesGen.WorkspaceBuildParameter[], + templateParameters: TypesGen.TemplateVersionParameter[], +) => { + const missingParameters: TypesGen.TemplateVersionParameter[] = []; + const requiredParameters: TypesGen.TemplateVersionParameter[] = []; + + templateParameters.forEach((p) => { + // It is mutable and required. Mutable values can be changed after so we + // don't need to ask them if they are not required. + const isMutableAndRequired = p.mutable && p.required; + // Is immutable, so we can check if it is its first time on the build + const isImmutable = !p.mutable; + + if (isMutableAndRequired || isImmutable) { + requiredParameters.push(p); + } + }); + + for (const parameter of requiredParameters) { + // Check if there is a new value + let buildParameter = newBuildParameters.find( + (p) => p.name === parameter.name, + ); + + // If not, get the old one + if (!buildParameter) { + buildParameter = oldBuildParameters.find( + (p) => p.name === parameter.name, + ); + } + + // If there is a value from the new or old one, it is not missed + if (buildParameter) { + continue; + } + + missingParameters.push(parameter); + } + + // Check if parameter "options" changed and we can't use old build parameters. 
+ templateParameters.forEach((templateParameter) => { + if (templateParameter.options.length === 0) { + return; + } + + // Check if there is a new value + let buildParameter = newBuildParameters.find( + (p) => p.name === templateParameter.name, + ); + + // If not, get the old one + if (!buildParameter) { + buildParameter = oldBuildParameters.find( + (p) => p.name === templateParameter.name, + ); + } + + if (!buildParameter) { + return; + } + + const matchingOption = templateParameter.options.find( + (option) => option.value === buildParameter?.value, + ); + if (!matchingOption) { + missingParameters.push(templateParameter); + } + }); + return missingParameters; +}; + +/** + * + * @param agentId + * @returns An EventSource that emits agent metadata event objects + * (ServerSentEvent) + */ +export const watchAgentMetadata = (agentId: string): EventSource => { + return new EventSource( + `${location.protocol}//${location.host}/api/v2/workspaceagents/${agentId}/watch-metadata`, + { withCredentials: true }, + ); +}; -// Adds 304 for the default axios validateStatus function -// https://github.com/axios/axios#handling-errors Check status here -// https://httpstatusdogs.com/ -axiosInstance.defaults.validateStatus = (status) => { - return (status >= 200 && status < 300) || status === 304; +/** + * @returns {EventSource} An EventSource that emits workspace event objects + * (ServerSentEvent) + */ +export const watchWorkspace = (workspaceId: string): EventSource => { + return new EventSource( + `${location.protocol}//${location.host}/api/v2/workspaces/${workspaceId}/watch`, + { withCredentials: true }, + ); }; -export const hardCodedCSRFCookie = (): string => { - // This is a hard coded CSRF token/cookie pair for local development. In prod, - // the GoLang webserver generates a random cookie with a new token for each - // document request. 
For local development, we don't use the Go webserver for - // static files, so this is the 'hack' to make local development work with - // remote apis. The CSRF cookie for this token is - // "JXm9hOUdZctWt0ZZGAy9xiS/gxMKYOThdxjjMnMUyn4=" - const csrfToken = - "KNKvagCBEHZK7ihe2t7fj6VeJ0UyTDco1yVUJE8N06oNqxLu5Zx1vRxZbgfC0mJJgeGkVjgs08mgPbcWPBkZ1A=="; - axiosInstance.defaults.headers.common["X-CSRF-TOKEN"] = csrfToken; - return csrfToken; +export const getURLWithSearchParams = ( + basePath: string, + options?: SearchParamOptions, +): string => { + if (!options) { + return basePath; + } + + const searchParams = new URLSearchParams(); + const keys = Object.keys(options) as (keyof SearchParamOptions)[]; + keys.forEach((key) => { + const value = options[key]; + if (value !== undefined && value !== "") { + searchParams.append(key, value.toString()); + } + }); + + const searchString = searchParams.toString(); + return searchString ? `${basePath}?${searchString}` : basePath; }; // withDefaultFeatures sets all unspecified features to not_entitled and @@ -57,1808 +153,1792 @@ export const withDefaultFeatures = ( if (fs[feature] !== undefined) { continue; } + fs[feature] = { enabled: false, entitlement: "not_entitled", }; } - return fs as TypesGen.Entitlements["features"]; -}; - -// Always attach CSRF token to all requests. In puppeteer the document is -// undefined. In those cases, just do nothing. -const token = - typeof document !== "undefined" - ? document.head.querySelector('meta[property="csrf-token"]') - : null; - -if (token !== null && token.getAttribute("content") !== null) { - if (process.env.NODE_ENV === "development") { - // Development mode uses a hard-coded CSRF token - axiosInstance.defaults.headers.common["X-CSRF-TOKEN"] = - hardCodedCSRFCookie(); - token.setAttribute("content", hardCodedCSRFCookie()); - } else { - axiosInstance.defaults.headers.common["X-CSRF-TOKEN"] = - token.getAttribute("content") ?? 
""; - } -} else { - // Do not write error logs if we are in a FE unit test. - if (process.env.JEST_WORKER_ID === undefined) { - console.error("CSRF token not found"); - } -} - -export const setSessionToken = (token: string) => { - axiosInstance.defaults.headers.common["Coder-Session-Token"] = token; -}; -export const setHost = (host?: string) => { - axiosInstance.defaults.baseURL = host; + return fs as TypesGen.Entitlements["features"]; }; -const CONTENT_TYPE_JSON = { - "Content-Type": "application/json", +type WatchBuildLogsByTemplateVersionIdOptions = { + after?: number; + onMessage: (log: TypesGen.ProvisionerJobLog) => void; + onDone?: () => void; + onError: (error: Error) => void; }; -export const provisioners: TypesGen.ProvisionerDaemon[] = [ - { - id: "terraform", - name: "Terraform", - created_at: "", - provisioners: [], - tags: {}, - version: "v2.34.5", - api_version: "1.0", - }, +export const watchBuildLogsByTemplateVersionId = ( + versionId: string, { - id: "cdr-basic", - name: "Basic", - created_at: "", - provisioners: [], - tags: {}, - version: "v2.34.5", - api_version: "1.0", - }, -]; - -export const login = async ( - email: string, - password: string, -): Promise => { - const payload = JSON.stringify({ - email, - password, - }); + onMessage, + onDone, + onError, + after, + }: WatchBuildLogsByTemplateVersionIdOptions, +) => { + const searchParams = new URLSearchParams({ follow: "true" }); + if (after !== undefined) { + searchParams.append("after", after.toString()); + } - const response = await axiosInstance.post( - "/api/v2/users/login", - payload, - { - headers: { ...CONTENT_TYPE_JSON }, - }, + const proto = location.protocol === "https:" ? 
"wss:" : "ws:"; + const socket = new WebSocket( + `${proto}//${ + location.host + }/api/v2/templateversions/${versionId}/logs?${searchParams.toString()}`, ); - return response.data; -}; + socket.binaryType = "blob"; -export const convertToOAUTH = async (request: TypesGen.ConvertLoginRequest) => { - const response = await axiosInstance.post( - "/api/v2/users/me/convert-login", - request, + socket.addEventListener("message", (event) => + onMessage(JSON.parse(event.data) as TypesGen.ProvisionerJobLog), ); - return response.data; -}; -export const logout = async (): Promise => { - await axiosInstance.post("/api/v2/users/logout"); -}; + socket.addEventListener("error", () => { + onError(new Error("Connection for logs failed.")); + socket.close(); + }); -export const getAuthenticatedUser = async () => { - const response = await axiosInstance.get("/api/v2/users/me"); - return response.data; -}; + socket.addEventListener("close", () => { + // When the socket closes, logs have finished streaming! + onDone?.(); + }); -export const getUserParameters = async (templateID: string) => { - const response = await axiosInstance.get( - "/api/v2/users/me/autofill-parameters?template_id=" + templateID, - ); - return response.data; + return socket; }; -export const getAuthMethods = async (): Promise => { - const response = await axiosInstance.get( - "/api/v2/users/authmethods", - ); - return response.data; -}; +export const watchWorkspaceAgentLogs = ( + agentId: string, + { after, onMessage, onDone, onError }: WatchWorkspaceAgentLogsOptions, +) => { + // WebSocket compression in Safari (confirmed in 16.5) is broken when + // the server sends large messages. The following error is seen: + // + // WebSocket connection to 'wss://.../logs?follow&after=0' failed: The operation couldn’t be completed. Protocol error + // + const noCompression = + userAgentParser(navigator.userAgent).browser.name === "Safari" + ? 
"&no_compression" + : ""; -export const getUserLoginType = async (): Promise => { - const response = await axiosInstance.get( - "/api/v2/users/me/login-type", + const proto = location.protocol === "https:" ? "wss:" : "ws:"; + const socket = new WebSocket( + `${proto}//${location.host}/api/v2/workspaceagents/${agentId}/logs?follow&after=${after}${noCompression}`, ); - return response.data; -}; + socket.binaryType = "blob"; -export const checkAuthorization = async ( - params: TypesGen.AuthorizationRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/authcheck`, - params, - ); - return response.data; -}; + socket.addEventListener("message", (event) => { + const logs = JSON.parse(event.data) as TypesGen.WorkspaceAgentLog[]; + onMessage(logs); + }); -export const getApiKey = async (): Promise => { - const response = await axiosInstance.post( - "/api/v2/users/me/keys", - ); - return response.data; -}; + socket.addEventListener("error", () => { + onError(new Error("socket errored")); + }); -export const getTokens = async ( - params: TypesGen.TokensFilter, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/users/me/keys/tokens`, - { - params, - }, - ); - return response.data; -}; + socket.addEventListener("close", () => { + onDone && onDone(); + }); -export const deleteToken = async (keyId: string): Promise => { - await axiosInstance.delete("/api/v2/users/me/keys/" + keyId); + return socket; }; -export const createToken = async ( - params: TypesGen.CreateTokenRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/users/me/keys/tokens`, - params, - ); - return response.data; +type WatchWorkspaceAgentLogsOptions = { + after: number; + onMessage: (logs: TypesGen.WorkspaceAgentLog[]) => void; + onDone?: () => void; + onError: (error: Error) => void; }; -export const getTokenConfig = async (): Promise => { - const response = await axiosInstance.get( - "/api/v2/users/me/keys/tokens/tokenconfig", - ); - 
return response.data; +type WatchBuildLogsByBuildIdOptions = { + after?: number; + onMessage: (log: TypesGen.ProvisionerJobLog) => void; + onDone?: () => void; + onError?: (error: Error) => void; }; - -export const getUsers = async ( - options: TypesGen.UsersRequest, - signal?: AbortSignal, -): Promise => { - const url = getURLWithSearchParams("/api/v2/users", options); - const response = await axiosInstance.get( - url.toString(), - { - signal, - }, +export const watchBuildLogsByBuildId = ( + buildId: string, + { onMessage, onDone, onError, after }: WatchBuildLogsByBuildIdOptions, +) => { + const searchParams = new URLSearchParams({ follow: "true" }); + if (after !== undefined) { + searchParams.append("after", after.toString()); + } + const proto = location.protocol === "https:" ? "wss:" : "ws:"; + const socket = new WebSocket( + `${proto}//${ + location.host + }/api/v2/workspacebuilds/${buildId}/logs?${searchParams.toString()}`, ); - return response.data; -}; + socket.binaryType = "blob"; -export const getOrganization = async ( - organizationId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}`, + socket.addEventListener("message", (event) => + onMessage(JSON.parse(event.data) as TypesGen.ProvisionerJobLog), ); - return response.data; -}; -export const getOrganizations = async (): Promise => { - const response = await axiosInstance.get( - "/api/v2/users/me/organizations", - ); - return response.data; -}; + socket.addEventListener("error", () => { + onError && onError(new Error("Connection for logs failed.")); + socket.close(); + }); -export const getTemplate = async ( - templateId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templates/${templateId}`, - ); - return response.data; + socket.addEventListener("close", () => { + // When the socket closes, logs have finished streaming! 
+ onDone && onDone(); + }); + + return socket; }; -export interface TemplateOptions { - readonly deprecated?: boolean; -} +// This is the base header that is used for several requests. This is defined as +// a readonly value, but only copies of it should be passed into the API calls, +// because Axios is able to mutate the headers +const BASE_CONTENT_TYPE_JSON = { + "Content-Type": "application/json", +} as const satisfies HeadersInit; -export const getTemplates = async ( - organizationId: string, - options?: TemplateOptions, -): Promise => { - const params = {} as Record; - if (options && options.deprecated !== undefined) { - // Just want to check if it isn't undefined. If it has - // a boolean value, convert it to a string and include - // it as a param. - params["deprecated"] = String(options.deprecated); - } +type TemplateOptions = Readonly<{ + readonly deprecated?: boolean; +}>; - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/templates`, - { - params, - }, - ); - return response.data; +type SearchParamOptions = TypesGen.Pagination & { + q?: string; }; -export const getTemplateByName = async ( - organizationId: string, - name: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/templates/${name}`, - ); - return response.data; -}; +type RestartWorkspaceParameters = Readonly<{ + workspace: TypesGen.Workspace; + buildParameters?: TypesGen.WorkspaceBuildParameter[]; +}>; -export const getTemplateVersion = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}`, - ); - return response.data; -}; - -export const getTemplateVersionResources = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}/resources`, - ); - return response.data; -}; +export type DeleteWorkspaceOptions = Pick< + TypesGen.CreateWorkspaceBuildRequest, 
+ "log_level" & "orphan" +>; -export const getTemplateVersionVariables = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}/variables`, - ); - return response.data; -}; +export type DeploymentConfig = Readonly<{ + config: TypesGen.DeploymentValues; + options: TypesGen.SerpentOption[]; +}>; -export const getTemplateVersions = async ( - templateId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templates/${templateId}/versions`, - ); - return response.data; +type Claims = { + license_expires: number; + account_type?: string; + account_id?: string; + trial: boolean; + all_features: boolean; + version: number; + features: Record; + require_telemetry?: boolean; }; -export const getTemplateVersionByName = async ( - organizationId: string, - templateName: string, - versionName: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/templates/${templateName}/versions/${versionName}`, - ); - return response.data; +export type GetLicensesResponse = Omit & { + claims: Claims; + expires_at: string; }; -export type GetPreviousTemplateVersionByNameResponse = - | TypesGen.TemplateVersion - | undefined; - -export const getPreviousTemplateVersionByName = async ( - organizationId: string, - templateName: string, - versionName: string, -) => { - try { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/templates/${templateName}/versions/${versionName}/previous`, - ); - return response.data; - } catch (error) { - // When there is no previous version, like the first version of a template, - // the API returns 404 so in this case we can safely return undefined - if ( - isAxiosError(error) && - error.response && - error.response.status === 404 - ) { - return undefined; - } - - throw error; - } +export type InsightsParams = { + start_time: string; + end_time: string; + template_ids: 
string; }; -export const createTemplateVersion = async ( - organizationId: string, - data: TypesGen.CreateTemplateVersionRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/organizations/${organizationId}/templateversions`, - data, - ); - return response.data; +export type InsightsTemplateParams = InsightsParams & { + interval: "day" | "week"; }; -export const getTemplateVersionExternalAuth = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}/external-auth`, - ); - return response.data; +export type GetJFrogXRayScanParams = { + workspaceId: string; + agentId: string; }; -export const getTemplateVersionRichParameters = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}/rich-parameters`, - ); - return response.data; -}; +export class MissingBuildParameters extends Error { + parameters: TypesGen.TemplateVersionParameter[] = []; + versionId: string; -export const createTemplate = async ( - organizationId: string, - data: TypesGen.CreateTemplateRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/organizations/${organizationId}/templates`, - data, - ); - return response.data; -}; + constructor( + parameters: TypesGen.TemplateVersionParameter[], + versionId: string, + ) { + super("Missing build parameters."); + this.parameters = parameters; + this.versionId = versionId; + } +} -export const updateActiveTemplateVersion = async ( - templateId: string, - data: TypesGen.UpdateActiveTemplateVersion, -) => { - const response = await axiosInstance.patch( - `/api/v2/templates/${templateId}/versions`, - data, - ); - return response.data; -}; +/** + * This is the container for all API methods. 
It's split off to make it more + * clear where API methods should go, but it is eventually merged into the Api + * class with a more flat hierarchy + * + * All public methods should be defined as arrow functions to ensure that they + * can be passed around the React UI without losing their `this` context. + * + * This is one of the few cases where you have to worry about the difference + * between traditional methods and arrow function properties. Arrow functions + * disable JS's dynamic scope, and force all `this` references to resolve via + * lexical scope. + */ +class ApiMethods { + constructor(protected readonly axios: AxiosInstance) {} + + login = async ( + email: string, + password: string, + ): Promise => { + const payload = JSON.stringify({ email, password }); + const response = await this.axios.post( + "/api/v2/users/login", + payload, + { headers: { ...BASE_CONTENT_TYPE_JSON } }, + ); -export const patchTemplateVersion = async ( - templateVersionId: string, - data: TypesGen.PatchTemplateVersionRequest, -) => { - const response = await axiosInstance.patch( - `/api/v2/templateversions/${templateVersionId}`, - data, - ); - return response.data; -}; + return response.data; + }; -export const archiveTemplateVersion = async (templateVersionId: string) => { - const response = await axiosInstance.post( - `/api/v2/templateversions/${templateVersionId}/archive`, - ); - return response.data; -}; + convertToOAUTH = async (request: TypesGen.ConvertLoginRequest) => { + const response = await this.axios.post( + "/api/v2/users/me/convert-login", + request, + ); -export const unarchiveTemplateVersion = async (templateVersionId: string) => { - const response = await axiosInstance.post( - `/api/v2/templateversions/${templateVersionId}/unarchive`, - ); - return response.data; -}; + return response.data; + }; -export const updateTemplateMeta = async ( - templateId: string, - data: TypesGen.UpdateTemplateMeta, -): Promise => { - const response = await axiosInstance.patch( - 
`/api/v2/templates/${templateId}`, - data, - ); - // On 304 response there is no data payload. - if (response.status === 304) { - return null; - } + logout = async (): Promise => { + return this.axios.post("/api/v2/users/logout"); + }; - return response.data; -}; + getAuthenticatedUser = async () => { + const response = await this.axios.get("/api/v2/users/me"); + return response.data; + }; -export const deleteTemplate = async ( - templateId: string, -): Promise => { - const response = await axiosInstance.delete( - `/api/v2/templates/${templateId}`, - ); - return response.data; -}; + getUserParameters = async (templateID: string) => { + const response = await this.axios.get( + `/api/v2/users/me/autofill-parameters?template_id=${templateID}`, + ); -export const getWorkspace = async ( - workspaceId: string, - params?: TypesGen.WorkspaceOptions, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspaces/${workspaceId}`, - { - params, - }, - ); - return response.data; -}; + return response.data; + }; -/** - * - * @param workspaceId - * @returns An EventSource that emits workspace event objects (ServerSentEvent) - */ -export const watchWorkspace = (workspaceId: string): EventSource => { - return new EventSource( - `${location.protocol}//${location.host}/api/v2/workspaces/${workspaceId}/watch`, - { withCredentials: true }, - ); -}; + getAuthMethods = async (): Promise => { + const response = await this.axios.get( + "/api/v2/users/authmethods", + ); -interface SearchParamOptions extends TypesGen.Pagination { - q?: string; -} + return response.data; + }; -export const getURLWithSearchParams = ( - basePath: string, - options?: SearchParamOptions, -): string => { - if (options) { - const searchParams = new URLSearchParams(); - const keys = Object.keys(options) as (keyof SearchParamOptions)[]; - keys.forEach((key) => { - const value = options[key]; - if (value !== undefined && value !== "") { - searchParams.append(key, value.toString()); - } - }); - 
const searchString = searchParams.toString(); - return searchString ? `${basePath}?${searchString}` : basePath; - } else { - return basePath; - } -}; + getUserLoginType = async (): Promise => { + const response = await this.axios.get( + "/api/v2/users/me/login-type", + ); -export const getWorkspaces = async ( - options: TypesGen.WorkspacesRequest, -): Promise => { - const url = getURLWithSearchParams("/api/v2/workspaces", options); - const response = await axiosInstance.get(url); - return response.data; -}; + return response.data; + }; -export const getWorkspaceByOwnerAndName = async ( - username = "me", - workspaceName: string, - params?: TypesGen.WorkspaceOptions, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/users/${username}/workspace/${workspaceName}`, - { + checkAuthorization = async ( + params: TypesGen.AuthorizationRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/authcheck`, params, - }, - ); - return response.data; -}; + ); -export function waitForBuild(build: TypesGen.WorkspaceBuild) { - return new Promise((res, reject) => { - void (async () => { - let latestJobInfo: TypesGen.ProvisionerJob | undefined = undefined; - - while ( - !["succeeded", "canceled"].some( - (status) => latestJobInfo?.status.includes(status), - ) - ) { - const { job } = await getWorkspaceBuildByNumber( - build.workspace_owner_name, - build.workspace_name, - build.build_number, - ); - latestJobInfo = job; - - if (latestJobInfo.status === "failed") { - return reject(latestJobInfo); - } + return response.data; + }; - await delay(1000); - } + getApiKey = async (): Promise => { + const response = await this.axios.post( + "/api/v2/users/me/keys", + ); - return res(latestJobInfo); - })(); - }); -} + return response.data; + }; -export const postWorkspaceBuild = async ( - workspaceId: string, - data: TypesGen.CreateWorkspaceBuildRequest, -): Promise => { - const response = await axiosInstance.post( - 
`/api/v2/workspaces/${workspaceId}/builds`, - data, - ); - return response.data; -}; + getTokens = async ( + params: TypesGen.TokensFilter, + ): Promise => { + const response = await this.axios.get( + `/api/v2/users/me/keys/tokens`, + { params }, + ); -export const startWorkspace = ( - workspaceId: string, - templateVersionId: string, - logLevel?: TypesGen.ProvisionerLogLevel, - buildParameters?: TypesGen.WorkspaceBuildParameter[], -) => - postWorkspaceBuild(workspaceId, { - transition: "start", - template_version_id: templateVersionId, - log_level: logLevel, - rich_parameter_values: buildParameters, - }); -export const stopWorkspace = ( - workspaceId: string, - logLevel?: TypesGen.ProvisionerLogLevel, -) => - postWorkspaceBuild(workspaceId, { - transition: "stop", - log_level: logLevel, - }); + return response.data; + }; -export type DeleteWorkspaceOptions = Pick< - TypesGen.CreateWorkspaceBuildRequest, - "log_level" & "orphan" ->; + deleteToken = async (keyId: string): Promise => { + await this.axios.delete("/api/v2/users/me/keys/" + keyId); + }; -export const deleteWorkspace = ( - workspaceId: string, - options?: DeleteWorkspaceOptions, -) => - postWorkspaceBuild(workspaceId, { - transition: "delete", - ...options, - }); + createToken = async ( + params: TypesGen.CreateTokenRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/users/me/keys/tokens`, + params, + ); -export const cancelWorkspaceBuild = async ( - workspaceBuildId: TypesGen.WorkspaceBuild["id"], -): Promise => { - const response = await axiosInstance.patch( - `/api/v2/workspacebuilds/${workspaceBuildId}/cancel`, - ); - return response.data; -}; + return response.data; + }; + + getTokenConfig = async (): Promise => { + const response = await this.axios.get( + "/api/v2/users/me/keys/tokens/tokenconfig", + ); -export const updateWorkspaceDormancy = async ( - workspaceId: string, - dormant: boolean, -): Promise => { - const data: TypesGen.UpdateWorkspaceDormancy = { - dormant: 
dormant, + return response.data; }; - const response = await axiosInstance.put( - `/api/v2/workspaces/${workspaceId}/dormant`, - data, - ); - return response.data; -}; + getUsers = async ( + options: TypesGen.UsersRequest, + signal?: AbortSignal, + ): Promise => { + const url = getURLWithSearchParams("/api/v2/users", options); + const response = await this.axios.get( + url.toString(), + { signal }, + ); -export const updateWorkspaceAutomaticUpdates = async ( - workspaceId: string, - automaticUpdates: TypesGen.AutomaticUpdates, -): Promise => { - const req: TypesGen.UpdateWorkspaceAutomaticUpdatesRequest = { - automatic_updates: automaticUpdates, + return response.data; }; - const response = await axiosInstance.put( - `/api/v2/workspaces/${workspaceId}/autoupdates`, - req, - ); - return response.data; -}; + getOrganization = async ( + organizationId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}`, + ); -export const restartWorkspace = async ({ - workspace, - buildParameters, -}: { - workspace: TypesGen.Workspace; - buildParameters?: TypesGen.WorkspaceBuildParameter[]; -}) => { - const stopBuild = await stopWorkspace(workspace.id); - const awaitedStopBuild = await waitForBuild(stopBuild); + return response.data; + }; - // If the restart is canceled halfway through, make sure we bail - if (awaitedStopBuild?.status === "canceled") { - return; - } + getOrganizations = async (): Promise => { + const response = await this.axios.get( + "/api/v2/users/me/organizations", + ); + return response.data; + }; - const startBuild = await startWorkspace( - workspace.id, - workspace.latest_build.template_version_id, - undefined, - buildParameters, - ); - await waitForBuild(startBuild); -}; + getTemplate = async (templateId: string): Promise => { + const response = await this.axios.get( + `/api/v2/templates/${templateId}`, + ); -export const cancelTemplateVersionBuild = async ( - templateVersionId: 
TypesGen.TemplateVersion["id"], -): Promise => { - const response = await axiosInstance.patch( - `/api/v2/templateversions/${templateVersionId}/cancel`, - ); - return response.data; -}; + return response.data; + }; -export const createUser = async ( - user: TypesGen.CreateUserRequest, -): Promise => { - const response = await axiosInstance.post( - "/api/v2/users", - user, - ); - return response.data; -}; + getTemplates = async ( + organizationId: string, + options?: TemplateOptions, + ): Promise => { + const params: Record = {}; + if (options?.deprecated !== undefined) { + // Just want to check if it isn't undefined. If it has + // a boolean value, convert it to a string and include + // it as a param. + params["deprecated"] = String(options.deprecated); + } -export const createWorkspace = async ( - organizationId: string, - userId = "me", - workspace: TypesGen.CreateWorkspaceRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/organizations/${organizationId}/members/${userId}/workspaces`, - workspace, - ); - return response.data; -}; + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}/templates`, + { params }, + ); -export const patchWorkspace = async ( - workspaceId: string, - data: TypesGen.UpdateWorkspaceRequest, -) => { - await axiosInstance.patch(`/api/v2/workspaces/${workspaceId}`, data); -}; + return response.data; + }; -export const getBuildInfo = async (): Promise => { - const response = await axiosInstance.get("/api/v2/buildinfo"); - return response.data; -}; + getTemplateByName = async ( + organizationId: string, + name: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}/templates/${name}`, + ); -export const getUpdateCheck = - async (): Promise => { - const response = await axiosInstance.get("/api/v2/updatecheck"); return response.data; }; -export const putWorkspaceAutostart = async ( - workspaceID: string, - autostart: 
TypesGen.UpdateWorkspaceAutostartRequest, -): Promise => { - const payload = JSON.stringify(autostart); - await axiosInstance.put( - `/api/v2/workspaces/${workspaceID}/autostart`, - payload, - { - headers: { ...CONTENT_TYPE_JSON }, - }, - ); -}; + getTemplateVersion = async ( + versionId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}`, + ); -export const putWorkspaceAutostop = async ( - workspaceID: string, - ttl: TypesGen.UpdateWorkspaceTTLRequest, -): Promise => { - const payload = JSON.stringify(ttl); - await axiosInstance.put(`/api/v2/workspaces/${workspaceID}/ttl`, payload, { - headers: { ...CONTENT_TYPE_JSON }, - }); -}; + return response.data; + }; -export const updateProfile = async ( - userId: string, - data: TypesGen.UpdateUserProfileRequest, -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/profile`, - data, - ); - return response.data; -}; + getTemplateVersionResources = async ( + versionId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}/resources`, + ); -export const updateAppearanceSettings = async ( - userId: string, - data: TypesGen.UpdateUserAppearanceSettingsRequest, -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/appearance`, - data, - ); - return response.data; -}; + return response.data; + }; -export const getUserQuietHoursSchedule = async ( - userId: TypesGen.User["id"], -): Promise => { - const response = await axiosInstance.get( - `/api/v2/users/${userId}/quiet-hours`, - ); - return response.data; -}; + getTemplateVersionVariables = async ( + versionId: string, + ): Promise => { + // Defined as separate variable to avoid wonky Prettier formatting because + // the type definition is so long + type VerArray = TypesGen.TemplateVersionVariable[]; -export const updateUserQuietHoursSchedule = async ( - userId: TypesGen.User["id"], - data: 
TypesGen.UpdateUserQuietHoursScheduleRequest, -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/quiet-hours`, - data, - ); - return response.data; -}; + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}/variables`, + ); -export const activateUser = async ( - userId: TypesGen.User["id"], -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/status/activate`, - ); - return response.data; -}; + return response.data; + }; -export const suspendUser = async ( - userId: TypesGen.User["id"], -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/status/suspend`, - ); - return response.data; -}; + getTemplateVersions = async ( + templateId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templates/${templateId}/versions`, + ); + return response.data; + }; -export const deleteUser = async ( - userId: TypesGen.User["id"], -): Promise => { - return await axiosInstance.delete(`/api/v2/users/${userId}`); -}; + getTemplateVersionByName = async ( + organizationId: string, + templateName: string, + versionName: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}/templates/${templateName}/versions/${versionName}`, + ); -// API definition: -// https://github.com/coder/coder/blob/db665e7261f3c24a272ccec48233a3e276878239/coderd/users.go#L33-L53 -export const hasFirstUser = async (): Promise => { - try { - // If it is success, it is true - await axiosInstance.get("/api/v2/users/first"); - return true; - } catch (error) { - // If it returns a 404, it is false - if (isAxiosError(error) && error.response?.status === 404) { - return false; - } + return response.data; + }; - throw error; - } -}; + getPreviousTemplateVersionByName = async ( + organizationId: string, + templateName: string, + versionName: string, + ) => { + try { + const response = await this.axios.get( + 
`/api/v2/organizations/${organizationId}/templates/${templateName}/versions/${versionName}/previous`, + ); -export const createFirstUser = async ( - req: TypesGen.CreateFirstUserRequest, -): Promise => { - const response = await axiosInstance.post(`/api/v2/users/first`, req); - return response.data; -}; + return response.data; + } catch (error) { + // When there is no previous version, like the first version of a + // template, the API returns 404 so in this case we can safely return + // undefined + const is404 = + isAxiosError(error) && error.response && error.response.status === 404; + + if (is404) { + return undefined; + } -export const updateUserPassword = async ( - userId: TypesGen.User["id"], - updatePassword: TypesGen.UpdateUserPasswordRequest, -): Promise => - axiosInstance.put(`/api/v2/users/${userId}/password`, updatePassword); + throw error; + } + }; -export const getRoles = async (): Promise> => { - const response = - await axiosInstance.get>( - `/api/v2/users/roles`, + createTemplateVersion = async ( + organizationId: string, + data: TypesGen.CreateTemplateVersionRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/organizations/${organizationId}/templateversions`, + data, ); - return response.data; -}; -export const updateUserRoles = async ( - roles: TypesGen.Role["name"][], - userId: TypesGen.User["id"], -): Promise => { - const response = await axiosInstance.put( - `/api/v2/users/${userId}/roles`, - { roles }, - ); - return response.data; -}; + return response.data; + }; -export const getUserSSHKey = async ( - userId = "me", -): Promise => { - const response = await axiosInstance.get( - `/api/v2/users/${userId}/gitsshkey`, - ); - return response.data; -}; + getTemplateVersionExternalAuth = async ( + versionId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}/external-auth`, + ); -export const regenerateUserSSHKey = async ( - userId = "me", -): Promise => { - const 
response = await axiosInstance.put( - `/api/v2/users/${userId}/gitsshkey`, - ); - return response.data; -}; + return response.data; + }; -export const getWorkspaceBuilds = async ( - workspaceId: string, - req?: TypesGen.WorkspaceBuildsRequest, -) => { - const response = await axiosInstance.get( - getURLWithSearchParams(`/api/v2/workspaces/${workspaceId}/builds`, req), - ); - return response.data; -}; + getTemplateVersionRichParameters = async ( + versionId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}/rich-parameters`, + ); + return response.data; + }; -export const getWorkspaceBuildByNumber = async ( - username = "me", - workspaceName: string, - buildNumber: number, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/users/${username}/workspace/${workspaceName}/builds/${buildNumber}`, - ); - return response.data; -}; + createTemplate = async ( + organizationId: string, + data: TypesGen.CreateTemplateRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/organizations/${organizationId}/templates`, + data, + ); -export const getWorkspaceBuildLogs = async ( - buildId: string, - before: Date, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspacebuilds/${buildId}/logs?before=${before.getTime()}`, - ); - return response.data; -}; + return response.data; + }; -export const getWorkspaceAgentLogs = async ( - agentID: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspaceagents/${agentID}/logs`, - ); - return response.data; -}; + updateActiveTemplateVersion = async ( + templateId: string, + data: TypesGen.UpdateActiveTemplateVersion, + ) => { + const response = await this.axios.patch( + `/api/v2/templates/${templateId}/versions`, + data, + ); + return response.data; + }; -export const putWorkspaceExtension = async ( - workspaceId: string, - newDeadline: dayjs.Dayjs, -): Promise => { - await 
axiosInstance.put(`/api/v2/workspaces/${workspaceId}/extend`, { - deadline: newDeadline, - }); -}; + patchTemplateVersion = async ( + templateVersionId: string, + data: TypesGen.PatchTemplateVersionRequest, + ) => { + const response = await this.axios.patch( + `/api/v2/templateversions/${templateVersionId}`, + data, + ); -export const refreshEntitlements = async (): Promise => { - await axiosInstance.post("/api/v2/licenses/refresh-entitlements"); -}; + return response.data; + }; -export const getEntitlements = async (): Promise => { - try { - const response = await axiosInstance.get("/api/v2/entitlements"); - return response.data; - } catch (ex) { - if (isAxiosError(ex) && ex.response?.status === 404) { - return { - errors: [], - features: withDefaultFeatures({}), - has_license: false, - require_telemetry: false, - trial: false, - warnings: [], - refreshed_at: "", - }; - } - throw ex; - } -}; + archiveTemplateVersion = async (templateVersionId: string) => { + const response = await this.axios.post( + `/api/v2/templateversions/${templateVersionId}/archive`, + ); -export const getExperiments = async (): Promise => { - try { - const response = await axiosInstance.get("/api/v2/experiments"); return response.data; - } catch (error) { - if (isAxiosError(error) && error.response?.status === 404) { - return []; - } - throw error; - } -}; + }; -export const getAvailableExperiments = - async (): Promise => { - try { - const response = await axiosInstance.get("/api/v2/experiments/available"); - return response.data; - } catch (error) { - if (isAxiosError(error) && error.response?.status === 404) { - return { safe: [] }; - } - throw error; - } + unarchiveTemplateVersion = async (templateVersionId: string) => { + const response = await this.axios.post( + `/api/v2/templateversions/${templateVersionId}/unarchive`, + ); + return response.data; }; -export const getExternalAuthProvider = async ( - provider: string, -): Promise => { - const resp = await 
axiosInstance.get(`/api/v2/external-auth/${provider}`); - return resp.data; -}; + updateTemplateMeta = async ( + templateId: string, + data: TypesGen.UpdateTemplateMeta, + ): Promise => { + const response = await this.axios.patch( + `/api/v2/templates/${templateId}`, + data, + ); -export const getExternalAuthDevice = async ( - provider: string, -): Promise => { - const resp = await axiosInstance.get( - `/api/v2/external-auth/${provider}/device`, - ); - return resp.data; -}; + // On 304 response there is no data payload. + if (response.status === 304) { + return null; + } -export const exchangeExternalAuthDevice = async ( - provider: string, - req: TypesGen.ExternalAuthDeviceExchange, -): Promise => { - const resp = await axiosInstance.post( - `/api/v2/external-auth/${provider}/device`, - req, - ); - return resp.data; -}; + return response.data; + }; -export const getUserExternalAuthProviders = - async (): Promise => { - const resp = await axiosInstance.get(`/api/v2/external-auth`); - return resp.data; + deleteTemplate = async (templateId: string): Promise => { + const response = await this.axios.delete( + `/api/v2/templates/${templateId}`, + ); + + return response.data; }; -export const unlinkExternalAuthProvider = async ( - provider: string, -): Promise => { - const resp = await axiosInstance.delete(`/api/v2/external-auth/${provider}`); - return resp.data; -}; + getWorkspace = async ( + workspaceId: string, + params?: TypesGen.WorkspaceOptions, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaces/${workspaceId}`, + { params }, + ); -export const getOAuth2ProviderApps = async ( - filter?: TypesGen.OAuth2ProviderAppFilter, -): Promise => { - const params = filter?.user_id - ? 
new URLSearchParams({ user_id: filter.user_id }) - : ""; - const resp = await axiosInstance.get( - `/api/v2/oauth2-provider/apps?${params}`, - ); - return resp.data; -}; + return response.data; + }; -export const getOAuth2ProviderApp = async ( - id: string, -): Promise => { - const resp = await axiosInstance.get(`/api/v2/oauth2-provider/apps/${id}`); - return resp.data; -}; + getWorkspaces = async ( + options: TypesGen.WorkspacesRequest, + ): Promise => { + const url = getURLWithSearchParams("/api/v2/workspaces", options); + const response = await this.axios.get(url); + return response.data; + }; -export const postOAuth2ProviderApp = async ( - data: TypesGen.PostOAuth2ProviderAppRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/oauth2-provider/apps`, - data, - ); - return response.data; -}; + getWorkspaceByOwnerAndName = async ( + username = "me", + workspaceName: string, + params?: TypesGen.WorkspaceOptions, + ): Promise => { + const response = await this.axios.get( + `/api/v2/users/${username}/workspace/${workspaceName}`, + { params }, + ); -export const putOAuth2ProviderApp = async ( - id: string, - data: TypesGen.PutOAuth2ProviderAppRequest, -): Promise => { - const response = await axiosInstance.put( - `/api/v2/oauth2-provider/apps/${id}`, - data, - ); - return response.data; -}; + return response.data; + }; -export const deleteOAuth2ProviderApp = async (id: string): Promise => { - await axiosInstance.delete(`/api/v2/oauth2-provider/apps/${id}`); -}; + getWorkspaceBuildByNumber = async ( + username = "me", + workspaceName: string, + buildNumber: number, + ): Promise => { + const response = await this.axios.get( + `/api/v2/users/${username}/workspace/${workspaceName}/builds/${buildNumber}`, + ); -export const getOAuth2ProviderAppSecrets = async ( - id: string, -): Promise => { - const resp = await axiosInstance.get( - `/api/v2/oauth2-provider/apps/${id}/secrets`, - ); - return resp.data; -}; + return response.data; + }; -export 
const postOAuth2ProviderAppSecret = async ( - id: string, -): Promise => { - const resp = await axiosInstance.post( - `/api/v2/oauth2-provider/apps/${id}/secrets`, - ); - return resp.data; -}; + waitForBuild = (build: TypesGen.WorkspaceBuild) => { + return new Promise((res, reject) => { + void (async () => { + let latestJobInfo: TypesGen.ProvisionerJob | undefined = undefined; + + while ( + !["succeeded", "canceled"].some( + (status) => latestJobInfo?.status.includes(status), + ) + ) { + const { job } = await this.getWorkspaceBuildByNumber( + build.workspace_owner_name, + build.workspace_name, + build.build_number, + ); + + latestJobInfo = job; + if (latestJobInfo.status === "failed") { + return reject(latestJobInfo); + } + + await delay(1000); + } -export const deleteOAuth2ProviderAppSecret = async ( - appId: string, - secretId: string, -): Promise => { - await axiosInstance.delete( - `/api/v2/oauth2-provider/apps/${appId}/secrets/${secretId}`, - ); -}; + return res(latestJobInfo); + })(); + }); + }; -export const revokeOAuth2ProviderApp = async (appId: string): Promise => { - await axiosInstance.delete(`/oauth2/tokens?client_id=${appId}`); -}; + postWorkspaceBuild = async ( + workspaceId: string, + data: TypesGen.CreateWorkspaceBuildRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/workspaces/${workspaceId}/builds`, + data, + ); -export const getAuditLogs = async ( - options: TypesGen.AuditLogsRequest, -): Promise => { - const url = getURLWithSearchParams("/api/v2/audit", options); - const response = await axiosInstance.get(url); - return response.data; -}; + return response.data; + }; -export const getTemplateDAUs = async ( - templateId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templates/${templateId}/daus`, - ); - return response.data; -}; + startWorkspace = ( + workspaceId: string, + templateVersionId: string, + logLevel?: TypesGen.ProvisionerLogLevel, + buildParameters?: 
TypesGen.WorkspaceBuildParameter[], + ) => { + return this.postWorkspaceBuild(workspaceId, { + transition: "start", + template_version_id: templateVersionId, + log_level: logLevel, + rich_parameter_values: buildParameters, + }); + }; -export const getDeploymentDAUs = async ( - // Default to user's local timezone. - // As /api/v2/insights/daus only accepts whole-number values for tz_offset - // we truncate the tz offset down to the closest hour. - offset = Math.trunc(new Date().getTimezoneOffset() / 60), -): Promise => { - const response = await axiosInstance.get( - `/api/v2/insights/daus?tz_offset=${offset}`, - ); - return response.data; -}; + stopWorkspace = ( + workspaceId: string, + logLevel?: TypesGen.ProvisionerLogLevel, + ) => { + return this.postWorkspaceBuild(workspaceId, { + transition: "stop", + log_level: logLevel, + }); + }; -export const getTemplateACLAvailable = async ( - templateId: string, - options: TypesGen.UsersRequest, -): Promise => { - const url = getURLWithSearchParams( - `/api/v2/templates/${templateId}/acl/available`, - options, - ); - const response = await axiosInstance.get(url.toString()); - return response.data; -}; + deleteWorkspace = (workspaceId: string, options?: DeleteWorkspaceOptions) => { + return this.postWorkspaceBuild(workspaceId, { + transition: "delete", + ...options, + }); + }; -export const getTemplateACL = async ( - templateId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templates/${templateId}/acl`, - ); - return response.data; -}; + cancelWorkspaceBuild = async ( + workspaceBuildId: TypesGen.WorkspaceBuild["id"], + ): Promise => { + const response = await this.axios.patch( + `/api/v2/workspacebuilds/${workspaceBuildId}/cancel`, + ); -export const updateTemplateACL = async ( - templateId: string, - data: TypesGen.UpdateTemplateACL, -): Promise<{ message: string }> => { - const response = await axiosInstance.patch( - `/api/v2/templates/${templateId}/acl`, - data, - ); - return 
response.data; -}; + return response.data; + }; + + updateWorkspaceDormancy = async ( + workspaceId: string, + dormant: boolean, + ): Promise => { + const data: TypesGen.UpdateWorkspaceDormancy = { dormant }; + const response = await this.axios.put( + `/api/v2/workspaces/${workspaceId}/dormant`, + data, + ); -export const getApplicationsHost = - async (): Promise => { - const response = await axiosInstance.get(`/api/v2/applications/host`); return response.data; }; -export const getGroups = async ( - organizationId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/groups`, - ); - return response.data; -}; + updateWorkspaceAutomaticUpdates = async ( + workspaceId: string, + automaticUpdates: TypesGen.AutomaticUpdates, + ): Promise => { + const req: TypesGen.UpdateWorkspaceAutomaticUpdatesRequest = { + automatic_updates: automaticUpdates, + }; -export const createGroup = async ( - organizationId: string, - data: TypesGen.CreateGroupRequest, -): Promise => { - const response = await axiosInstance.post( - `/api/v2/organizations/${organizationId}/groups`, - data, - ); - return response.data; -}; + const response = await this.axios.put( + `/api/v2/workspaces/${workspaceId}/autoupdates`, + req, + ); -export const getGroup = async (groupId: string): Promise => { - const response = await axiosInstance.get(`/api/v2/groups/${groupId}`); - return response.data; -}; + return response.data; + }; -export const patchGroup = async ( - groupId: string, - data: TypesGen.PatchGroupRequest, -): Promise => { - const response = await axiosInstance.patch(`/api/v2/groups/${groupId}`, data); - return response.data; -}; + restartWorkspace = async ({ + workspace, + buildParameters, + }: RestartWorkspaceParameters): Promise => { + const stopBuild = await this.stopWorkspace(workspace.id); + const awaitedStopBuild = await this.waitForBuild(stopBuild); -export const addMember = async (groupId: string, userId: string) => { - return 
patchGroup(groupId, { - name: "", - add_users: [userId], - remove_users: [], - }); -}; + // If the restart is canceled halfway through, make sure we bail + if (awaitedStopBuild?.status === "canceled") { + return; + } -export const removeMember = async (groupId: string, userId: string) => { - return patchGroup(groupId, { - name: "", - display_name: "", - add_users: [], - remove_users: [userId], - }); -}; + const startBuild = await this.startWorkspace( + workspace.id, + workspace.latest_build.template_version_id, + undefined, + buildParameters, + ); -export const deleteGroup = async (groupId: string): Promise => { - await axiosInstance.delete(`/api/v2/groups/${groupId}`); -}; + await this.waitForBuild(startBuild); + }; -export const getWorkspaceQuota = async ( - username: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspace-quota/${encodeURIComponent(username)}`, - ); - return response.data; -}; + cancelTemplateVersionBuild = async ( + templateVersionId: TypesGen.TemplateVersion["id"], + ): Promise => { + const response = await this.axios.patch( + `/api/v2/templateversions/${templateVersionId}/cancel`, + ); -export const getAgentListeningPorts = async ( - agentID: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspaceagents/${agentID}/listening-ports`, - ); - return response.data; -}; + return response.data; + }; -export const getWorkspaceAgentSharedPorts = async ( - workspaceID: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspaces/${workspaceID}/port-share`, - ); - return response.data; -}; + createUser = async ( + user: TypesGen.CreateUserRequest, + ): Promise => { + const response = await this.axios.post( + "/api/v2/users", + user, + ); -export const upsertWorkspaceAgentSharedPort = async ( - workspaceID: string, - req: TypesGen.UpsertWorkspaceAgentPortShareRequest, -): Promise => { - const response = await axiosInstance.post( - 
`/api/v2/workspaces/${workspaceID}/port-share`, - req, - ); - return response.data; -}; + return response.data; + }; -export const deleteWorkspaceAgentSharedPort = async ( - workspaceID: string, - req: TypesGen.DeleteWorkspaceAgentPortShareRequest, -): Promise => { - const response = await axiosInstance.delete( - `/api/v2/workspaces/${workspaceID}/port-share`, - { - data: req, - }, - ); - return response.data; -}; + createWorkspace = async ( + organizationId: string, + userId = "me", + workspace: TypesGen.CreateWorkspaceRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/organizations/${organizationId}/members/${userId}/workspaces`, + workspace, + ); -// getDeploymentSSHConfig is used by the VSCode-Extension. -export const getDeploymentSSHConfig = - async (): Promise => { - const response = await axiosInstance.get(`/api/v2/deployment/ssh`); return response.data; }; -export type DeploymentConfig = { - readonly config: TypesGen.DeploymentValues; - readonly options: TypesGen.SerpentOption[]; -}; + patchWorkspace = async ( + workspaceId: string, + data: TypesGen.UpdateWorkspaceRequest, + ): Promise => { + await this.axios.patch(`/api/v2/workspaces/${workspaceId}`, data); + }; -export const getDeploymentConfig = async (): Promise => { - const response = await axiosInstance.get(`/api/v2/deployment/config`); - return response.data; -}; + getBuildInfo = async (): Promise => { + const response = await this.axios.get("/api/v2/buildinfo"); + return response.data; + }; -export const getDeploymentStats = - async (): Promise => { - const response = await axiosInstance.get(`/api/v2/deployment/stats`); + getUpdateCheck = async (): Promise => { + const response = await this.axios.get("/api/v2/updatecheck"); return response.data; }; -export const getReplicas = async (): Promise => { - const response = await axiosInstance.get(`/api/v2/replicas`); - return response.data; -}; + putWorkspaceAutostart = async ( + workspaceID: string, + autostart: 
TypesGen.UpdateWorkspaceAutostartRequest, + ): Promise => { + const payload = JSON.stringify(autostart); + await this.axios.put( + `/api/v2/workspaces/${workspaceID}/autostart`, + payload, + { headers: { ...BASE_CONTENT_TYPE_JSON } }, + ); + }; -export const getFile = async (fileId: string): Promise => { - const response = await axiosInstance.get( - `/api/v2/files/${fileId}`, - { - responseType: "arraybuffer", - }, - ); - return response.data; -}; + putWorkspaceAutostop = async ( + workspaceID: string, + ttl: TypesGen.UpdateWorkspaceTTLRequest, + ): Promise => { + const payload = JSON.stringify(ttl); + await this.axios.put(`/api/v2/workspaces/${workspaceID}/ttl`, payload, { + headers: { ...BASE_CONTENT_TYPE_JSON }, + }); + }; -export const getWorkspaceProxyRegions = async (): Promise< - TypesGen.RegionsResponse -> => { - const response = - await axiosInstance.get>( - `/api/v2/regions`, + updateProfile = async ( + userId: string, + data: TypesGen.UpdateUserProfileRequest, + ): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/profile`, + data, ); - return response.data; -}; + return response.data; + }; -export const getWorkspaceProxies = async (): Promise< - TypesGen.RegionsResponse -> => { - const response = await axiosInstance.get< - TypesGen.RegionsResponse - >(`/api/v2/workspaceproxies`); - return response.data; -}; + updateAppearanceSettings = async ( + userId: string, + data: TypesGen.UpdateUserAppearanceSettingsRequest, + ): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/appearance`, + data, + ); + return response.data; + }; -export const createWorkspaceProxy = async ( - b: TypesGen.CreateWorkspaceProxyRequest, -): Promise => { - const response = await axiosInstance.post(`/api/v2/workspaceproxies`, b); - return response.data; -}; + getUserQuietHoursSchedule = async ( + userId: TypesGen.User["id"], + ): Promise => { + const response = await this.axios.get( + `/api/v2/users/${userId}/quiet-hours`, 
+ ); + return response.data; + }; -export const getAppearance = async (): Promise => { - try { - const response = await axiosInstance.get(`/api/v2/appearance`); - return response.data || {}; - } catch (ex) { - if (isAxiosError(ex) && ex.response?.status === 404) { - return { - application_name: "", - logo_url: "", - service_banner: { - enabled: false, - }, - notification_banners: [], - }; - } - throw ex; - } -}; + updateUserQuietHoursSchedule = async ( + userId: TypesGen.User["id"], + data: TypesGen.UpdateUserQuietHoursScheduleRequest, + ): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/quiet-hours`, + data, + ); -export const updateAppearance = async ( - b: TypesGen.AppearanceConfig, -): Promise => { - const response = await axiosInstance.put(`/api/v2/appearance`, b); - return response.data; -}; + return response.data; + }; -export const getTemplateExamples = async ( - organizationId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/organizations/${organizationId}/templates/examples`, - ); - return response.data; -}; + activateUser = async ( + userId: TypesGen.User["id"], + ): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/status/activate`, + ); + return response.data; + }; -export const uploadFile = async ( - file: File, -): Promise => { - const response = await axiosInstance.post("/api/v2/files", file, { - headers: { - "Content-Type": "application/x-tar", - }, - }); - return response.data; -}; + suspendUser = async (userId: TypesGen.User["id"]): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/status/suspend`, + ); -export const getTemplateVersionLogs = async ( - versionId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/templateversions/${versionId}/logs`, - ); - return response.data; -}; + return response.data; + }; -export const updateWorkspaceVersion = async ( - workspace: TypesGen.Workspace, -): 
Promise => { - const template = await getTemplate(workspace.template_id); - return startWorkspace(workspace.id, template.active_version_id); -}; + deleteUser = async (userId: TypesGen.User["id"]): Promise => { + await this.axios.delete(`/api/v2/users/${userId}`); + }; -export const getWorkspaceBuildParameters = async ( - workspaceBuildId: TypesGen.WorkspaceBuild["id"], -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspacebuilds/${workspaceBuildId}/parameters`, - ); - return response.data; -}; -type Claims = { - license_expires: number; - account_type?: string; - account_id?: string; - trial: boolean; - all_features: boolean; - version: number; - features: Record; - require_telemetry?: boolean; -}; + // API definition: + // https://github.com/coder/coder/blob/db665e7261f3c24a272ccec48233a3e276878239/coderd/users.go#L33-L53 + hasFirstUser = async (): Promise => { + try { + // If it is success, it is true + await this.axios.get("/api/v2/users/first"); + return true; + } catch (error) { + // If it returns a 404, it is false + if (isAxiosError(error) && error.response?.status === 404) { + return false; + } -export type GetLicensesResponse = Omit & { - claims: Claims; - expires_at: string; -}; + throw error; + } + }; -export const getLicenses = async (): Promise => { - const response = await axiosInstance.get(`/api/v2/licenses`); - return response.data; -}; + createFirstUser = async ( + req: TypesGen.CreateFirstUserRequest, + ): Promise => { + const response = await this.axios.post(`/api/v2/users/first`, req); + return response.data; + }; -export const createLicense = async ( - data: TypesGen.AddLicenseRequest, -): Promise => { - const response = await axiosInstance.post(`/api/v2/licenses`, data); - return response.data; -}; + updateUserPassword = async ( + userId: TypesGen.User["id"], + updatePassword: TypesGen.UpdateUserPasswordRequest, + ): Promise => { + await this.axios.put(`/api/v2/users/${userId}/password`, updatePassword); + }; -export 
const removeLicense = async (licenseId: number): Promise => { - await axiosInstance.delete(`/api/v2/licenses/${licenseId}`); -}; + getRoles = async (): Promise> => { + const response = + await this.axios.get(`/api/v2/users/roles`); -export class MissingBuildParameters extends Error { - parameters: TypesGen.TemplateVersionParameter[] = []; - versionId: string; + return response.data; + }; - constructor( - parameters: TypesGen.TemplateVersionParameter[], - versionId: string, - ) { - super("Missing build parameters."); - this.parameters = parameters; - this.versionId = versionId; - } -} + updateUserRoles = async ( + roles: TypesGen.Role["name"][], + userId: TypesGen.User["id"], + ): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/roles`, + { roles }, + ); -/** Steps to change the workspace version - * - Get the latest template to access the latest active version - * - Get the current build parameters - * - Get the template parameters - * - Update the build parameters and check if there are missed parameters for the new version - * - If there are missing parameters raise an error - * - Create a build with the version and updated build parameters - */ -export const changeWorkspaceVersion = async ( - workspace: TypesGen.Workspace, - templateVersionId: string, - newBuildParameters: TypesGen.WorkspaceBuildParameter[] = [], -): Promise => { - const [currentBuildParameters, templateParameters] = await Promise.all([ - getWorkspaceBuildParameters(workspace.latest_build.id), - getTemplateVersionRichParameters(templateVersionId), - ]); - - const missingParameters = getMissingParameters( - currentBuildParameters, - newBuildParameters, - templateParameters, - ); + return response.data; + }; - if (missingParameters.length > 0) { - throw new MissingBuildParameters(missingParameters, templateVersionId); - } + getUserSSHKey = async (userId = "me"): Promise => { + const response = await this.axios.get( + `/api/v2/users/${userId}/gitsshkey`, + ); - 
return postWorkspaceBuild(workspace.id, { - transition: "start", - template_version_id: templateVersionId, - rich_parameter_values: newBuildParameters, - }); -}; + return response.data; + }; -/** Steps to update the workspace - * - Get the latest template to access the latest active version - * - Get the current build parameters - * - Get the template parameters - * - Update the build parameters and check if there are missed parameters for - * the newest version - * - If there are missing parameters raise an error - * - Create a build with the latest version and updated build parameters - */ -export const updateWorkspace = async ( - workspace: TypesGen.Workspace, - newBuildParameters: TypesGen.WorkspaceBuildParameter[] = [], -): Promise => { - const [template, oldBuildParameters] = await Promise.all([ - getTemplate(workspace.template_id), - getWorkspaceBuildParameters(workspace.latest_build.id), - ]); - const activeVersionId = template.active_version_id; - const templateParameters = - await getTemplateVersionRichParameters(activeVersionId); - const missingParameters = getMissingParameters( - oldBuildParameters, - newBuildParameters, - templateParameters, - ); + regenerateUserSSHKey = async (userId = "me"): Promise => { + const response = await this.axios.put( + `/api/v2/users/${userId}/gitsshkey`, + ); - if (missingParameters.length > 0) { - throw new MissingBuildParameters(missingParameters, activeVersionId); - } + return response.data; + }; - return postWorkspaceBuild(workspace.id, { - transition: "start", - template_version_id: activeVersionId, - rich_parameter_values: newBuildParameters, - }); -}; - -export const getWorkspaceResolveAutostart = async ( - workspaceId: string, -): Promise => { - const response = await axiosInstance.get( - `/api/v2/workspaces/${workspaceId}/resolve-autostart`, - ); - return response.data; -}; + getWorkspaceBuilds = async ( + workspaceId: string, + req?: TypesGen.WorkspaceBuildsRequest, + ) => { + const response = await 
this.axios.get( + getURLWithSearchParams(`/api/v2/workspaces/${workspaceId}/builds`, req), + ); -const getMissingParameters = ( - oldBuildParameters: TypesGen.WorkspaceBuildParameter[], - newBuildParameters: TypesGen.WorkspaceBuildParameter[], - templateParameters: TypesGen.TemplateVersionParameter[], -) => { - const missingParameters: TypesGen.TemplateVersionParameter[] = []; - const requiredParameters: TypesGen.TemplateVersionParameter[] = []; + return response.data; + }; - templateParameters.forEach((p) => { - // It is mutable and required. Mutable values can be changed after so we - // don't need to ask them if they are not required. - const isMutableAndRequired = p.mutable && p.required; - // Is immutable, so we can check if it is its first time on the build - const isImmutable = !p.mutable; + getWorkspaceBuildLogs = async ( + buildId: string, + before: Date, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspacebuilds/${buildId}/logs?before=${before.getTime()}`, + ); - if (isMutableAndRequired || isImmutable) { - requiredParameters.push(p); - } - }); + return response.data; + }; - for (const parameter of requiredParameters) { - // Check if there is a new value - let buildParameter = newBuildParameters.find( - (p) => p.name === parameter.name, + getWorkspaceAgentLogs = async ( + agentID: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaceagents/${agentID}/logs`, ); - // If not, get the old one - if (!buildParameter) { - buildParameter = oldBuildParameters.find( - (p) => p.name === parameter.name, + return response.data; + }; + + putWorkspaceExtension = async ( + workspaceId: string, + newDeadline: dayjs.Dayjs, + ): Promise => { + await this.axios.put(`/api/v2/workspaces/${workspaceId}/extend`, { + deadline: newDeadline, + }); + }; + + refreshEntitlements = async (): Promise => { + await this.axios.post("/api/v2/licenses/refresh-entitlements"); + }; + + getEntitlements = async (): Promise => { + try { 
+ const response = await this.axios.get( + "/api/v2/entitlements", ); - } - // If there is a value from the new or old one, it is not missed - if (buildParameter) { - continue; + return response.data; + } catch (ex) { + if (isAxiosError(ex) && ex.response?.status === 404) { + return { + errors: [], + features: withDefaultFeatures({}), + has_license: false, + require_telemetry: false, + trial: false, + warnings: [], + refreshed_at: "", + }; + } + throw ex; } + }; - missingParameters.push(parameter); - } + getExperiments = async (): Promise => { + try { + const response = await this.axios.get( + "/api/v2/experiments", + ); - // Check if parameter "options" changed and we can't use old build parameters. - templateParameters.forEach((templateParameter) => { - if (templateParameter.options.length === 0) { - return; + return response.data; + } catch (error) { + if (isAxiosError(error) && error.response?.status === 404) { + return []; + } + + throw error; } + }; - // Check if there is a new value - let buildParameter = newBuildParameters.find( - (p) => p.name === templateParameter.name, + getAvailableExperiments = + async (): Promise => { + try { + const response = await this.axios.get("/api/v2/experiments/available"); + + return response.data; + } catch (error) { + if (isAxiosError(error) && error.response?.status === 404) { + return { safe: [] }; + } + throw error; + } + }; + + getExternalAuthProvider = async ( + provider: string, + ): Promise => { + const res = await this.axios.get(`/api/v2/external-auth/${provider}`); + return res.data; + }; + + getExternalAuthDevice = async ( + provider: string, + ): Promise => { + const resp = await this.axios.get( + `/api/v2/external-auth/${provider}/device`, ); + return resp.data; + }; - // If not, get the old one - if (!buildParameter) { - buildParameter = oldBuildParameters.find( - (p) => p.name === templateParameter.name, + exchangeExternalAuthDevice = async ( + provider: string, + req: TypesGen.ExternalAuthDeviceExchange, + ): 
Promise => { + const resp = await this.axios.post( + `/api/v2/external-auth/${provider}/device`, + req, + ); + + return resp.data; + }; + + getUserExternalAuthProviders = + async (): Promise => { + const resp = await this.axios.get(`/api/v2/external-auth`); + return resp.data; + }; + + unlinkExternalAuthProvider = async (provider: string): Promise => { + const resp = await this.axios.delete(`/api/v2/external-auth/${provider}`); + return resp.data; + }; + + getOAuth2ProviderApps = async ( + filter?: TypesGen.OAuth2ProviderAppFilter, + ): Promise => { + const params = filter?.user_id + ? new URLSearchParams({ user_id: filter.user_id }).toString() + : ""; + + const resp = await this.axios.get(`/api/v2/oauth2-provider/apps?${params}`); + return resp.data; + }; + + getOAuth2ProviderApp = async ( + id: string, + ): Promise => { + const resp = await this.axios.get(`/api/v2/oauth2-provider/apps/${id}`); + return resp.data; + }; + + postOAuth2ProviderApp = async ( + data: TypesGen.PostOAuth2ProviderAppRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/oauth2-provider/apps`, + data, + ); + return response.data; + }; + + putOAuth2ProviderApp = async ( + id: string, + data: TypesGen.PutOAuth2ProviderAppRequest, + ): Promise => { + const response = await this.axios.put( + `/api/v2/oauth2-provider/apps/${id}`, + data, + ); + return response.data; + }; + + deleteOAuth2ProviderApp = async (id: string): Promise => { + await this.axios.delete(`/api/v2/oauth2-provider/apps/${id}`); + }; + + getOAuth2ProviderAppSecrets = async ( + id: string, + ): Promise => { + const resp = await this.axios.get( + `/api/v2/oauth2-provider/apps/${id}/secrets`, + ); + return resp.data; + }; + + postOAuth2ProviderAppSecret = async ( + id: string, + ): Promise => { + const resp = await this.axios.post( + `/api/v2/oauth2-provider/apps/${id}/secrets`, + ); + return resp.data; + }; + + deleteOAuth2ProviderAppSecret = async ( + appId: string, + secretId: string, + ): Promise => { 
+ await this.axios.delete( + `/api/v2/oauth2-provider/apps/${appId}/secrets/${secretId}`, + ); + }; + + revokeOAuth2ProviderApp = async (appId: string): Promise => { + await this.axios.delete(`/oauth2/tokens?client_id=${appId}`); + }; + + getAuditLogs = async ( + options: TypesGen.AuditLogsRequest, + ): Promise => { + const url = getURLWithSearchParams("/api/v2/audit", options); + const response = await this.axios.get(url); + return response.data; + }; + + getTemplateDAUs = async ( + templateId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templates/${templateId}/daus`, + ); + + return response.data; + }; + + getDeploymentDAUs = async ( + // Default to user's local timezone. + // As /api/v2/insights/daus only accepts whole-number values for tz_offset + // we truncate the tz offset down to the closest hour. + offset = Math.trunc(new Date().getTimezoneOffset() / 60), + ): Promise => { + const response = await this.axios.get( + `/api/v2/insights/daus?tz_offset=${offset}`, + ); + + return response.data; + }; + + getTemplateACLAvailable = async ( + templateId: string, + options: TypesGen.UsersRequest, + ): Promise => { + const url = getURLWithSearchParams( + `/api/v2/templates/${templateId}/acl/available`, + options, + ).toString(); + + const response = await this.axios.get(url); + return response.data; + }; + + getTemplateACL = async ( + templateId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templates/${templateId}/acl`, + ); + + return response.data; + }; + + updateTemplateACL = async ( + templateId: string, + data: TypesGen.UpdateTemplateACL, + ): Promise<{ message: string }> => { + const response = await this.axios.patch( + `/api/v2/templates/${templateId}/acl`, + data, + ); + + return response.data; + }; + + getApplicationsHost = async (): Promise => { + const response = await this.axios.get(`/api/v2/applications/host`); + return response.data; + }; + + getGroups = async (organizationId: string): 
Promise => { + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}/groups`, + ); + + return response.data; + }; + + createGroup = async ( + organizationId: string, + data: TypesGen.CreateGroupRequest, + ): Promise => { + const response = await this.axios.post( + `/api/v2/organizations/${organizationId}/groups`, + data, + ); + return response.data; + }; + + getGroup = async (groupId: string): Promise => { + const response = await this.axios.get(`/api/v2/groups/${groupId}`); + return response.data; + }; + + patchGroup = async ( + groupId: string, + data: TypesGen.PatchGroupRequest, + ): Promise => { + const response = await this.axios.patch(`/api/v2/groups/${groupId}`, data); + return response.data; + }; + + addMember = async (groupId: string, userId: string) => { + return this.patchGroup(groupId, { + name: "", + add_users: [userId], + remove_users: [], + }); + }; + + removeMember = async (groupId: string, userId: string) => { + return this.patchGroup(groupId, { + name: "", + display_name: "", + add_users: [], + remove_users: [userId], + }); + }; + + deleteGroup = async (groupId: string): Promise => { + await this.axios.delete(`/api/v2/groups/${groupId}`); + }; + + getWorkspaceQuota = async ( + username: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspace-quota/${encodeURIComponent(username)}`, + ); + return response.data; + }; + + getAgentListeningPorts = async ( + agentID: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaceagents/${agentID}/listening-ports`, + ); + return response.data; + }; + + getWorkspaceAgentSharedPorts = async ( + workspaceID: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaces/${workspaceID}/port-share`, + ); + return response.data; + }; + + upsertWorkspaceAgentSharedPort = async ( + workspaceID: string, + req: TypesGen.UpsertWorkspaceAgentPortShareRequest, + ): Promise => { + const response = await 
this.axios.post( + `/api/v2/workspaces/${workspaceID}/port-share`, + req, + ); + return response.data; + }; + + deleteWorkspaceAgentSharedPort = async ( + workspaceID: string, + req: TypesGen.DeleteWorkspaceAgentPortShareRequest, + ): Promise => { + const response = await this.axios.delete( + `/api/v2/workspaces/${workspaceID}/port-share`, + { data: req }, + ); + + return response.data; + }; + + // getDeploymentSSHConfig is used by the VSCode-Extension. + getDeploymentSSHConfig = async (): Promise => { + const response = await this.axios.get(`/api/v2/deployment/ssh`); + return response.data; + }; + + getDeploymentConfig = async (): Promise => { + const response = await this.axios.get(`/api/v2/deployment/config`); + return response.data; + }; + + getDeploymentStats = async (): Promise => { + const response = await this.axios.get(`/api/v2/deployment/stats`); + return response.data; + }; + + getReplicas = async (): Promise => { + const response = await this.axios.get(`/api/v2/replicas`); + return response.data; + }; + + getFile = async (fileId: string): Promise => { + const response = await this.axios.get( + `/api/v2/files/${fileId}`, + { responseType: "arraybuffer" }, + ); + + return response.data; + }; + + getWorkspaceProxyRegions = async (): Promise< + TypesGen.RegionsResponse + > => { + const response = + await this.axios.get>( + `/api/v2/regions`, ); - } - if (!buildParameter) { - return; + return response.data; + }; + + getWorkspaceProxies = async (): Promise< + TypesGen.RegionsResponse + > => { + const response = await this.axios.get< + TypesGen.RegionsResponse + >(`/api/v2/workspaceproxies`); + + return response.data; + }; + + createWorkspaceProxy = async ( + b: TypesGen.CreateWorkspaceProxyRequest, + ): Promise => { + const response = await this.axios.post(`/api/v2/workspaceproxies`, b); + return response.data; + }; + + getAppearance = async (): Promise => { + try { + const response = await this.axios.get(`/api/v2/appearance`); + return response.data || {}; + 
} catch (ex) { + if (isAxiosError(ex) && ex.response?.status === 404) { + return { + application_name: "", + logo_url: "", + notification_banners: [], + service_banner: { + enabled: false, + }, + }; + } + + throw ex; } + }; - const matchingOption = templateParameter.options.find( - (option) => option.value === buildParameter?.value, + updateAppearance = async ( + b: TypesGen.AppearanceConfig, + ): Promise => { + const response = await this.axios.put(`/api/v2/appearance`, b); + return response.data; + }; + + getTemplateExamples = async ( + organizationId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/organizations/${organizationId}/templates/examples`, ); - if (!matchingOption) { - missingParameters.push(templateParameter); - } - }); - return missingParameters; -}; -/** - * - * @param agentId - * @returns An EventSource that emits agent metadata event objects - * (ServerSentEvent) - */ -export const watchAgentMetadata = (agentId: string): EventSource => { - return new EventSource( - `${location.protocol}//${location.host}/api/v2/workspaceagents/${agentId}/watch-metadata`, - { withCredentials: true }, - ); -}; + return response.data; + }; -type WatchBuildLogsByTemplateVersionIdOptions = { - after?: number; - onMessage: (log: TypesGen.ProvisionerJobLog) => void; - onDone?: () => void; - onError: (error: Error) => void; -}; -export const watchBuildLogsByTemplateVersionId = ( - versionId: string, - { - onMessage, - onDone, - onError, - after, - }: WatchBuildLogsByTemplateVersionIdOptions, -) => { - const searchParams = new URLSearchParams({ follow: "true" }); - if (after !== undefined) { - searchParams.append("after", after.toString()); - } - const proto = location.protocol === "https:" ? 
"wss:" : "ws:"; - const socket = new WebSocket( - `${proto}//${ - location.host - }/api/v2/templateversions/${versionId}/logs?${searchParams.toString()}`, - ); - socket.binaryType = "blob"; - socket.addEventListener("message", (event) => - onMessage(JSON.parse(event.data) as TypesGen.ProvisionerJobLog), - ); - socket.addEventListener("error", () => { - onError(new Error("Connection for logs failed.")); - socket.close(); - }); - socket.addEventListener("close", () => { - // When the socket closes, logs have finished streaming! - onDone?.(); - }); - return socket; -}; + uploadFile = async (file: File): Promise => { + const response = await this.axios.post("/api/v2/files", file, { + headers: { "Content-Type": "application/x-tar" }, + }); -type WatchWorkspaceAgentLogsOptions = { - after: number; - onMessage: (logs: TypesGen.WorkspaceAgentLog[]) => void; - onDone?: () => void; - onError: (error: Error) => void; -}; + return response.data; + }; -export const watchWorkspaceAgentLogs = ( - agentId: string, - { after, onMessage, onDone, onError }: WatchWorkspaceAgentLogsOptions, -) => { - // WebSocket compression in Safari (confirmed in 16.5) is broken when - // the server sends large messages. The following error is seen: - // - // WebSocket connection to 'wss://.../logs?follow&after=0' failed: The operation couldn’t be completed. Protocol error - // - const noCompression = - userAgentParser(navigator.userAgent).browser.name === "Safari" - ? "&no_compression" - : ""; + getTemplateVersionLogs = async ( + versionId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/templateversions/${versionId}/logs`, + ); + return response.data; + }; - const proto = location.protocol === "https:" ? 
"wss:" : "ws:"; - const socket = new WebSocket( - `${proto}//${location.host}/api/v2/workspaceagents/${agentId}/logs?follow&after=${after}${noCompression}`, - ); - socket.binaryType = "blob"; - socket.addEventListener("message", (event) => { - const logs = JSON.parse(event.data) as TypesGen.WorkspaceAgentLog[]; - onMessage(logs); - }); - socket.addEventListener("error", () => { - onError(new Error("socket errored")); - }); - socket.addEventListener("close", () => { - onDone && onDone(); - }); + updateWorkspaceVersion = async ( + workspace: TypesGen.Workspace, + ): Promise => { + const template = await this.getTemplate(workspace.template_id); + return this.startWorkspace(workspace.id, template.active_version_id); + }; - return socket; -}; + getWorkspaceBuildParameters = async ( + workspaceBuildId: TypesGen.WorkspaceBuild["id"], + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspacebuilds/${workspaceBuildId}/parameters`, + ); -type WatchBuildLogsByBuildIdOptions = { - after?: number; - onMessage: (log: TypesGen.ProvisionerJobLog) => void; - onDone?: () => void; - onError?: (error: Error) => void; -}; -export const watchBuildLogsByBuildId = ( - buildId: string, - { onMessage, onDone, onError, after }: WatchBuildLogsByBuildIdOptions, -) => { - const searchParams = new URLSearchParams({ follow: "true" }); - if (after !== undefined) { - searchParams.append("after", after.toString()); - } - const proto = location.protocol === "https:" ? 
"wss:" : "ws:"; - const socket = new WebSocket( - `${proto}//${ - location.host - }/api/v2/workspacebuilds/${buildId}/logs?${searchParams.toString()}`, - ); - socket.binaryType = "blob"; - socket.addEventListener("message", (event) => - onMessage(JSON.parse(event.data) as TypesGen.ProvisionerJobLog), - ); - socket.addEventListener("error", () => { - onError && onError(new Error("Connection for logs failed.")); - socket.close(); - }); - socket.addEventListener("close", () => { - // When the socket closes, logs have finished streaming! - onDone && onDone(); - }); - return socket; -}; + return response.data; + }; -export const issueReconnectingPTYSignedToken = async ( - params: TypesGen.IssueReconnectingPTYSignedTokenRequest, -): Promise => { - const response = await axiosInstance.post( - "/api/v2/applications/reconnecting-pty-signed-token", - params, - ); - return response.data; -}; + getLicenses = async (): Promise => { + const response = await this.axios.get(`/api/v2/licenses`); + return response.data; + }; -export const getWorkspaceParameters = async (workspace: TypesGen.Workspace) => { - const latestBuild = workspace.latest_build; - const [templateVersionRichParameters, buildParameters] = await Promise.all([ - getTemplateVersionRichParameters(latestBuild.template_version_id), - getWorkspaceBuildParameters(latestBuild.id), - ]); - return { - templateVersionRichParameters, - buildParameters, + createLicense = async ( + data: TypesGen.AddLicenseRequest, + ): Promise => { + const response = await this.axios.post(`/api/v2/licenses`, data); + return response.data; }; -}; -export type InsightsParams = { - start_time: string; - end_time: string; - template_ids: string; -}; + removeLicense = async (licenseId: number): Promise => { + await this.axios.delete(`/api/v2/licenses/${licenseId}`); + }; -export const getInsightsUserLatency = async ( - filters: InsightsParams, -): Promise => { - const params = new URLSearchParams(filters); - const response = await 
axiosInstance.get( - `/api/v2/insights/user-latency?${params}`, - ); - return response.data; -}; + /** Steps to change the workspace version + * - Get the latest template to access the latest active version + * - Get the current build parameters + * - Get the template parameters + * - Update the build parameters and check if there are missed parameters for + * the new version + * - If there are missing parameters raise an error + * - Create a build with the version and updated build parameters + */ + changeWorkspaceVersion = async ( + workspace: TypesGen.Workspace, + templateVersionId: string, + newBuildParameters: TypesGen.WorkspaceBuildParameter[] = [], + ): Promise => { + const [currentBuildParameters, templateParameters] = await Promise.all([ + this.getWorkspaceBuildParameters(workspace.latest_build.id), + this.getTemplateVersionRichParameters(templateVersionId), + ]); + + const missingParameters = getMissingParameters( + currentBuildParameters, + newBuildParameters, + templateParameters, + ); -export const getInsightsUserActivity = async ( - filters: InsightsParams, -): Promise => { - const params = new URLSearchParams(filters); - const response = await axiosInstance.get( - `/api/v2/insights/user-activity?${params}`, - ); - return response.data; -}; + if (missingParameters.length > 0) { + throw new MissingBuildParameters(missingParameters, templateVersionId); + } -export type InsightsTemplateParams = InsightsParams & { - interval: "day" | "week"; -}; + return this.postWorkspaceBuild(workspace.id, { + transition: "start", + template_version_id: templateVersionId, + rich_parameter_values: newBuildParameters, + }); + }; -export const getInsightsTemplate = async ( - params: InsightsTemplateParams, -): Promise => { - const searchParams = new URLSearchParams(params); - const response = await axiosInstance.get( - `/api/v2/insights/templates?${searchParams}`, - ); - return response.data; -}; + /** Steps to update the workspace + * - Get the latest template to access 
the latest active version + * - Get the current build parameters + * - Get the template parameters + * - Update the build parameters and check if there are missed parameters for + * the newest version + * - If there are missing parameters raise an error + * - Create a build with the latest version and updated build parameters + */ + updateWorkspace = async ( + workspace: TypesGen.Workspace, + newBuildParameters: TypesGen.WorkspaceBuildParameter[] = [], + ): Promise => { + const [template, oldBuildParameters] = await Promise.all([ + this.getTemplate(workspace.template_id), + this.getWorkspaceBuildParameters(workspace.latest_build.id), + ]); + + const activeVersionId = template.active_version_id; + const templateParameters = + await this.getTemplateVersionRichParameters(activeVersionId); + + const missingParameters = getMissingParameters( + oldBuildParameters, + newBuildParameters, + templateParameters, + ); -export const getHealth = async (force: boolean = false) => { - const params = new URLSearchParams({ force: force.toString() }); - const response = await axiosInstance.get( - `/api/v2/debug/health?${params}`, - ); - return response.data; -}; + if (missingParameters.length > 0) { + throw new MissingBuildParameters(missingParameters, activeVersionId); + } -export const getHealthSettings = async () => { - return ( - await axiosInstance.get( - `/api/v2/debug/health/settings`, - ) - ).data; -}; + return this.postWorkspaceBuild(workspace.id, { + transition: "start", + template_version_id: activeVersionId, + rich_parameter_values: newBuildParameters, + }); + }; -export const updateHealthSettings = async ( - data: TypesGen.UpdateHealthSettings, -) => { - const response = await axiosInstance.put( - `/api/v2/debug/health/settings`, - data, - ); - return response.data; -}; + getWorkspaceResolveAutostart = async ( + workspaceId: string, + ): Promise => { + const response = await this.axios.get( + `/api/v2/workspaces/${workspaceId}/resolve-autostart`, + ); + return 
response.data; + }; -export const putFavoriteWorkspace = async (workspaceID: string) => { - await axiosInstance.put(`/api/v2/workspaces/${workspaceID}/favorite`); -}; + issueReconnectingPTYSignedToken = async ( + params: TypesGen.IssueReconnectingPTYSignedTokenRequest, + ): Promise => { + const response = await this.axios.post( + "/api/v2/applications/reconnecting-pty-signed-token", + params, + ); -export const deleteFavoriteWorkspace = async (workspaceID: string) => { - await axiosInstance.delete(`/api/v2/workspaces/${workspaceID}/favorite`); -}; + return response.data; + }; -export type GetJFrogXRayScanParams = { - workspaceId: string; - agentId: string; -}; + getWorkspaceParameters = async (workspace: TypesGen.Workspace) => { + const latestBuild = workspace.latest_build; + const [templateVersionRichParameters, buildParameters] = await Promise.all([ + this.getTemplateVersionRichParameters(latestBuild.template_version_id), + this.getWorkspaceBuildParameters(latestBuild.id), + ]); -export const getJFrogXRayScan = async (options: GetJFrogXRayScanParams) => { - const searchParams = new URLSearchParams({ - workspace_id: options.workspaceId, - agent_id: options.agentId, - }); + return { + templateVersionRichParameters, + buildParameters, + }; + }; + + getInsightsUserLatency = async ( + filters: InsightsParams, + ): Promise => { + const params = new URLSearchParams(filters); + const response = await this.axios.get( + `/api/v2/insights/user-latency?${params}`, + ); + + return response.data; + }; + + getInsightsUserActivity = async ( + filters: InsightsParams, + ): Promise => { + const params = new URLSearchParams(filters); + const response = await this.axios.get( + `/api/v2/insights/user-activity?${params}`, + ); + + return response.data; + }; + + getInsightsTemplate = async ( + params: InsightsTemplateParams, + ): Promise => { + const searchParams = new URLSearchParams(params); + const response = await this.axios.get( + `/api/v2/insights/templates?${searchParams}`, + ); 
+ + return response.data; + }; - try { - const res = await axiosInstance.get( - `/api/v2/integrations/jfrog/xray-scan?${searchParams}`, + getHealth = async (force: boolean = false) => { + const params = new URLSearchParams({ force: force.toString() }); + const response = await this.axios.get( + `/api/v2/debug/health?${params}`, ); + return response.data; + }; + + getHealthSettings = async (): Promise => { + const res = await this.axios.get( + `/api/v2/debug/health/settings`, + ); + return res.data; - } catch (error) { - if (isAxiosError(error) && error.response?.status === 404) { - // react-query library does not allow undefined to be returned as a query result - return null; + }; + + updateHealthSettings = async (data: TypesGen.UpdateHealthSettings) => { + const response = await this.axios.put( + `/api/v2/debug/health/settings`, + data, + ); + + return response.data; + }; + + putFavoriteWorkspace = async (workspaceID: string) => { + await this.axios.put(`/api/v2/workspaces/${workspaceID}/favorite`); + }; + + deleteFavoriteWorkspace = async (workspaceID: string) => { + await this.axios.delete(`/api/v2/workspaces/${workspaceID}/favorite`); + }; + + getJFrogXRayScan = async (options: GetJFrogXRayScanParams) => { + const searchParams = new URLSearchParams({ + workspace_id: options.workspaceId, + agent_id: options.agentId, + }); + + try { + const res = await this.axios.get( + `/api/v2/integrations/jfrog/xray-scan?${searchParams}`, + ); + + return res.data; + } catch (error) { + if (isAxiosError(error) && error.response?.status === 404) { + // react-query library does not allow undefined to be returned as a + // query result + return null; + } + + throw error; + } + }; +} + +// This is a hard coded CSRF token/cookie pair for local development. In prod, +// the GoLang webserver generates a random cookie with a new token for each +// document request. 
For local development, we don't use the Go webserver for +// static files, so this is the 'hack' to make local development work with +// remote apis. The CSRF cookie for this token is "JXm9hOUdZctWt0ZZGAy9xiS/gxMKYOThdxjjMnMUyn4=" +const csrfToken = + "KNKvagCBEHZK7ihe2t7fj6VeJ0UyTDco1yVUJE8N06oNqxLu5Zx1vRxZbgfC0mJJgeGkVjgs08mgPbcWPBkZ1A=="; + +// Always attach CSRF token to all requests. In puppeteer the document is +// undefined. In those cases, just do nothing. +const tokenMetadataElement = + typeof document !== "undefined" + ? document.head.querySelector('meta[property="csrf-token"]') + : null; + +function getConfiguredAxiosInstance(): AxiosInstance { + const instance = globalAxios.create(); + + // Adds 304 for the default axios validateStatus function + // https://github.com/axios/axios#handling-errors Check status here + // https://httpstatusdogs.com/ + instance.defaults.validateStatus = (status) => { + return (status >= 200 && status < 300) || status === 304; + }; + + const metadataIsAvailable = + tokenMetadataElement !== null && + tokenMetadataElement.getAttribute("content") !== null; + + if (metadataIsAvailable) { + if (process.env.NODE_ENV === "development") { + // Development mode uses a hard-coded CSRF token + instance.defaults.headers.common["X-CSRF-TOKEN"] = csrfToken; + instance.defaults.headers.common["X-CSRF-TOKEN"] = csrfToken; + tokenMetadataElement.setAttribute("content", csrfToken); + } else { + instance.defaults.headers.common["X-CSRF-TOKEN"] = + tokenMetadataElement.getAttribute("content") ?? ""; + } + } else { + // Do not write error logs if we are in a FE unit test. + if (process.env.JEST_WORKER_ID === undefined) { + console.error("CSRF token not found"); } } -}; + + return instance; +} + +// Other non-API methods defined here to make it a little easier to find them. 
+interface ClientApi extends ApiMethods { + getCsrfToken: () => string; + setSessionToken: (token: string) => void; + setHost: (host: string | undefined) => void; + getAxiosInstance: () => AxiosInstance; +} + +export class Api extends ApiMethods implements ClientApi { + constructor() { + const scopedAxiosInstance = getConfiguredAxiosInstance(); + super(scopedAxiosInstance); + } + + // As with ApiMethods, all public methods should be defined with arrow + // function syntax to ensure they can be passed around the React UI without + // losing/detaching their `this` context! + + getCsrfToken = (): string => { + return csrfToken; + }; + + setSessionToken = (token: string): void => { + this.axios.defaults.headers.common["Coder-Session-Token"] = token; + }; + + setHost = (host: string | undefined): void => { + this.axios.defaults.baseURL = host; + }; + + getAxiosInstance = (): AxiosInstance => { + return this.axios; + }; +} + +export const API = new Api(); diff --git a/site/src/api/queries/appearance.ts b/site/src/api/queries/appearance.ts index 7fc6cd1a71b9d..8deab4a4e85e6 100644 --- a/site/src/api/queries/appearance.ts +++ b/site/src/api/queries/appearance.ts @@ -1,5 +1,5 @@ import type { QueryClient } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { AppearanceConfig } from "api/typesGenerated"; import type { MetadataState } from "hooks/useEmbeddedMetadata"; import { cachedQuery } from "./util"; diff --git a/site/src/api/queries/audits.ts b/site/src/api/queries/audits.ts index 6430767480714..1dce9a29eaab8 100644 --- a/site/src/api/queries/audits.ts +++ b/site/src/api/queries/audits.ts @@ -1,4 +1,4 @@ -import { getAuditLogs } from "api/api"; +import { API } from "api/api"; import type { AuditLogResponse } from "api/typesGenerated"; import { useFilterParamsKey } from "components/Filter/filter"; import type { UsePaginatedQueryOptions } from "hooks/usePaginatedQuery"; @@ -13,7 +13,7 @@ export function paginatedAudits( return 
["auditLogs", payload, pageNumber] as const; }, queryFn: ({ payload, limit, offset }) => { - return getAuditLogs({ + return API.getAuditLogs({ offset, limit, q: payload, diff --git a/site/src/api/queries/authCheck.ts b/site/src/api/queries/authCheck.ts index be9e726ae074d..3248f35357f25 100644 --- a/site/src/api/queries/authCheck.ts +++ b/site/src/api/queries/authCheck.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; import type { AuthorizationRequest } from "api/typesGenerated"; export const AUTHORIZATION_KEY = "authorization"; diff --git a/site/src/api/queries/buildInfo.ts b/site/src/api/queries/buildInfo.ts index 0f0eecafa9f49..43dac7d20334f 100644 --- a/site/src/api/queries/buildInfo.ts +++ b/site/src/api/queries/buildInfo.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; import type { BuildInfoResponse } from "api/typesGenerated"; import type { MetadataState } from "hooks/useEmbeddedMetadata"; import { cachedQuery } from "./util"; diff --git a/site/src/api/queries/debug.ts b/site/src/api/queries/debug.ts index 1fba00c172c51..b84fdf1b7c2fb 100644 --- a/site/src/api/queries/debug.ts +++ b/site/src/api/queries/debug.ts @@ -1,5 +1,5 @@ import type { QueryClient, UseMutationOptions } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { HealthSettings, UpdateHealthSettings } from "api/typesGenerated"; export const HEALTH_QUERY_KEY = ["health"]; diff --git a/site/src/api/queries/deployment.ts b/site/src/api/queries/deployment.ts index 540c76ebd79e2..fa4d37967af18 100644 --- a/site/src/api/queries/deployment.ts +++ b/site/src/api/queries/deployment.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const deploymentConfig = () => { return { diff --git a/site/src/api/queries/entitlements.ts b/site/src/api/queries/entitlements.ts index 48f43630ea29a..542aa6f0cf591 100644 --- a/site/src/api/queries/entitlements.ts +++ 
b/site/src/api/queries/entitlements.ts @@ -1,5 +1,5 @@ import type { QueryClient } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { Entitlements } from "api/typesGenerated"; import type { MetadataState } from "hooks/useEmbeddedMetadata"; import { cachedQuery } from "./util"; diff --git a/site/src/api/queries/experiments.ts b/site/src/api/queries/experiments.ts index e0a2749d75829..86fd9096ae9f2 100644 --- a/site/src/api/queries/experiments.ts +++ b/site/src/api/queries/experiments.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; import type { Experiments } from "api/typesGenerated"; import type { MetadataState } from "hooks/useEmbeddedMetadata"; import { cachedQuery } from "./util"; diff --git a/site/src/api/queries/externalAuth.ts b/site/src/api/queries/externalAuth.ts index 18cc95a8839ff..eda68713aa5fc 100644 --- a/site/src/api/queries/externalAuth.ts +++ b/site/src/api/queries/externalAuth.ts @@ -1,5 +1,5 @@ import type { QueryClient, UseMutationOptions } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { ExternalAuth } from "api/typesGenerated"; // Returns all configured external auths for a given user. 
diff --git a/site/src/api/queries/files.ts b/site/src/api/queries/files.ts index cc840b52eb63f..a363e03f94473 100644 --- a/site/src/api/queries/files.ts +++ b/site/src/api/queries/files.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const uploadFile = () => { return { diff --git a/site/src/api/queries/groups.ts b/site/src/api/queries/groups.ts index 71cba33354b9b..5c34758df069f 100644 --- a/site/src/api/queries/groups.ts +++ b/site/src/api/queries/groups.ts @@ -1,6 +1,5 @@ import type { QueryClient, UseQueryOptions } from "react-query"; -import * as API from "api/api"; -import { checkAuthorization } from "api/api"; +import { API } from "api/api"; import type { CreateGroupRequest, Group, @@ -72,7 +71,7 @@ export const groupPermissions = (groupId: string) => { return { queryKey: [...getGroupQueryKey(groupId), "permissions"], queryFn: () => - checkAuthorization({ + API.checkAuthorization({ checks: { canUpdateGroup: { object: { diff --git a/site/src/api/queries/insights.ts b/site/src/api/queries/insights.ts index 7d60565e83bb0..4b6dad8cd2fc8 100644 --- a/site/src/api/queries/insights.ts +++ b/site/src/api/queries/insights.ts @@ -1,20 +1,20 @@ -import * as API from "api/api"; +import { type InsightsParams, type InsightsTemplateParams, API } from "api/api"; -export const insightsTemplate = (params: API.InsightsTemplateParams) => { +export const insightsTemplate = (params: InsightsTemplateParams) => { return { queryKey: ["insights", "templates", params.template_ids, params], queryFn: () => API.getInsightsTemplate(params), }; }; -export const insightsUserLatency = (params: API.InsightsParams) => { +export const insightsUserLatency = (params: InsightsParams) => { return { queryKey: ["insights", "userLatency", params.template_ids, params], queryFn: () => API.getInsightsUserLatency(params), }; }; -export const insightsUserActivity = (params: API.InsightsParams) => { +export const insightsUserActivity = (params: InsightsParams) => { 
return { queryKey: ["insights", "userActivity", params.template_ids, params], queryFn: () => API.getInsightsUserActivity(params), diff --git a/site/src/api/queries/integrations.ts b/site/src/api/queries/integrations.ts index de43a4c8f4cac..c0e7f6f28ce9d 100644 --- a/site/src/api/queries/integrations.ts +++ b/site/src/api/queries/integrations.ts @@ -1,5 +1,5 @@ import type { GetJFrogXRayScanParams } from "api/api"; -import * as API from "api/api"; +import { API } from "api/api"; export const xrayScan = (params: GetJFrogXRayScanParams) => { return { diff --git a/site/src/api/queries/oauth2.ts b/site/src/api/queries/oauth2.ts index 78b31762b2aa5..26334955c4a86 100644 --- a/site/src/api/queries/oauth2.ts +++ b/site/src/api/queries/oauth2.ts @@ -1,5 +1,5 @@ import type { QueryClient } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type * as TypesGen from "api/typesGenerated"; const appsKey = ["oauth2-provider", "apps"]; diff --git a/site/src/api/queries/roles.ts b/site/src/api/queries/roles.ts index 37b2af49f3e74..2a6c1700b53a7 100644 --- a/site/src/api/queries/roles.ts +++ b/site/src/api/queries/roles.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const roles = () => { return { diff --git a/site/src/api/queries/settings.ts b/site/src/api/queries/settings.ts index 4a086cf18532c..eb3468b68d978 100644 --- a/site/src/api/queries/settings.ts +++ b/site/src/api/queries/settings.ts @@ -1,5 +1,5 @@ import type { QueryClient, QueryOptions } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { UpdateUserQuietHoursScheduleRequest, UserQuietHoursScheduleResponse, diff --git a/site/src/api/queries/sshKeys.ts b/site/src/api/queries/sshKeys.ts index 6fc3593c318c7..43686ff1437b2 100644 --- a/site/src/api/queries/sshKeys.ts +++ b/site/src/api/queries/sshKeys.ts @@ -1,5 +1,5 @@ import type { QueryClient } from "react-query"; -import * as API from 
"api/api"; +import { API } from "api/api"; import type { GitSSHKey } from "api/typesGenerated"; const getUserSSHKeyQueryKey = (userId: string) => [userId, "sshKey"]; diff --git a/site/src/api/queries/templates.ts b/site/src/api/queries/templates.ts index 83879415bacf6..2d0485b8f347b 100644 --- a/site/src/api/queries/templates.ts +++ b/site/src/api/queries/templates.ts @@ -1,5 +1,5 @@ import type { MutationOptions, QueryClient, QueryOptions } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { CreateTemplateRequest, CreateTemplateVersionRequest, diff --git a/site/src/api/queries/updateCheck.ts b/site/src/api/queries/updateCheck.ts index 40fcc6a3cfdde..e8dc1b2cc3e41 100644 --- a/site/src/api/queries/updateCheck.ts +++ b/site/src/api/queries/updateCheck.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const updateCheck = () => { return { diff --git a/site/src/api/queries/users.ts b/site/src/api/queries/users.ts index ded7c7a5f29c8..7dcd157f7bc6c 100644 --- a/site/src/api/queries/users.ts +++ b/site/src/api/queries/users.ts @@ -3,7 +3,7 @@ import type { UseMutationOptions, UseQueryOptions, } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { AuthorizationRequest, GetUsersResponse, diff --git a/site/src/api/queries/workspaceBuilds.ts b/site/src/api/queries/workspaceBuilds.ts index 8960068b6169c..a7c0aaf4fdabe 100644 --- a/site/src/api/queries/workspaceBuilds.ts +++ b/site/src/api/queries/workspaceBuilds.ts @@ -1,5 +1,5 @@ import type { QueryOptions, UseInfiniteQueryOptions } from "react-query"; -import * as API from "api/api"; +import { API } from "api/api"; import type { WorkspaceBuild, WorkspaceBuildParameter, diff --git a/site/src/api/queries/workspaceQuota.ts b/site/src/api/queries/workspaceQuota.ts index f43adf616688e..1735b0f71279b 100644 --- a/site/src/api/queries/workspaceQuota.ts +++ b/site/src/api/queries/workspaceQuota.ts @@ 
-1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const getWorkspaceQuotaQueryKey = (username: string) => [ username, diff --git a/site/src/api/queries/workspaceportsharing.ts b/site/src/api/queries/workspaceportsharing.ts index 9e341d551a4f3..60bd99285aa54 100644 --- a/site/src/api/queries/workspaceportsharing.ts +++ b/site/src/api/queries/workspaceportsharing.ts @@ -1,8 +1,4 @@ -import { - deleteWorkspaceAgentSharedPort, - getWorkspaceAgentSharedPorts, - upsertWorkspaceAgentSharedPort, -} from "api/api"; +import { API } from "api/api"; import type { DeleteWorkspaceAgentPortShareRequest, UpsertWorkspaceAgentPortShareRequest, @@ -11,14 +7,14 @@ import type { export const workspacePortShares = (workspaceId: string) => { return { queryKey: ["sharedPorts", workspaceId], - queryFn: () => getWorkspaceAgentSharedPorts(workspaceId), + queryFn: () => API.getWorkspaceAgentSharedPorts(workspaceId), }; }; export const upsertWorkspacePortShare = (workspaceId: string) => { return { mutationFn: async (options: UpsertWorkspaceAgentPortShareRequest) => { - await upsertWorkspaceAgentSharedPort(workspaceId, options); + await API.upsertWorkspaceAgentSharedPort(workspaceId, options); }, }; }; @@ -26,7 +22,7 @@ export const upsertWorkspacePortShare = (workspaceId: string) => { export const deleteWorkspacePortShare = (workspaceId: string) => { return { mutationFn: async (options: DeleteWorkspaceAgentPortShareRequest) => { - await deleteWorkspaceAgentSharedPort(workspaceId, options); + await API.deleteWorkspaceAgentSharedPort(workspaceId, options); }, }; }; diff --git a/site/src/api/queries/workspaces.ts b/site/src/api/queries/workspaces.ts index 816cc5613e99d..95df3b7f592f6 100644 --- a/site/src/api/queries/workspaces.ts +++ b/site/src/api/queries/workspaces.ts @@ -4,8 +4,7 @@ import type { QueryOptions, UseMutationOptions, } from "react-query"; -import * as API from "api/api"; -import { putWorkspaceExtension } from "api/api"; +import { type 
DeleteWorkspaceOptions, API } from "api/api"; import type { CreateWorkspaceRequest, ProvisionerLogLevel, @@ -28,7 +27,9 @@ export const workspaceByOwnerAndName = (owner: string, name: string) => { return { queryKey: workspaceByOwnerAndNameKey(owner, name), queryFn: () => - API.getWorkspaceByOwnerAndName(owner, name, { include_deleted: true }), + API.getWorkspaceByOwnerAndName(owner, name, { + include_deleted: true, + }), }; }; @@ -111,7 +112,7 @@ export const updateDeadline = ( ): UseMutationOptions => { return { mutationFn: (deadline: Dayjs) => { - return putWorkspaceExtension(workspace.id, deadline); + return API.putWorkspaceExtension(workspace.id, deadline); }, }; }; @@ -155,7 +156,7 @@ export const deleteWorkspace = ( queryClient: QueryClient, ) => { return { - mutationFn: (options: API.DeleteWorkspaceOptions) => { + mutationFn: (options: DeleteWorkspaceOptions) => { return API.deleteWorkspace(workspace.id, options); }, onSuccess: async (build: WorkspaceBuild) => { diff --git a/site/src/components/Filter/UserFilter.tsx b/site/src/components/Filter/UserFilter.tsx index 74bbf91376a12..a42dbf07d791c 100644 --- a/site/src/components/Filter/UserFilter.tsx +++ b/site/src/components/Filter/UserFilter.tsx @@ -1,5 +1,5 @@ import type { FC } from "react"; -import { getUsers } from "api/api"; +import { API } from "api/api"; import { useAuthenticated } from "contexts/auth/RequireAuth"; import { UserAvatar } from "../UserAvatar/UserAvatar"; import { FilterSearchMenu, OptionItem } from "./filter"; @@ -42,7 +42,7 @@ export const useUserFilterMenu = ({ }; } - const usersRes = await getUsers({ q: value, limit: 1 }); + const usersRes = await API.getUsers({ q: value, limit: 1 }); const firstUser = usersRes.users.at(0); if (firstUser && firstUser.username === value) { return { @@ -54,7 +54,7 @@ export const useUserFilterMenu = ({ return null; }, getOptions: async (query) => { - const usersRes = await getUsers({ q: query, limit: 25 }); + const usersRes = await API.getUsers({ q: 
query, limit: 25 }); let options: UserOption[] = usersRes.users.map((user) => ({ label: user.username, value: user.username, diff --git a/site/src/contexts/ProxyContext.tsx b/site/src/contexts/ProxyContext.tsx index 0f5af37634092..767cdd54d1a67 100644 --- a/site/src/contexts/ProxyContext.tsx +++ b/site/src/contexts/ProxyContext.tsx @@ -8,7 +8,7 @@ import { useState, } from "react"; import { useQuery } from "react-query"; -import { getWorkspaceProxies, getWorkspaceProxyRegions } from "api/api"; +import { API } from "api/api"; import { cachedQuery } from "api/queries/util"; import type { Region, WorkspaceProxy } from "api/typesGenerated"; import { useAuthenticated } from "contexts/auth/RequireAuth"; @@ -108,10 +108,11 @@ export const ProxyProvider: FC = ({ children }) => { metadata: metadata.regions, queryKey: ["get-proxies"], queryFn: async (): Promise => { - const endpoint = permissions.editWorkspaceProxies - ? getWorkspaceProxies - : getWorkspaceProxyRegions; - const resp = await endpoint(); + const apiCall = permissions.editWorkspaceProxies + ? 
API.getWorkspaceProxies + : API.getWorkspaceProxyRegions; + + const resp = await apiCall(); return resp.regions; }, }), diff --git a/site/src/contexts/auth/RequireAuth.tsx b/site/src/contexts/auth/RequireAuth.tsx index 7f23da60c3dbb..2d6b14d3db69f 100644 --- a/site/src/contexts/auth/RequireAuth.tsx +++ b/site/src/contexts/auth/RequireAuth.tsx @@ -1,6 +1,6 @@ import { type FC, useEffect } from "react"; import { Outlet, Navigate, useLocation } from "react-router-dom"; -import { axiosInstance } from "api/api"; +import { API } from "api/api"; import { isApiError } from "api/errors"; import { Loader } from "components/Loader/Loader"; import { ProxyProvider } from "contexts/ProxyContext"; @@ -18,6 +18,7 @@ export const RequireAuth: FC = () => { return; } + const axiosInstance = API.getAxiosInstance(); const interceptorHandle = axiosInstance.interceptors.response.use( (okResponse) => okResponse, (error: unknown) => { diff --git a/site/src/contexts/useProxyLatency.ts b/site/src/contexts/useProxyLatency.ts index 497cd457ede51..df2afc277b44a 100644 --- a/site/src/contexts/useProxyLatency.ts +++ b/site/src/contexts/useProxyLatency.ts @@ -1,6 +1,6 @@ import PerformanceObserver from "@fastly/performance-observer-polyfill"; import { useEffect, useReducer, useState } from "react"; -import { axiosInstance } from "api/api"; +import { API } from "api/api"; import type { Region } from "api/typesGenerated"; import { generateRandomString } from "utils/random"; @@ -197,6 +197,7 @@ export const useProxyLatency = ( // The resource requests include xmlhttp requests. 
observer.observe({ entryTypes: ["resource"] }); + const axiosInstance = API.getAxiosInstance(); const proxyRequests = Object.keys(proxyChecks).map((latencyURL) => { return axiosInstance.get(latencyURL, { withCredentials: false, diff --git a/site/src/modules/resources/AgentLogs/AgentLogs.tsx b/site/src/modules/resources/AgentLogs/AgentLogs.tsx index 2216b7eae24ae..407e3c12fe9b5 100644 --- a/site/src/modules/resources/AgentLogs/AgentLogs.tsx +++ b/site/src/modules/resources/AgentLogs/AgentLogs.tsx @@ -8,7 +8,7 @@ import { useState, } from "react"; import { FixedSizeList as List } from "react-window"; -import * as API from "api/api"; +import { watchWorkspaceAgentLogs } from "api/api"; import type { WorkspaceAgentLogSource } from "api/typesGenerated"; import { AGENT_LOG_LINE_HEIGHT, @@ -193,7 +193,7 @@ export const useAgentLogs = ( return; } - const socket = API.watchWorkspaceAgentLogs(agentId, { + const socket = watchWorkspaceAgentLogs(agentId, { // Get all logs after: 0, onMessage: (logs) => { diff --git a/site/src/modules/resources/AppLink/AppLink.tsx b/site/src/modules/resources/AppLink/AppLink.tsx index 7243d2b1af5b6..7042c879385d0 100644 --- a/site/src/modules/resources/AppLink/AppLink.tsx +++ b/site/src/modules/resources/AppLink/AppLink.tsx @@ -4,7 +4,7 @@ import CircularProgress from "@mui/material/CircularProgress"; import Link from "@mui/material/Link"; import Tooltip from "@mui/material/Tooltip"; import { type FC, useState } from "react"; -import { getApiKey } from "api/api"; +import { API } from "api/api"; import type * as TypesGen from "api/typesGenerated"; import { useProxy } from "contexts/ProxyContext"; import { createAppLinkHref } from "utils/apps"; @@ -145,7 +145,7 @@ export const AppLink: FC = ({ app, workspace, agent }) => { let url = href; if (hasMagicToken !== -1) { setFetchingSessionToken(true); - const key = await getApiKey(); + const key = await API.getApiKey(); url = href.replaceAll(magicTokenString, key.key); setFetchingSessionToken(false); } 
diff --git a/site/src/modules/resources/PortForwardButton.tsx b/site/src/modules/resources/PortForwardButton.tsx index 6d46b1064ad46..d22e986a1c074 100644 --- a/site/src/modules/resources/PortForwardButton.tsx +++ b/site/src/modules/resources/PortForwardButton.tsx @@ -19,7 +19,7 @@ import { type FormikContextType, useFormik } from "formik"; import { useState, type FC } from "react"; import { useQuery, useMutation } from "react-query"; import * as Yup from "yup"; -import { getAgentListeningPorts } from "api/api"; +import { API } from "api/api"; import { deleteWorkspacePortShare, upsertWorkspacePortShare, @@ -70,7 +70,7 @@ export const PortForwardButton: FC = (props) => { const portsQuery = useQuery({ queryKey: ["portForward", agent.id], - queryFn: () => getAgentListeningPorts(agent.id), + queryFn: () => API.getAgentListeningPorts(agent.id), enabled: agent.status === "connected", refetchInterval: 5_000, }); diff --git a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx index 73597dd22b6d3..73763439076bd 100644 --- a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx +++ b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx @@ -3,7 +3,7 @@ import ButtonGroup from "@mui/material/ButtonGroup"; import Menu from "@mui/material/Menu"; import MenuItem from "@mui/material/MenuItem"; import { type FC, useState, useRef } from "react"; -import { getApiKey } from "api/api"; +import { API } from "api/api"; import type { DisplayApp } from "api/typesGenerated"; import { VSCodeIcon } from "components/Icons/VSCodeIcon"; import { VSCodeInsidersIcon } from "components/Icons/VSCodeInsidersIcon"; @@ -119,7 +119,7 @@ const VSCodeButton: FC = ({ disabled={loading} onClick={() => { setLoading(true); - getApiKey() + API.getApiKey() .then(({ key }) => { const query = new URLSearchParams({ owner: userName, @@ -163,7 +163,7 @@ const VSCodeInsidersButton: FC = ({ 
disabled={loading} onClick={() => { setLoading(true); - getApiKey() + API.getApiKey() .then(({ key }) => { const query = new URLSearchParams({ owner: userName, diff --git a/site/src/pages/AuditPage/AuditPage.test.tsx b/site/src/pages/AuditPage/AuditPage.test.tsx index 24b00fa082430..be3317ee68099 100644 --- a/site/src/pages/AuditPage/AuditPage.test.tsx +++ b/site/src/pages/AuditPage/AuditPage.test.tsx @@ -1,7 +1,7 @@ import { screen, waitFor } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { HttpResponse, http } from "msw"; -import * as API from "api/api"; +import { API } from "api/api"; import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; import { MockAuditLog, diff --git a/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx b/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx index 38eacc38fd30e..38cf8994011c3 100644 --- a/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx +++ b/site/src/pages/CreateTemplatePage/CreateTemplatePage.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockTemplateExample, MockTemplateVersion, diff --git a/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx b/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx index f58eed5d1e6b2..630834cd5fa72 100644 --- a/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenPage.test.tsx @@ -1,6 +1,6 @@ import { screen, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { renderWithAuth, waitForLoaderToBeRemoved, diff --git a/site/src/pages/CreateTokenPage/CreateTokenPage.tsx b/site/src/pages/CreateTokenPage/CreateTokenPage.tsx index 4ea1f98144671..1fcf9daaa43fb 
100644 --- a/site/src/pages/CreateTokenPage/CreateTokenPage.tsx +++ b/site/src/pages/CreateTokenPage/CreateTokenPage.tsx @@ -3,7 +3,7 @@ import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; import { useNavigate } from "react-router-dom"; -import { createToken, getTokenConfig } from "api/api"; +import { API } from "api/api"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { CodeExample } from "components/CodeExample/CodeExample"; import { ConfirmDialog } from "components/Dialogs/ConfirmDialog/ConfirmDialog"; @@ -28,7 +28,7 @@ export const CreateTokenPage: FC = () => { isError: creationFailed, isSuccess: creationSuccessful, data: newToken, - } = useMutation(createToken); + } = useMutation(API.createToken); const { data: tokenConfig, isLoading: fetchingTokenConfig, @@ -36,7 +36,7 @@ export const CreateTokenPage: FC = () => { error: tokenFetchError, } = useQuery({ queryKey: ["tokenconfig"], - queryFn: getTokenConfig, + queryFn: API.getTokenConfig, }); const [formError, setFormError] = useState(undefined); diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx index 85ffe8ea45896..02bde4b7134cf 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.test.tsx @@ -1,6 +1,6 @@ import { fireEvent, screen, waitFor } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockTemplate, MockUser, diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx index b885b11d32f6c..df0bb38891f03 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx @@ -2,7 
+2,7 @@ import { type FC, useCallback, useEffect, useState, useRef } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { useNavigate, useParams, useSearchParams } from "react-router-dom"; -import { getUserParameters } from "api/api"; +import { API } from "api/api"; import type { ApiErrorResponse } from "api/errors"; import { checkAuthorization } from "api/queries/authCheck"; import { @@ -99,7 +99,7 @@ const CreateWorkspacePage: FC = () => { const autofillEnabled = experiments.includes("auto-fill-parameters"); const userParametersQuery = useQuery({ queryKey: ["userParameters"], - queryFn: () => getUserParameters(templateQuery.data!.id), + queryFn: () => API.getUserParameters(templateQuery.data!.id), enabled: autofillEnabled && templateQuery.isSuccess, }); const autofillParameters = getAutofillParameters( diff --git a/site/src/pages/DeploySettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx b/site/src/pages/DeploySettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx index 98758f22f7b5b..b40d7a201dd55 100644 --- a/site/src/pages/DeploySettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx +++ b/site/src/pages/DeploySettingsPage/LicensesSettingsPage/AddNewLicensePage.tsx @@ -2,7 +2,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; import { useNavigate } from "react-router-dom"; -import { createLicense } from "api/api"; +import { API } from "api/api"; import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; import { pageTitle } from "utils/page"; import { AddNewLicensePageView } from "./AddNewLicensePageView"; @@ -14,7 +14,7 @@ const AddNewLicensePage: FC = () => { mutate: saveLicenseKeyApi, isLoading: isCreating, error: savingLicenseError, - } = useMutation(createLicense, { + } = useMutation(API.createLicense, { onSuccess: () => { displaySuccess("You have successfully added a 
license"); navigate("/deployment/licenses?success=true"); diff --git a/site/src/pages/DeploySettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx b/site/src/pages/DeploySettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx index dcd219c99e8c9..c3e353b63074e 100644 --- a/site/src/pages/DeploySettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx +++ b/site/src/pages/DeploySettingsPage/LicensesSettingsPage/LicensesSettingsPage.tsx @@ -3,7 +3,7 @@ import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { useSearchParams } from "react-router-dom"; import useToggle from "react-use/lib/useToggle"; -import { getLicenses, removeLicense } from "api/api"; +import { API } from "api/api"; import { getErrorMessage } from "api/errors"; import { entitlements, refreshEntitlements } from "api/queries/entitlements"; import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; @@ -36,7 +36,7 @@ const LicensesSettingsPage: FC = () => { }, [entitlementsQuery.error]); const { mutate: removeLicenseApi, isLoading: isRemovingLicense } = - useMutation(removeLicense, { + useMutation(API.removeLicense, { onSuccess: () => { displaySuccess("Successfully removed license"); void queryClient.invalidateQueries(["licenses"]); @@ -48,7 +48,7 @@ const LicensesSettingsPage: FC = () => { const { data: licenses, isLoading } = useQuery({ queryKey: ["licenses"], - queryFn: () => getLicenses(), + queryFn: () => API.getLicenses(), }); useEffect(() => { diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx index 580df7f645c7c..d2867a80085b6 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.test.tsx @@ -1,6 +1,6 @@ import { screen } from "@testing-library/react"; import userEvent from 
"@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { TemplateLayout } from "pages/TemplatePage/TemplateLayout"; import { MockTemplate, diff --git a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx index 57716e0b91fe5..643f9c166fb7b 100644 --- a/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx +++ b/site/src/pages/TemplatePage/TemplateEmbedPage/TemplateEmbedPage.tsx @@ -7,7 +7,7 @@ import RadioGroup from "@mui/material/RadioGroup"; import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { getTemplateVersionRichParameters } from "api/api"; +import { API } from "api/api"; import type { Template, TemplateVersionParameter } from "api/typesGenerated"; import { FormSection, VerticalForm } from "components/Form/Form"; import { Loader } from "components/Loader/Loader"; @@ -24,7 +24,8 @@ const TemplateEmbedPage: FC = () => { const { template } = useTemplateLayoutContext(); const { data: templateParameters } = useQuery({ queryKey: ["template", template.id, "embed"], - queryFn: () => getTemplateVersionRichParameters(template.active_version_id), + queryFn: () => + API.getTemplateVersionRichParameters(template.active_version_id), }); return ( diff --git a/site/src/pages/TemplatePage/TemplateLayout.tsx b/site/src/pages/TemplatePage/TemplateLayout.tsx index 43bf807c45df2..e388c81feb27e 100644 --- a/site/src/pages/TemplatePage/TemplateLayout.tsx +++ b/site/src/pages/TemplatePage/TemplateLayout.tsx @@ -7,11 +7,7 @@ import { } from "react"; import { useQuery } from "react-query"; import { Outlet, useLocation, useNavigate, useParams } from "react-router-dom"; -import { - checkAuthorization, - getTemplateByName, - getTemplateVersion, -} from "api/api"; +import { API } from "api/api"; import type { AuthorizationRequest } from 
"api/typesGenerated"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; @@ -39,10 +35,11 @@ const templatePermissions = ( }); const fetchTemplate = async (organizationId: string, templateName: string) => { - const template = await getTemplateByName(organizationId, templateName); + const template = await API.getTemplateByName(organizationId, templateName); + const [activeVersion, permissions] = await Promise.all([ - getTemplateVersion(template.active_version_id), - checkAuthorization({ + API.getTemplateVersion(template.active_version_id), + API.checkAuthorization({ checks: templatePermissions(template.id), }), ]); diff --git a/site/src/pages/TemplatePage/TemplateSummaryPage/TemplateSummaryPage.tsx b/site/src/pages/TemplatePage/TemplateSummaryPage/TemplateSummaryPage.tsx index c0460c5b59d74..226f6d7fa07fb 100644 --- a/site/src/pages/TemplatePage/TemplateSummaryPage/TemplateSummaryPage.tsx +++ b/site/src/pages/TemplatePage/TemplateSummaryPage/TemplateSummaryPage.tsx @@ -1,7 +1,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; -import { getTemplateVersionResources } from "api/api"; +import { API } from "api/api"; import { useTemplateLayoutContext } from "pages/TemplatePage/TemplateLayout"; import { getTemplatePageTitle } from "../utils"; import { TemplateSummaryPageView } from "./TemplateSummaryPageView"; @@ -10,7 +10,7 @@ export const TemplateSummaryPage: FC = () => { const { template, activeVersion } = useTemplateLayoutContext(); const { data: resources } = useQuery({ queryKey: ["templates", template.id, "resources"], - queryFn: () => getTemplateVersionResources(activeVersion.id), + queryFn: () => API.getTemplateVersionResources(activeVersion.id), }); return ( diff --git a/site/src/pages/TemplatePage/TemplateVersionsPage/TemplateVersionsPage.tsx b/site/src/pages/TemplatePage/TemplateVersionsPage/TemplateVersionsPage.tsx index 
5d50f110d00de..df05f167e776e 100644 --- a/site/src/pages/TemplatePage/TemplateVersionsPage/TemplateVersionsPage.tsx +++ b/site/src/pages/TemplatePage/TemplateVersionsPage/TemplateVersionsPage.tsx @@ -1,11 +1,7 @@ import { useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; -import { - archiveTemplateVersion, - getTemplateVersions, - updateActiveTemplateVersion, -} from "api/api"; +import { API } from "api/api"; import { getErrorMessage } from "api/errors"; import { ConfirmDialog } from "components/Dialogs/ConfirmDialog/ConfirmDialog"; import { displayError, displaySuccess } from "components/GlobalSnackbar/utils"; @@ -17,7 +13,7 @@ const TemplateVersionsPage = () => { const { template, permissions } = useTemplateLayoutContext(); const { data } = useQuery({ queryKey: ["template", "versions", template.id], - queryFn: () => getTemplateVersions(template.id), + queryFn: () => API.getTemplateVersions(template.id), }); // We use this to update the active version in the UI without having to refetch the template const [latestActiveVersion, setLatestActiveVersion] = useState( @@ -25,7 +21,7 @@ const TemplateVersionsPage = () => { ); const { mutate: promoteVersion, isLoading: isPromoting } = useMutation({ mutationFn: (templateVersionId: string) => { - return updateActiveTemplateVersion(template.id, { + return API.updateActiveTemplateVersion(template.id, { id: templateVersionId, }); }, @@ -41,7 +37,7 @@ const TemplateVersionsPage = () => { const { mutate: archiveVersion, isLoading: isArchiving } = useMutation({ mutationFn: (templateVersionId: string) => { - return archiveTemplateVersion(templateVersionId); + return API.archiveTemplateVersion(templateVersionId); }, onSuccess: async () => { // The reload is unfortunate. 
When a version is archived, we should hide diff --git a/site/src/pages/TemplatePage/useDeletionDialogState.test.ts b/site/src/pages/TemplatePage/useDeletionDialogState.test.ts index 63a53f3d1b682..d0dab66bbd975 100644 --- a/site/src/pages/TemplatePage/useDeletionDialogState.test.ts +++ b/site/src/pages/TemplatePage/useDeletionDialogState.test.ts @@ -1,5 +1,5 @@ import { act, renderHook, waitFor } from "@testing-library/react"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockTemplate } from "testHelpers/entities"; import { useDeletionDialogState } from "./useDeletionDialogState"; diff --git a/site/src/pages/TemplatePage/useDeletionDialogState.ts b/site/src/pages/TemplatePage/useDeletionDialogState.ts index 7b3b7bbfcac63..cc7e55670e2be 100644 --- a/site/src/pages/TemplatePage/useDeletionDialogState.ts +++ b/site/src/pages/TemplatePage/useDeletionDialogState.ts @@ -1,5 +1,5 @@ import { useState } from "react"; -import { deleteTemplate } from "api/api"; +import { API } from "api/api"; import { getErrorMessage } from "api/errors"; import { displayError } from "components/GlobalSnackbar/utils"; @@ -27,7 +27,7 @@ export const useDeletionDialogState = ( const confirmDelete = async () => { try { setState({ status: "deleting" }); - await deleteTemplate(templateId); + await API.deleteTemplate(templateId); onDelete(); } catch (e) { setState({ status: "confirming" }); diff --git a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.test.tsx b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.test.tsx index 2b9402bda94bd..716322f982288 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.test.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.test.tsx @@ -1,7 +1,7 @@ import { screen, waitFor } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { http, 
HttpResponse } from "msw"; -import * as API from "api/api"; +import { API, withDefaultFeatures } from "api/api"; import type { Template, UpdateTemplateMeta } from "api/typesGenerated"; import { Language as FooterFormLanguage } from "components/FormFooter/FormFooter"; import { MockEntitlements, MockTemplate } from "testHelpers/entities"; @@ -138,7 +138,7 @@ describe("TemplateSettingsPage", () => { http.get("/api/v2/entitlements", () => { return HttpResponse.json({ ...MockEntitlements, - features: API.withDefaultFeatures({ + features: withDefaultFeatures({ access_control: { enabled: true, entitlement: "entitled" }, }), }); @@ -163,7 +163,7 @@ describe("TemplateSettingsPage", () => { http.get("/api/v2/entitlements", () => { return HttpResponse.json({ ...MockEntitlements, - features: API.withDefaultFeatures({ + features: withDefaultFeatures({ access_control: { enabled: false, entitlement: "not_entitled" }, }), }); diff --git a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx index 05e61630db9f1..4438cec0bea06 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx @@ -2,7 +2,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; import { useNavigate, useParams } from "react-router-dom"; -import { updateTemplateMeta } from "api/api"; +import { API } from "api/api"; import { templateByNameKey } from "api/queries/templates"; import type { UpdateTemplateMeta } from "api/typesGenerated"; import { displaySuccess } from "components/GlobalSnackbar/utils"; @@ -30,7 +30,9 @@ export const TemplateSettingsPage: FC = () => { isLoading: isSubmitting, error: submitError, } = useMutation( - (data: UpdateTemplateMeta) => 
updateTemplateMeta(template.id, data), + (data: UpdateTemplateMeta) => { + return API.updateTemplateMeta(template.id, data); + }, { onSuccess: async (data) => { // This update has a chance to return a 304 which means nothing was updated. diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx index cb0505e99b800..48d9d8ef44e4f 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { Language as FooterFormLanguage } from "components/FormFooter/FormFooter"; import { MockEntitlementsWithScheduling, diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx index de45cbd38652e..db37ed32dbcc3 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx @@ -2,7 +2,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; import { useNavigate, useParams } from "react-router-dom"; -import { updateTemplateMeta } from "api/api"; +import { API } from "api/api"; import { templateByNameKey } from "api/queries/templates"; import type { UpdateTemplateMeta } from "api/typesGenerated"; import { displaySuccess } from "components/GlobalSnackbar/utils"; @@ -27,7 +27,7 @@ const TemplateSchedulePage: FC = () => { isLoading: isSubmitting, error: submitError, } = useMutation( - (data: UpdateTemplateMeta) => 
updateTemplateMeta(template.id, data), + (data: UpdateTemplateMeta) => API.updateTemplateMeta(template.id, data), { onSuccess: async () => { await queryClient.invalidateQueries( diff --git a/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.test.tsx b/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.test.tsx index c123a317691e5..a99d599dd3947 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.test.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.test.tsx @@ -1,6 +1,6 @@ import { screen } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { Language as FooterFormLanguage } from "components/FormFooter/FormFooter"; import { MockTemplate, diff --git a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.test.tsx b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.test.tsx index 6f54255fbe23a..8c63b7db428d1 100644 --- a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.test.tsx +++ b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.test.tsx @@ -4,7 +4,7 @@ import WS from "jest-websocket-mock"; import { HttpResponse, http } from "msw"; import { QueryClient } from "react-query"; import { RouterProvider, createMemoryRouter } from "react-router-dom"; -import * as api from "api/api"; +import * as apiModule from "api/api"; import { templateVersionVariablesKey } from "api/queries/templates"; import type { TemplateVersion } from "api/typesGenerated"; import { AppProviders } from "App"; @@ -26,6 +26,8 @@ import type { MonacoEditorProps } from "./MonacoEditor"; import { Language } from "./PublishTemplateVersionDialog"; import TemplateVersionEditorPage from "./TemplateVersionEditorPage"; +const { API } = apiModule; + // For some reason this component in Jest is throwing a 
MUI style warning so, // since we don't need it for this test, we can mock it out jest.mock( @@ -72,8 +74,8 @@ const buildTemplateVersion = async ( user: UserEvent, topbar: HTMLElement, ) => { - jest.spyOn(api, "uploadFile").mockResolvedValueOnce({ hash: "hash" }); - jest.spyOn(api, "createTemplateVersion").mockResolvedValue({ + jest.spyOn(API, "uploadFile").mockResolvedValueOnce({ hash: "hash" }); + jest.spyOn(API, "createTemplateVersion").mockResolvedValue({ ...templateVersion, job: { ...templateVersion.job, @@ -81,10 +83,10 @@ const buildTemplateVersion = async ( }, }); jest - .spyOn(api, "getTemplateVersionByName") + .spyOn(API, "getTemplateVersionByName") .mockResolvedValue(templateVersion); jest - .spyOn(api, "watchBuildLogsByTemplateVersionId") + .spyOn(apiModule, "watchBuildLogsByTemplateVersionId") .mockImplementation((_, options) => { options.onMessage(MockWorkspaceBuildLogs[0]); options.onDone?.(); @@ -116,10 +118,10 @@ test("Use custom name, message and set it as active when publishing", async () = // Publish const patchTemplateVersion = jest - .spyOn(api, "patchTemplateVersion") + .spyOn(API, "patchTemplateVersion") .mockResolvedValue(newTemplateVersion); const updateActiveTemplateVersion = jest - .spyOn(api, "updateActiveTemplateVersion") + .spyOn(API, "updateActiveTemplateVersion") .mockResolvedValue({ message: "" }); const publishButton = within(topbar).getByRole("button", { name: "Publish", @@ -162,10 +164,10 @@ test("Do not mark as active if promote is not checked", async () => { // Publish const patchTemplateVersion = jest - .spyOn(api, "patchTemplateVersion") + .spyOn(API, "patchTemplateVersion") .mockResolvedValue(newTemplateVersion); const updateActiveTemplateVersion = jest - .spyOn(api, "updateActiveTemplateVersion") + .spyOn(API, "updateActiveTemplateVersion") .mockResolvedValue({ message: "" }); const publishButton = within(topbar).getByRole("button", { name: "Publish", @@ -207,7 +209,7 @@ test("Patch request is not send when there are no 
changes", async () => { // Publish const patchTemplateVersion = jest - .spyOn(api, "patchTemplateVersion") + .spyOn(API, "patchTemplateVersion") .mockResolvedValue(newTemplateVersion); const publishButton = within(topbar).getByRole("button", { name: "Publish", diff --git a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx index 10412bd616a67..3a622630cd770 100644 --- a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx +++ b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx @@ -2,7 +2,7 @@ import { type FC, useEffect, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { useNavigate, useParams, useSearchParams } from "react-router-dom"; -import { patchTemplateVersion, updateActiveTemplateVersion } from "api/api"; +import { API } from "api/api"; import { file, uploadFile } from "api/queries/files"; import { createTemplateVersion, @@ -323,12 +323,12 @@ const publishVersion = async (options: { const publishActions: Promise[] = []; if (haveChanges) { - publishActions.push(patchTemplateVersion(version.id, data)); + publishActions.push(API.patchTemplateVersion(version.id, data)); } if (isActiveVersion) { publishActions.push( - updateActiveTemplateVersion(version.template_id!, { + API.updateActiveTemplateVersion(version.template_id!, { id: version.id, }), ); diff --git a/site/src/pages/TerminalPage/TerminalPage.test.tsx b/site/src/pages/TerminalPage/TerminalPage.test.tsx index 8b910602474d0..26112b743d1e7 100644 --- a/site/src/pages/TerminalPage/TerminalPage.test.tsx +++ b/site/src/pages/TerminalPage/TerminalPage.test.tsx @@ -2,7 +2,7 @@ import "jest-canvas-mock"; import { waitFor } from "@testing-library/react"; import WS from "jest-websocket-mock"; import { HttpResponse, http } from "msw"; -import * as API from "api/api"; +import { API } from 
"api/api"; import { MockUser, MockWorkspace, diff --git a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.test.tsx b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.test.tsx index 4bce60a5fe465..7687e95e90a49 100644 --- a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.test.tsx +++ b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.test.tsx @@ -1,5 +1,5 @@ import { fireEvent, screen, waitFor } from "@testing-library/react"; -import * as API from "api/api"; +import { API } from "api/api"; import { mockApiError } from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; import * as AccountForm from "./AccountForm"; diff --git a/site/src/pages/UserSettingsPage/AppearancePage/AppearancePage.test.tsx b/site/src/pages/UserSettingsPage/AppearancePage/AppearancePage.test.tsx index 01c0ad3addfd0..5cb6ad6d3edee 100644 --- a/site/src/pages/UserSettingsPage/AppearancePage/AppearancePage.test.tsx +++ b/site/src/pages/UserSettingsPage/AppearancePage/AppearancePage.test.tsx @@ -1,6 +1,6 @@ import { screen } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockUser } from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; import { AppearancePage } from "./AppearancePage"; diff --git a/site/src/pages/UserSettingsPage/SSHKeysPage/SSHKeysPage.test.tsx b/site/src/pages/UserSettingsPage/SSHKeysPage/SSHKeysPage.test.tsx index daa03d50ea839..c6e706f98e769 100644 --- a/site/src/pages/UserSettingsPage/SSHKeysPage/SSHKeysPage.test.tsx +++ b/site/src/pages/UserSettingsPage/SSHKeysPage/SSHKeysPage.test.tsx @@ -1,5 +1,5 @@ import { fireEvent, screen, within } from "@testing-library/react"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockGitSSHKey, mockApiError } from "testHelpers/entities"; import { renderWithAuth } from "testHelpers/renderHelpers"; import { 
Language as SSHKeysPageLanguage, SSHKeysPage } from "./SSHKeysPage"; diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx index f2f0c73c4d7c9..8289da7ee9e5b 100644 --- a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.test.tsx @@ -1,6 +1,6 @@ import { fireEvent, screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as API from "api/api"; +import { API } from "api/api"; import type { OAuthConversionResponse } from "api/typesGenerated"; import { MockAuthMethodsAll, mockApiError } from "testHelpers/entities"; import { diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.tsx index f0191310656c5..b3cb38969f1c0 100644 --- a/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SecurityPage.tsx @@ -1,6 +1,6 @@ import type { ComponentProps, FC } from "react"; import { useMutation, useQuery } from "react-query"; -import { getUserLoginType } from "api/api"; +import { API } from "api/api"; import { authMethods, updatePassword } from "api/queries/users"; import { displaySuccess } from "components/GlobalSnackbar/utils"; import { Loader } from "components/Loader/Loader"; @@ -19,7 +19,7 @@ export const SecurityPage: FC = () => { const authMethodsQuery = useQuery(authMethods()); const { data: userLoginType } = useQuery({ queryKey: ["loginType"], - queryFn: getUserLoginType, + queryFn: API.getUserLoginType, }); const singleSignOnSection = useSingleSignOnSection(); diff --git a/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx b/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx index f2c14dcd45762..78d7cfb0cb23f 100644 --- 
a/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx +++ b/site/src/pages/UserSettingsPage/SecurityPage/SingleSignOnSection.tsx @@ -7,7 +7,7 @@ import Link from "@mui/material/Link"; import TextField from "@mui/material/TextField"; import { type FC, useState } from "react"; import { useMutation } from "react-query"; -import { convertToOAUTH } from "api/api"; +import { API } from "api/api"; import { getErrorMessage } from "api/errors"; import type { AuthMethods, @@ -52,7 +52,7 @@ export const useSingleSignOnSection = () => { const [loginTypeConfirmation, setLoginTypeConfirmation] = useState({ open: false, selectedType: undefined }); - const mutation = useMutation(convertToOAUTH, { + const mutation = useMutation(API.convertToOAUTH, { onSuccess: (data) => { const loginTypeMsg = data.to_type === "github" ? "Github" : "OpenID Connect"; diff --git a/site/src/pages/UserSettingsPage/TokensPage/hooks.ts b/site/src/pages/UserSettingsPage/TokensPage/hooks.ts index a92252ecc8b8a..9909888dd0494 100644 --- a/site/src/pages/UserSettingsPage/TokensPage/hooks.ts +++ b/site/src/pages/UserSettingsPage/TokensPage/hooks.ts @@ -4,7 +4,7 @@ import { useQuery, useQueryClient, } from "react-query"; -import { getTokens, deleteToken } from "api/api"; +import { API } from "api/api"; import type { TokensFilter } from "api/typesGenerated"; // Load all tokens @@ -12,10 +12,7 @@ export const useTokensData = ({ include_all }: TokensFilter) => { const queryKey = ["tokens", include_all]; const result = useQuery({ queryKey, - queryFn: () => - getTokens({ - include_all, - }), + queryFn: () => API.getTokens({ include_all }), }); return { @@ -29,7 +26,7 @@ export const useDeleteToken = (queryKey: QueryKey) => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: deleteToken, + mutationFn: API.deleteToken, onSuccess: () => { // Invalidate and refetch void queryClient.invalidateQueries(queryKey); diff --git a/site/src/pages/UsersPage/UsersPage.test.tsx 
b/site/src/pages/UsersPage/UsersPage.test.tsx index 7ed98850dc401..ebc5e24a5e6b6 100644 --- a/site/src/pages/UsersPage/UsersPage.test.tsx +++ b/site/src/pages/UsersPage/UsersPage.test.tsx @@ -1,7 +1,7 @@ import { fireEvent, screen, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { HttpResponse, http } from "msw"; -import * as API from "api/api"; +import { API } from "api/api"; import type { Role } from "api/typesGenerated"; import { MockUser, diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx index e358c50954d03..db5628bfc0bb3 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor } from "@testing-library/react"; import WS from "jest-websocket-mock"; -import * as API from "api/api"; +import { API } from "api/api"; import { MockWorkspace, MockWorkspaceAgent, diff --git a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx index 13ec3028248bb..bc3a914a10bb1 100644 --- a/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx +++ b/site/src/pages/WorkspaceBuildPage/WorkspaceBuildPage.tsx @@ -3,7 +3,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { useParams } from "react-router-dom"; -import { getWorkspaceBuilds } from "api/api"; +import { API } from "api/api"; import { workspaceBuildByNumber } from "api/queries/workspaceBuilds"; import { useWorkspaceBuildLogs } from "hooks/useWorkspaceBuildLogs"; import { pageTitle } from "utils/page"; @@ -26,7 +26,7 @@ export const WorkspaceBuildPage: FC = () => { const buildsQuery = useQuery({ queryKey: ["builds", username, build?.workspace_id], queryFn: () => { - return getWorkspaceBuilds(build?.workspace_id ?? 
"", { + return API.getWorkspaceBuilds(build?.workspace_id ?? "", { since: dayjs().add(-30, "day").toISOString(), }); }, diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx index e203ccd671366..5916557a1c409 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/BuildParametersPopover.tsx @@ -5,7 +5,7 @@ import visuallyHidden from "@mui/utils/visuallyHidden"; import { useFormik } from "formik"; import type { FC } from "react"; import { useQuery } from "react-query"; -import { getWorkspaceParameters } from "api/api"; +import { API } from "api/api"; import type { TemplateVersionParameter, Workspace, @@ -49,7 +49,7 @@ export const BuildParametersPopover: FC = ({ }) => { const { data: parameters } = useQuery({ queryKey: ["workspace", workspace.id, "parameters"], - queryFn: () => getWorkspaceParameters(workspace), + queryFn: () => API.getWorkspaceParameters(workspace), }); const ephemeralParameters = parameters ? 
parameters.templateVersionRichParameters.filter((p) => p.ephemeral) diff --git a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx index 0a69834992638..9766d76f692a3 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx @@ -1,7 +1,7 @@ import { screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { HttpResponse, http } from "msw"; -import * as api from "api/api"; +import * as apiModule from "api/api"; import type { TemplateVersionParameter, Workspace } from "api/typesGenerated"; import EventSourceMock from "eventsourcemock"; import { @@ -22,16 +22,18 @@ import { renderWithAuth } from "testHelpers/renderHelpers"; import { server } from "testHelpers/server"; import { WorkspacePage } from "./WorkspacePage"; +const { API, MissingBuildParameters } = apiModule; + // Renders the workspace page and waits for it be loaded const renderWorkspacePage = async (workspace: Workspace) => { - jest.spyOn(api, "getWorkspaceByOwnerAndName").mockResolvedValue(workspace); - jest.spyOn(api, "getTemplate").mockResolvedValueOnce(MockTemplate); - jest.spyOn(api, "getTemplateVersionRichParameters").mockResolvedValueOnce([]); + jest.spyOn(API, "getWorkspaceByOwnerAndName").mockResolvedValue(workspace); + jest.spyOn(API, "getTemplate").mockResolvedValueOnce(MockTemplate); + jest.spyOn(API, "getTemplateVersionRichParameters").mockResolvedValueOnce([]); jest - .spyOn(api, "getDeploymentConfig") + .spyOn(API, "getDeploymentConfig") .mockResolvedValueOnce(MockDeploymentConfig); jest - .spyOn(api, "watchWorkspaceAgentLogs") + .spyOn(apiModule, "watchWorkspaceAgentLogs") .mockImplementation((_, options) => { options.onDone?.(); return new WebSocket(""); @@ -87,7 +89,7 @@ describe("WorkspacePage", () => { it("requests a delete job when the user presses Delete and confirms", async () => { const user = 
userEvent.setup({ delay: 0 }); const deleteWorkspaceMock = jest - .spyOn(api, "deleteWorkspace") + .spyOn(API, "deleteWorkspace") .mockResolvedValueOnce(MockWorkspaceBuild); await renderWorkspacePage(MockWorkspace); @@ -127,7 +129,7 @@ describe("WorkspacePage", () => { ); const deleteWorkspaceMock = jest - .spyOn(api, "deleteWorkspace") + .spyOn(API, "deleteWorkspace") .mockResolvedValueOnce(MockWorkspaceBuildDelete); await renderWorkspacePage(MockFailedWorkspace); @@ -173,7 +175,7 @@ describe("WorkspacePage", () => { ); const startWorkspaceMock = jest - .spyOn(api, "startWorkspace") + .spyOn(API, "startWorkspace") .mockImplementation(() => Promise.resolve(MockWorkspaceBuild)); await testButton(MockStoppedWorkspace, "Start", startWorkspaceMock); @@ -181,7 +183,7 @@ describe("WorkspacePage", () => { it("requests a stop job when the user presses Stop", async () => { const stopWorkspaceMock = jest - .spyOn(api, "stopWorkspace") + .spyOn(API, "stopWorkspace") .mockResolvedValueOnce(MockWorkspaceBuild); await testButton(MockWorkspace, "Stop", stopWorkspaceMock); @@ -189,7 +191,7 @@ describe("WorkspacePage", () => { it("requests a stop when the user presses Restart", async () => { const stopWorkspaceMock = jest - .spyOn(api, "stopWorkspace") + .spyOn(API, "stopWorkspace") .mockResolvedValueOnce(MockWorkspaceBuild); // Render @@ -215,7 +217,7 @@ describe("WorkspacePage", () => { ); const cancelWorkspaceMock = jest - .spyOn(api, "cancelWorkspaceBuild") + .spyOn(API, "cancelWorkspaceBuild") .mockImplementation(() => Promise.resolve({ message: "job canceled" })); await testButton(MockStartingWorkspace, "Cancel", cancelWorkspaceMock); @@ -224,11 +226,11 @@ describe("WorkspacePage", () => { it("requests an update when the user presses Update", async () => { // Mocks jest - .spyOn(api, "getWorkspaceByOwnerAndName") + .spyOn(API, "getWorkspaceByOwnerAndName") .mockResolvedValueOnce(MockOutdatedWorkspace); const updateWorkspaceMock = jest - .spyOn(api, "updateWorkspace") + 
.spyOn(API, "updateWorkspace") .mockResolvedValueOnce(MockWorkspaceBuild); // Render @@ -249,12 +251,12 @@ describe("WorkspacePage", () => { it("updates the parameters when they are missing during update", async () => { // Mocks jest - .spyOn(api, "getWorkspaceByOwnerAndName") + .spyOn(API, "getWorkspaceByOwnerAndName") .mockResolvedValueOnce(MockOutdatedWorkspace); const updateWorkspaceSpy = jest - .spyOn(api, "updateWorkspace") + .spyOn(API, "updateWorkspace") .mockRejectedValueOnce( - new api.MissingBuildParameters( + new MissingBuildParameters( [MockTemplateVersionParameter1, MockTemplateVersionParameter2], MockOutdatedWorkspace.template_active_version_id, ), @@ -271,7 +273,7 @@ describe("WorkspacePage", () => { // The update was called await waitFor(() => { - expect(api.updateWorkspace).toBeCalled(); + expect(API.updateWorkspace).toBeCalled(); updateWorkspaceSpy.mockClear(); }); @@ -294,7 +296,7 @@ describe("WorkspacePage", () => { // Check if the update was called using the values from the form await waitFor(() => { - expect(api.updateWorkspace).toBeCalledWith(MockOutdatedWorkspace, [ + expect(API.updateWorkspace).toBeCalledWith(MockOutdatedWorkspace, [ { name: MockTemplateVersionParameter1.name, value: "some-value", @@ -309,7 +311,7 @@ describe("WorkspacePage", () => { it("restart the workspace with one time parameters when having the confirmation dialog", async () => { localStorage.removeItem(`${MockUser.id}_ignoredWarnings`); - jest.spyOn(api, "getWorkspaceParameters").mockResolvedValue({ + jest.spyOn(API, "getWorkspaceParameters").mockResolvedValue({ templateVersionRichParameters: [ { ...MockTemplateVersionParameter1, @@ -321,7 +323,7 @@ describe("WorkspacePage", () => { ], buildParameters: [{ name: "rebuild", value: "false" }], }); - const restartWorkspaceSpy = jest.spyOn(api, "restartWorkspace"); + const restartWorkspaceSpy = jest.spyOn(API, "restartWorkspace"); const user = userEvent.setup(); await renderWorkspacePage(MockWorkspace); await 
user.click(screen.getByTestId("build-parameters-button")); @@ -351,7 +353,7 @@ describe("WorkspacePage", () => { const retryDebugButtonRe = /^Debug$/i; describe("Retries a failed 'Start' transition", () => { - const mockStart = jest.spyOn(api, "startWorkspace"); + const mockStart = jest.spyOn(API, "startWorkspace"); const failedStart: Workspace = { ...MockFailedWorkspace, latest_build: { @@ -384,7 +386,7 @@ describe("WorkspacePage", () => { }); describe("Retries a failed 'Stop' transition", () => { - const mockStop = jest.spyOn(api, "stopWorkspace"); + const mockStop = jest.spyOn(API, "stopWorkspace"); const failedStop: Workspace = { ...MockFailedWorkspace, latest_build: { @@ -405,7 +407,7 @@ describe("WorkspacePage", () => { }); describe("Retries a failed 'Delete' transition", () => { - const mockDelete = jest.spyOn(api, "deleteWorkspace"); + const mockDelete = jest.spyOn(API, "deleteWorkspace"); const failedDelete: Workspace = { ...MockFailedWorkspace, latest_build: { @@ -450,7 +452,7 @@ describe("WorkspacePage", () => { return HttpResponse.json([parameter]); }), ); - const startWorkspaceSpy = jest.spyOn(api, "startWorkspace"); + const startWorkspaceSpy = jest.spyOn(API, "startWorkspace"); await renderWorkspacePage(workspace); const retryWithBuildParametersButton = await screen.findByRole("button", { @@ -496,7 +498,7 @@ describe("WorkspacePage", () => { return HttpResponse.json([parameter]); }), ); - const startWorkspaceSpy = jest.spyOn(api, "startWorkspace"); + const startWorkspaceSpy = jest.spyOn(API, "startWorkspace"); await renderWorkspacePage(workspace); const retryWithBuildParametersButton = await screen.findByRole("button", { diff --git a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx index e460c7163c1e6..f3750051823ff 100644 --- a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx @@ -3,7 +3,7 @@ import { type FC, useEffect, useState } 
from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { useNavigate } from "react-router-dom"; -import { MissingBuildParameters, restartWorkspace } from "api/api"; +import { MissingBuildParameters, API } from "api/api"; import { getErrorMessage } from "api/errors"; import { buildInfo } from "api/queries/buildInfo"; import { deploymentConfig, deploymentSSHConfig } from "api/queries/deployment"; @@ -83,7 +83,7 @@ export const WorkspaceReadyPage: FC = ({ }>({ open: false }); const { mutate: mutateRestartWorkspace, isLoading: isRestarting } = useMutation({ - mutationFn: restartWorkspace, + mutationFn: API.restartWorkspace, }); // SSH Prefix diff --git a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx index 71e4174499305..07c13a10122c4 100644 --- a/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceScheduleControls.test.tsx @@ -5,7 +5,7 @@ import { HttpResponse, http } from "msw"; import type { FC } from "react"; import { QueryClient, QueryClientProvider, useQuery } from "react-query"; import { RouterProvider, createMemoryRouter } from "react-router-dom"; -import * as API from "api/api"; +import { API } from "api/api"; import { workspaceByOwnerAndName } from "api/queries/workspaces"; import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; import { ThemeProvider } from "contexts/ThemeProvider"; diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx index 99b4f692d1db2..af15a4423a44a 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.test.tsx @@ 
-1,6 +1,6 @@ import { screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as api from "api/api"; +import { API } from "api/api"; import { MockWorkspace, MockTemplateVersionParameter1, @@ -20,15 +20,15 @@ import WorkspaceParametersPage from "./WorkspaceParametersPage"; test("Submit the workspace settings page successfully", async () => { // Mock the API calls that loads data jest - .spyOn(api, "getWorkspaceByOwnerAndName") + .spyOn(API, "getWorkspaceByOwnerAndName") .mockResolvedValueOnce(MockWorkspace); - jest.spyOn(api, "getTemplateVersionRichParameters").mockResolvedValueOnce([ + jest.spyOn(API, "getTemplateVersionRichParameters").mockResolvedValueOnce([ MockTemplateVersionParameter1, MockTemplateVersionParameter2, // Immutable parameters MockTemplateVersionParameter4, ]); - jest.spyOn(api, "getWorkspaceBuildParameters").mockResolvedValueOnce([ + jest.spyOn(API, "getWorkspaceBuildParameters").mockResolvedValueOnce([ MockWorkspaceBuildParameter1, MockWorkspaceBuildParameter2, // Immutable value @@ -36,7 +36,7 @@ test("Submit the workspace settings page successfully", async () => { ]); // Mock the API calls that submit data const postWorkspaceBuildSpy = jest - .spyOn(api, "postWorkspaceBuild") + .spyOn(API, "postWorkspaceBuild") .mockResolvedValue(MockWorkspaceBuild); // Setup event and rendering const user = userEvent.setup(); diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx index c10accb30e9a0..7da0fc203d401 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceParametersPage/WorkspaceParametersPage.tsx @@ -4,7 +4,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery } from "react-query"; import { 
useNavigate } from "react-router-dom"; -import { getWorkspaceParameters, postWorkspaceBuild } from "api/api"; +import { API } from "api/api"; import { isApiValidationError } from "api/errors"; import { checkAuthorization } from "api/queries/authCheck"; import { templateByName } from "api/queries/templates"; @@ -29,12 +29,12 @@ const WorkspaceParametersPage: FC = () => { const workspace = useWorkspaceSettings(); const parameters = useQuery({ queryKey: ["workspace", workspace.id, "parameters"], - queryFn: () => getWorkspaceParameters(workspace), + queryFn: () => API.getWorkspaceParameters(workspace), }); const navigate = useNavigate(); const updateParameters = useMutation({ mutationFn: (buildParameters: WorkspaceBuildParameter[]) => - postWorkspaceBuild(workspace.id, { + API.postWorkspaceBuild(workspace.id, { transition: "start", rich_parameter_values: buildParameters, }), @@ -93,7 +93,7 @@ const WorkspaceParametersPage: FC = () => { export type WorkspaceParametersPageViewProps = { workspace: Workspace; canChangeVersions: boolean; - data: Awaited> | undefined; + data: Awaited> | undefined; submitError: unknown; isSubmitting: boolean; onSubmit: (formValues: WorkspaceParametersFormValues) => void; diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx index 870a28b4f2f0b..dd5269758bb41 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceScheduleForm.test.tsx @@ -1,5 +1,5 @@ import { screen } from "@testing-library/react"; -import * as API from "api/api"; +import { API } from "api/api"; import { defaultSchedule } from "pages/WorkspaceSettingsPage/WorkspaceSchedulePage/schedule"; import { MockTemplate } from "testHelpers/entities"; import { render } from "testHelpers/renderHelpers"; diff --git 
a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx index 7830a161c879e..79b14bec16184 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSchedulePage/WorkspaceSchedulePage.tsx @@ -3,11 +3,7 @@ import { type FC, useState } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation, useQuery, useQueryClient } from "react-query"; import { useNavigate, useParams } from "react-router-dom"; -import { - putWorkspaceAutostart, - putWorkspaceAutostop, - startWorkspace, -} from "api/api"; +import { API } from "api/api"; import { checkAuthorization } from "api/queries/authCheck"; import { templateByName } from "api/queries/templates"; import { workspaceByOwnerAndNameKey } from "api/queries/workspaces"; @@ -72,7 +68,7 @@ export const WorkspaceSchedulePage: FC = () => { const [isConfirmingApply, setIsConfirmingApply] = useState(false); const { mutate: updateWorkspace } = useMutation({ mutationFn: () => - startWorkspace(workspace.id, workspace.template_active_version_id), + API.startWorkspace(workspace.id, workspace.template_active_version_id), }); return ( @@ -167,11 +163,11 @@ const submitSchedule = async (data: SubmitScheduleData) => { const actions: Promise[] = []; if (autostartChanged) { - actions.push(putWorkspaceAutostart(workspace.id, autostart)); + actions.push(API.putWorkspaceAutostart(workspace.id, autostart)); } if (autostopChanged) { - actions.push(putWorkspaceAutostop(workspace.id, ttl)); + actions.push(API.putWorkspaceAutostop(workspace.id, ttl)); } return Promise.all(actions); diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx index 4fa1bc8a4d536..a7ce4d63c897d 100644 --- 
a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.test.tsx @@ -1,6 +1,6 @@ import { screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; -import * as api from "api/api"; +import { API } from "api/api"; import { MockWorkspace } from "testHelpers/entities"; import { renderWithWorkspaceSettingsLayout, @@ -11,11 +11,11 @@ import WorkspaceSettingsPage from "./WorkspaceSettingsPage"; test("Submit the workspace settings page successfully", async () => { // Mock the API calls that loads data jest - .spyOn(api, "getWorkspaceByOwnerAndName") + .spyOn(API, "getWorkspaceByOwnerAndName") .mockResolvedValueOnce({ ...MockWorkspace }); // Mock the API calls that submit data const patchWorkspaceSpy = jest - .spyOn(api, "patchWorkspace") + .spyOn(API, "patchWorkspace") .mockResolvedValue(); // Setup event and rendering const user = userEvent.setup(); @@ -43,7 +43,7 @@ test("Submit the workspace settings page successfully", async () => { test("Name field is disabled if renames are disabled", async () => { // Mock the API calls that loads data jest - .spyOn(api, "getWorkspaceByOwnerAndName") + .spyOn(API, "getWorkspaceByOwnerAndName") .mockResolvedValueOnce({ ...MockWorkspace, allow_renames: false }); renderWithWorkspaceSettingsLayout(, { route: "/@test-user/test-workspace/settings", diff --git a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx index e289a58c5ce59..09bf002fd7cb9 100644 --- a/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx +++ b/site/src/pages/WorkspaceSettingsPage/WorkspaceSettingsPage.tsx @@ -2,7 +2,7 @@ import type { FC } from "react"; import { Helmet } from "react-helmet-async"; import { useMutation } from "react-query"; import { useNavigate, useParams } from "react-router-dom"; -import { patchWorkspace, 
updateWorkspaceAutomaticUpdates } from "api/api"; +import { API } from "api/api"; import { displaySuccess } from "components/GlobalSnackbar/utils"; import { pageTitle } from "utils/page"; import type { WorkspaceSettingsFormValues } from "./WorkspaceSettingsForm"; @@ -22,8 +22,8 @@ const WorkspaceSettingsPage: FC = () => { const mutation = useMutation({ mutationFn: async (formValues: WorkspaceSettingsFormValues) => { await Promise.all([ - patchWorkspace(workspace.id, { name: formValues.name }), - updateWorkspaceAutomaticUpdates( + API.patchWorkspace(workspace.id, { name: formValues.name }), + API.updateWorkspaceAutomaticUpdates( workspace.id, formValues.automatic_updates, ), diff --git a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx index de19212bcbefa..f5ea3589e2af4 100644 --- a/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx +++ b/site/src/pages/WorkspacesPage/BatchUpdateConfirmation.tsx @@ -7,7 +7,7 @@ import dayjs from "dayjs"; import relativeTime from "dayjs/plugin/relativeTime"; import { type FC, type ReactNode, useMemo, useState, useEffect } from "react"; import { useQueries } from "react-query"; -import { getTemplateVersion } from "api/api"; +import { API } from "api/api"; import type { TemplateVersion, Workspace } from "api/typesGenerated"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { ConfirmDialog } from "components/Dialogs/ConfirmDialog/ConfirmDialog"; @@ -129,7 +129,7 @@ export const BatchUpdateConfirmation: FC = ({ // ...but the query _also_ doesn't have everything we need, like the // template display name! 
...version, - ...(await getTemplateVersion(version.id)), + ...(await API.getTemplateVersion(version.id)), }), })), }); diff --git a/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx b/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx index 5b0dc1b2a959e..9c152b1ac0534 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPage.test.tsx @@ -1,7 +1,7 @@ import { screen, waitFor, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { HttpResponse, http } from "msw"; -import * as API from "api/api"; +import { API } from "api/api"; import type { Workspace } from "api/typesGenerated"; import { MockStoppedWorkspace, diff --git a/site/src/pages/WorkspacesPage/batchActions.tsx b/site/src/pages/WorkspacesPage/batchActions.tsx index a9e3eb1cf4c7c..38819cdf60c88 100644 --- a/site/src/pages/WorkspacesPage/batchActions.tsx +++ b/site/src/pages/WorkspacesPage/batchActions.tsx @@ -1,12 +1,5 @@ import { useMutation } from "react-query"; -import { - deleteWorkspace, - deleteFavoriteWorkspace, - putFavoriteWorkspace, - startWorkspace, - stopWorkspace, - updateWorkspace, -} from "api/api"; +import { API } from "api/api"; import type { Workspace } from "api/typesGenerated"; import { displayError } from "components/GlobalSnackbar/utils"; @@ -21,7 +14,7 @@ export function useBatchActions(options: UseBatchActionsProps) { mutationFn: (workspaces: readonly Workspace[]) => { return Promise.all( workspaces.map((w) => - startWorkspace(w.id, w.latest_build.template_version_id), + API.startWorkspace(w.id, w.latest_build.template_version_id), ), ); }, @@ -33,7 +26,7 @@ export function useBatchActions(options: UseBatchActionsProps) { const stopAllMutation = useMutation({ mutationFn: (workspaces: readonly Workspace[]) => { - return Promise.all(workspaces.map((w) => stopWorkspace(w.id))); + return Promise.all(workspaces.map((w) => API.stopWorkspace(w.id))); }, onSuccess, onError: () => { 
@@ -43,7 +36,7 @@ export function useBatchActions(options: UseBatchActionsProps) { const deleteAllMutation = useMutation({ mutationFn: (workspaces: readonly Workspace[]) => { - return Promise.all(workspaces.map((w) => deleteWorkspace(w.id))); + return Promise.all(workspaces.map((w) => API.deleteWorkspace(w.id))); }, onSuccess, onError: () => { @@ -56,7 +49,7 @@ export function useBatchActions(options: UseBatchActionsProps) { return Promise.all( workspaces .filter((w) => w.outdated && !w.dormant_at) - .map((w) => updateWorkspace(w)), + .map((w) => API.updateWorkspace(w)), ); }, onSuccess, @@ -70,7 +63,7 @@ export function useBatchActions(options: UseBatchActionsProps) { return Promise.all( workspaces .filter((w) => !w.favorite) - .map((w) => putFavoriteWorkspace(w.id)), + .map((w) => API.putFavoriteWorkspace(w.id)), ); }, onSuccess, @@ -84,7 +77,7 @@ export function useBatchActions(options: UseBatchActionsProps) { return Promise.all( workspaces .filter((w) => w.favorite) - .map((w) => deleteFavoriteWorkspace(w.id)), + .map((w) => API.deleteFavoriteWorkspace(w.id)), ); }, onSuccess, diff --git a/site/src/pages/WorkspacesPage/data.ts b/site/src/pages/WorkspacesPage/data.ts index a785d00d03122..e1b8eec25ccb3 100644 --- a/site/src/pages/WorkspacesPage/data.ts +++ b/site/src/pages/WorkspacesPage/data.ts @@ -5,7 +5,7 @@ import { useQuery, useQueryClient, } from "react-query"; -import { getWorkspaces, updateWorkspaceVersion } from "api/api"; +import { API } from "api/api"; import { getErrorMessage } from "api/errors"; import type { Workspace, @@ -30,7 +30,7 @@ export const useWorkspacesData = ({ const result = useQuery({ queryKey, queryFn: () => - getWorkspaces({ + API.getWorkspaces({ q: query, limit: limit, offset: page <= 0 ? 
0 : (page - 1) * limit, @@ -54,7 +54,7 @@ export const useWorkspaceUpdate = (queryKey: QueryKey) => { const queryClient = useQueryClient(); return useMutation({ - mutationFn: updateWorkspaceVersion, + mutationFn: API.updateWorkspaceVersion, onMutate: async (workspace) => { await queryClient.cancelQueries({ queryKey }); queryClient.setQueryData(queryKey, (oldResponse) => { diff --git a/site/src/pages/WorkspacesPage/filter/menus.ts b/site/src/pages/WorkspacesPage/filter/menus.ts index a8db56dd5a226..f8b6755f50e82 100644 --- a/site/src/pages/WorkspacesPage/filter/menus.ts +++ b/site/src/pages/WorkspacesPage/filter/menus.ts @@ -1,4 +1,4 @@ -import { getTemplates } from "api/api"; +import { API } from "api/api"; import type { WorkspaceStatus } from "api/typesGenerated"; import { useFilterMenu, @@ -21,7 +21,7 @@ export const useTemplateFilterMenu = ({ id: "template", getSelectedOption: async () => { // Show all templates including deprecated - const templates = await getTemplates(organizationId); + const templates = await API.getTemplates(organizationId); const template = templates.find((template) => template.name === value); if (template) { return { @@ -37,7 +37,7 @@ export const useTemplateFilterMenu = ({ }, getOptions: async (query) => { // Show all templates including deprecated - const templates = await getTemplates(organizationId); + const templates = await API.getTemplates(organizationId); const filteredTemplates = templates.filter( (template) => template.name.toLowerCase().includes(query.toLowerCase()) || diff --git a/site/src/utils/terminal.ts b/site/src/utils/terminal.ts index d27a6efce379c..82c98a370a51f 100644 --- a/site/src/utils/terminal.ts +++ b/site/src/utils/terminal.ts @@ -1,4 +1,4 @@ -import * as API from "api/api"; +import { API } from "api/api"; export const terminalWebsocketUrl = async ( baseUrl: string | undefined, From 228b99d9c211dea28af0b56a9bd2f6256e2069e2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 May 2024 23:33:37 +0300 Subject: [PATCH 048/149] chore: bump google.golang.org/protobuf from 1.33.0 to 1.34.1 (#13236) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 1e268194ca384..66e057159ea53 100644 --- a/go.mod +++ b/go.mod @@ -202,7 +202,7 @@ require ( golang.zx2c4.com/wireguard v0.0.0-20230704135630-469159ecf7d1 google.golang.org/api v0.176.1 google.golang.org/grpc v1.63.2 - google.golang.org/protobuf v1.33.0 + google.golang.org/protobuf v1.34.1 gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v3 v3.0.1 diff --git a/go.sum b/go.sum index 320ed2e57d690..bc2ed510434ab 100644 --- a/go.sum +++ b/go.sum @@ -1192,8 +1192,8 @@ google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp0 google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= -google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= +google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= +google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 h1:XKO91GwTjpIRhd56Xif/BZ2YgHkQufVTOvtkbRYSPi8= gopkg.in/DataDog/dd-trace-go.v1 v1.61.0/go.mod h1:NHKX1t9eKmajySb6H+zLdgZizCFzbt5iKvrTyxEyy8w= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From 4e7381341ffe582dc8e6d95fe0f7011173e77bd2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 May 2024 23:34:59 +0300 Subject: [PATCH 049/149] chore: bump google.golang.org/api from 0.176.1 to 0.180.0 (#13235) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 10 +++++----- go.sum | 24 ++++++++++++------------ 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/go.mod b/go.mod index 66e057159ea53..3a7371ce3e4dd 100644 --- a/go.mod +++ b/go.mod @@ -192,7 +192,7 @@ require ( golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 golang.org/x/mod v0.17.0 golang.org/x/net v0.25.0 - golang.org/x/oauth2 v0.19.0 + golang.org/x/oauth2 v0.20.0 golang.org/x/sync v0.7.0 golang.org/x/sys v0.20.0 golang.org/x/term v0.20.0 @@ -200,7 +200,7 @@ require ( golang.org/x/tools v0.21.0 golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 golang.zx2c4.com/wireguard v0.0.0-20230704135630-469159ecf7d1 - google.golang.org/api v0.176.1 + google.golang.org/api v0.180.0 google.golang.org/grpc v1.63.2 google.golang.org/protobuf v1.34.1 gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 @@ -222,7 +222,7 @@ require ( ) require ( - cloud.google.com/go/auth v0.3.0 // indirect + cloud.google.com/go/auth v0.4.1 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect github.com/DataDog/go-libddwaf/v2 v2.3.1 // indirect github.com/alecthomas/chroma/v2 v2.13.0 // indirect @@ -429,8 +429,8 @@ require ( golang.zx2c4.com/wireguard/windows v0.5.3 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect 
howett.net/plist v1.0.0 // indirect inet.af/peercred v0.0.0-20210906144145-0893ea02156a // indirect diff --git a/go.sum b/go.sum index bc2ed510434ab..31c895cdc358c 100644 --- a/go.sum +++ b/go.sum @@ -1,8 +1,8 @@ cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6 h1:KHblWIE/KHOwQ6lEbMZt6YpcGve2FEZ1sDtrW1Am5UI= cdr.dev/slog v1.6.2-0.20240126064726-20367d4aede6/go.mod h1:NaoTA7KwopCrnaSb0JXTC0PTp/O/Y83Lndnq0OEV3ZQ= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/auth v0.3.0 h1:PRyzEpGfx/Z9e8+lHsbkoUVXD0gnu4MNmm7Gp8TQNIs= -cloud.google.com/go/auth v0.3.0/go.mod h1:lBv6NKTWp8E3LPzmO1TbiiRKc4drLOfHsgmlH9ogv5w= +cloud.google.com/go/auth v0.4.1 h1:Z7YNIhlWRtrnKlZke7z3GMqzvuYzdc2z98F9D1NV5Hg= +cloud.google.com/go/auth v0.4.1/go.mod h1:QVBuVEKpCn4Zp58hzRGvL0tjRGU0YqdRTdCHM1IHnro= cloud.google.com/go/auth/oauth2adapt v0.2.2 h1:+TTV8aXpjeChS9M+aTtN/TjdQnzJvmzKFt//oWu7HX4= cloud.google.com/go/auth/oauth2adapt v0.2.2/go.mod h1:wcYjgpZI9+Yu7LyYBg4pqSiaRkfEK3GQcpb7C/uyF1Q= cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2QxYC4trgAKZc= @@ -490,8 +490,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= -github.com/googleapis/gax-go/v2 v2.12.3 h1:5/zPPDvw8Q1SuXjrqrZslrqT7dL/uJT2CQii/cLCKqA= -github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4= +github.com/googleapis/gax-go/v2 v2.12.4 h1:9gWcmF85Wvq4ryPFvGFaOgPIs1AQX0d0bcbGw4Z96qg= +github.com/googleapis/gax-go/v2 v2.12.4/go.mod h1:KYEYLorsnIGDi/rPC8b5TdlB9kbKoFubselGIoBMCwI= github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY= github.com/gorilla/css 
v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= @@ -1047,8 +1047,8 @@ golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.19.0 h1:9+E/EZBCbTLNrbN35fHv/a/d/mOBatymz1zbtQrXpIg= -golang.org/x/oauth2 v0.19.0/go.mod h1:vYi7skDa1x015PmRRYZ7+s1cWyPgrPiSYRe4rnsexc8= +golang.org/x/oauth2 v0.20.0 h1:4mQdhULixXKP1rwYBW0vAijoXnkTG0BLCDRzfe1idMo= +golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1156,8 +1156,8 @@ golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 h1:CawjfCvY golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6/go.mod h1:3rxYc4HtVcSG9gVaTs2GEBdehh+sYPOwKtyUWEOTb80= golang.zx2c4.com/wireguard/windows v0.5.3 h1:On6j2Rpn3OEMXqBq00QEDC7bWSZrPIHKIus8eIuExIE= golang.zx2c4.com/wireguard/windows v0.5.3/go.mod h1:9TEe8TJmtwyQebdFwAkEWOPr3prrtqm+REGFifP60hI= -google.golang.org/api v0.176.1 h1:DJSXnV6An+NhJ1J+GWtoF2nHEuqB1VNoTfnIbjNvwD4= -google.golang.org/api v0.176.1/go.mod h1:j2MaSDYcvYV1lkZ1+SMW4IeF90SrEyFA+tluDYWRrFg= +google.golang.org/api v0.180.0 h1:M2D87Yo0rGBPWpo1orwfCLehUUL6E7/TYe5gvMQWDh4= +google.golang.org/api v0.180.0/go.mod h1:51AiyoEg1MJPSZ9zvklA8VnRILPXxn1iVen9v25XHAE= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= @@ -1168,10 +1168,10 @@ google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98 google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo= -google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 h1:rIo7ocm2roD9DcFIX67Ym8icoGCKSARAiPljFhh5suQ= -google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be h1:LG9vZxsWGOmUKieR8wPAUR3u3MpnYFQZROPIMaXh7/A= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240415180920-8c6c420018be/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c h1:kaI7oewGK5YnVwj+Y+EJBO/YN1ht8iTL9XkFHtVZLsc= +google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c/go.mod h1:VQW3tUculP/D4B+xVCo+VgSq8As6wA9ZjHl//pmk+6s= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6 h1:DujSIu+2tC9Ht0aPNA7jgj23Iq8Ewi5sgkQ++wdvonE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= From 7358c1b1ac66f500b71a52dd1d6c6d6941f80bd8 Mon Sep 17 00:00:00 2001 From: 
Muhammad Atif Ali Date: Mon, 13 May 2024 09:51:47 +0300 Subject: [PATCH 050/149] chore(dogfood): bump module versions to latest (#13246) We should use the latest versions as these are the ones most customers will use. We can automate this with @dependabot once we resolve https://github.com/coder/registry.coder.com/issues/13 --- dogfood/main.tf | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/dogfood/main.tf b/dogfood/main.tf index 794c662fab89b..1656a223ae2a3 100644 --- a/dogfood/main.tf +++ b/dogfood/main.tf @@ -105,13 +105,13 @@ module "slackme" { module "dotfiles" { source = "registry.coder.com/modules/dotfiles/coder" - version = "1.0.2" + version = "1.0.14" agent_id = coder_agent.dev.id } module "git-clone" { source = "registry.coder.com/modules/git-clone/coder" - version = "1.0.2" + version = "1.0.12" agent_id = coder_agent.dev.id url = "https://github.com/coder/coder" base_dir = local.repo_base_dir @@ -124,20 +124,22 @@ module "personalize" { } module "code-server" { - source = "registry.coder.com/modules/code-server/coder" - version = "1.0.8" - agent_id = coder_agent.dev.id - folder = local.repo_dir + source = "registry.coder.com/modules/code-server/coder" + version = "1.0.14" + agent_id = coder_agent.dev.id + folder = local.repo_dir + auto_install_extensions = true } module "jetbrains_gateway" { source = "registry.coder.com/modules/jetbrains-gateway/coder" - version = "1.0.9" + version = "1.0.13" agent_id = coder_agent.dev.id agent_name = "dev" folder = local.repo_dir jetbrains_ides = ["GO", "WS"] default = "GO" + latest = true } module "filebrowser" { From c41d0efff931ec038f9b8284f284a9cec9b2ae0e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 May 2024 13:01:28 +0000 Subject: [PATCH 051/149] chore: bump github.com/prometheus/client_golang from 1.18.0 to 1.19.1 (#13232) * chore: bump github.com/prometheus/client_golang from 1.18.0 to 1.19.1 --- flake.nix | 4 
++-- go.mod | 4 ++-- go.sum | 8 ++++---- scripts/metricsdocgen/main.go | 2 +- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/flake.nix b/flake.nix index 510a2ea8dad53..577a422cdbbf3 100644 --- a/flake.nix +++ b/flake.nix @@ -95,9 +95,9 @@ buildFat = osArch: pkgs.buildGo121Module { name = "coder-${osArch}"; - # Updated with ./scripts/update-flake.nix`. + # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! - vendorHash = "sha256-pTRr85MtdlsI0iYGAwLAQ3QvtrDR8rDOynYx8FDaRy0="; + vendorHash = "sha256-YOqgW5v7qXfOYcCQECZyJfoewChtQDfRCrTcr7Ui37Y="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index 3a7371ce3e4dd..b9037722ca368 100644 --- a/go.mod +++ b/go.mod @@ -161,9 +161,9 @@ require ( github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e github.com/pkg/sftp v1.13.6 - github.com/prometheus/client_golang v1.18.0 + github.com/prometheus/client_golang v1.19.1 github.com/prometheus/client_model v0.6.0 - github.com/prometheus/common v0.47.0 + github.com/prometheus/common v0.48.0 github.com/quasilyte/go-ruleguard/dsl v0.3.21 github.com/robfig/cron/v3 v3.0.1 github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 diff --git a/go.sum b/go.sum index 31c895cdc358c..ad15d5cc1ca36 100644 --- a/go.sum +++ b/go.sum @@ -773,13 +773,13 @@ github.com/pkg/profile v1.6.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdL github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk= -github.com/prometheus/client_golang v1.18.0/go.mod 
h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA= +github.com/prometheus/client_golang v1.19.1 h1:wZWJDwK+NameRJuPGDhlnFgx8e8HN3XHQeLaYJFJBOE= +github.com/prometheus/client_golang v1.19.1/go.mod h1:mP78NwGzrVks5S2H6ab8+ZZGJLZUq1hoULYBAYBw1Ho= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.6.0 h1:k1v3CzpSRUTrKMppY35TLwPvxHqBu0bYgxZzqGIgaos= github.com/prometheus/client_model v0.6.0/go.mod h1:NTQHnmxFpouOD0DpvP4XujX3CdOAGQPoaGhyTchlyt8= -github.com/prometheus/common v0.47.0 h1:p5Cz0FNHo7SnWOmWmoRozVcjEp0bIVU8cV7OShpjL1k= -github.com/prometheus/common v0.47.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= +github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE= +github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc= github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo= github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo= github.com/quasilyte/go-ruleguard/dsl v0.3.21 h1:vNkC6fC6qMLzCOGbnIHOd5ixUGgTbp3Z4fGnUgULlDA= diff --git a/scripts/metricsdocgen/main.go b/scripts/metricsdocgen/main.go index 8589653172005..26f80232c810b 100644 --- a/scripts/metricsdocgen/main.go +++ b/scripts/metricsdocgen/main.go @@ -64,7 +64,7 @@ func readMetrics() ([]*dto.MetricFamily, error) { var metrics []*dto.MetricFamily - decoder := expfmt.NewDecoder(f, expfmt.FmtProtoText) + decoder := expfmt.NewDecoder(f, expfmt.NewFormat(expfmt.TypeProtoText)) for { var m dto.MetricFamily err = decoder.Decode(&m) From 8412450ae33526d2cd7dace13d57e1e661e6482c Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 13 May 2024 14:13:41 -0300 Subject: [PATCH 052/149] chore(site): fix portforward issue with vite (#13262) --- site/vite.config.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/site/vite.config.ts b/site/vite.config.ts index 
542278416daac..39b3ab3c78b7f 100644 --- a/site/vite.config.ts +++ b/site/vite.config.ts @@ -37,6 +37,7 @@ export default defineConfig({ }, }, server: { + host: "127.0.0.1", port: process.env.PORT ? Number(process.env.PORT) : 8080, headers: { // This header corresponds to "src/api/api.ts"'s hardcoded FE token. From a5a64948cd1e187a27022000694875539fcaa7d3 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 13 May 2024 15:11:01 -0300 Subject: [PATCH 053/149] feat(site): open README links in new tab (#13264) --- site/src/components/Markdown/Markdown.tsx | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/site/src/components/Markdown/Markdown.tsx b/site/src/components/Markdown/Markdown.tsx index d06d2eca7a653..228cd241ccece 100644 --- a/site/src/components/Markdown/Markdown.tsx +++ b/site/src/components/Markdown/Markdown.tsx @@ -37,11 +37,15 @@ export const Markdown: FC = (props) => { className={className} remarkPlugins={[gfm]} components={{ - a: ({ href, target, children }) => ( - - {children} - - ), + a: ({ href, children }) => { + const isExternal = href?.startsWith("http"); + + return ( + + {children} + + ); + }, pre: ({ node, children }) => { if (!node || !node.children) { From ebee9288aeb15ceaba2903303485e1b43034f59c Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Mon, 13 May 2024 14:37:51 -0400 Subject: [PATCH 054/149] fix: properly convert max port share level for oss (#13261) --- coderd/portsharing/portsharing.go | 13 +++++++++---- coderd/templates.go | 9 ++++++--- coderd/templates_test.go | 6 +++--- coderd/workspaceagentportshare.go | 2 +- enterprise/coderd/portsharing/portsharing.go | 8 ++++++-- 5 files changed, 25 insertions(+), 13 deletions(-) diff --git a/coderd/portsharing/portsharing.go b/coderd/portsharing/portsharing.go index 9c05539b5a400..4696ae63c8b10 100644 --- a/coderd/portsharing/portsharing.go +++ b/coderd/portsharing/portsharing.go @@ -8,18 +8,23 @@ import ( ) type PortSharer interface { - 
AuthorizedPortSharingLevel(template database.Template, level codersdk.WorkspaceAgentPortShareLevel) error - ValidateTemplateMaxPortSharingLevel(level codersdk.WorkspaceAgentPortShareLevel) error + AuthorizedLevel(template database.Template, level codersdk.WorkspaceAgentPortShareLevel) error + ValidateTemplateMaxLevel(level codersdk.WorkspaceAgentPortShareLevel) error + ConvertMaxLevel(level database.AppSharingLevel) codersdk.WorkspaceAgentPortShareLevel } type AGPLPortSharer struct{} -func (AGPLPortSharer) AuthorizedPortSharingLevel(_ database.Template, _ codersdk.WorkspaceAgentPortShareLevel) error { +func (AGPLPortSharer) AuthorizedLevel(_ database.Template, _ codersdk.WorkspaceAgentPortShareLevel) error { return nil } -func (AGPLPortSharer) ValidateTemplateMaxPortSharingLevel(_ codersdk.WorkspaceAgentPortShareLevel) error { +func (AGPLPortSharer) ValidateTemplateMaxLevel(_ codersdk.WorkspaceAgentPortShareLevel) error { return xerrors.New("Restricting port sharing level is an enterprise feature that is not enabled.") } +func (AGPLPortSharer) ConvertMaxLevel(_ database.AppSharingLevel) codersdk.WorkspaceAgentPortShareLevel { + return codersdk.WorkspaceAgentPortShareLevelPublic +} + var DefaultPortSharer PortSharer = AGPLPortSharer{} diff --git a/coderd/templates.go b/coderd/templates.go index 7dc4c2050b189..59537b962c21e 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -623,8 +623,8 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { validErrs = append(validErrs, codersdk.ValidationError{Field: "time_til_dormant_autodelete_ms", Detail: "Value must be at least one minute."}) } maxPortShareLevel := template.MaxPortSharingLevel - if req.MaxPortShareLevel != nil && *req.MaxPortShareLevel != codersdk.WorkspaceAgentPortShareLevel(maxPortShareLevel) { - err := portSharer.ValidateTemplateMaxPortSharingLevel(*req.MaxPortShareLevel) + if req.MaxPortShareLevel != nil && *req.MaxPortShareLevel != 
portSharer.ConvertMaxLevel(template.MaxPortSharingLevel) { + err := portSharer.ValidateTemplateMaxLevel(*req.MaxPortShareLevel) if err != nil { validErrs = append(validErrs, codersdk.ValidationError{Field: "max_port_sharing_level", Detail: err.Error()}) } else { @@ -857,6 +857,9 @@ func (api *API) convertTemplate( autostopRequirementWeeks = 1 } + portSharer := *(api.PortSharer.Load()) + maxPortShareLevel := portSharer.ConvertMaxLevel(template.MaxPortSharingLevel) + return codersdk.Template{ ID: template.ID, CreatedAt: template.CreatedAt, @@ -891,6 +894,6 @@ func (api *API) convertTemplate( RequireActiveVersion: templateAccessControl.RequireActiveVersion, Deprecated: templateAccessControl.IsDeprecated(), DeprecationMessage: templateAccessControl.Deprecated, - MaxPortShareLevel: codersdk.WorkspaceAgentPortShareLevel(template.MaxPortSharingLevel), + MaxPortShareLevel: maxPortShareLevel, } } diff --git a/coderd/templates_test.go b/coderd/templates_test.go index 485b39a31de38..01b3462f603c3 100644 --- a/coderd/templates_test.go +++ b/coderd/templates_test.go @@ -600,9 +600,9 @@ func TestPatchTemplateMeta(t *testing.T) { user := coderdtest.CreateFirstUser(t, client) version := coderdtest.CreateTemplateVersion(t, client, user.OrganizationID, nil) template := coderdtest.CreateTemplate(t, client, user.OrganizationID, version.ID) - require.Equal(t, codersdk.WorkspaceAgentPortShareLevelOwner, template.MaxPortShareLevel) + require.Equal(t, codersdk.WorkspaceAgentPortShareLevelPublic, template.MaxPortShareLevel) - var level codersdk.WorkspaceAgentPortShareLevel = codersdk.WorkspaceAgentPortShareLevelPublic + var level codersdk.WorkspaceAgentPortShareLevel = codersdk.WorkspaceAgentPortShareLevelAuthenticated req := codersdk.UpdateTemplateMeta{ MaxPortShareLevel: &level, } @@ -615,7 +615,7 @@ func TestPatchTemplateMeta(t *testing.T) { require.ErrorContains(t, err, "port sharing level is an enterprise feature") // Ensure the same value port share level is a no-op - level = 
codersdk.WorkspaceAgentPortShareLevelOwner + level = codersdk.WorkspaceAgentPortShareLevelPublic _, err = client.UpdateTemplateMeta(ctx, template.ID, codersdk.UpdateTemplateMeta{ Name: coderdtest.RandomUsername(t), MaxPortShareLevel: &level, diff --git a/coderd/workspaceagentportshare.go b/coderd/workspaceagentportshare.go index 12520548045f1..b29f6baa2737c 100644 --- a/coderd/workspaceagentportshare.go +++ b/coderd/workspaceagentportshare.go @@ -69,7 +69,7 @@ func (api *API) postWorkspaceAgentPortShare(rw http.ResponseWriter, r *http.Requ return } - err = portSharer.AuthorizedPortSharingLevel(template, req.ShareLevel) + err = portSharer.AuthorizedLevel(template, req.ShareLevel) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: err.Error(), diff --git a/enterprise/coderd/portsharing/portsharing.go b/enterprise/coderd/portsharing/portsharing.go index 94ff232927d56..6d7c138726e11 100644 --- a/enterprise/coderd/portsharing/portsharing.go +++ b/enterprise/coderd/portsharing/portsharing.go @@ -13,7 +13,7 @@ func NewEnterprisePortSharer() *EnterprisePortSharer { return &EnterprisePortSharer{} } -func (EnterprisePortSharer) AuthorizedPortSharingLevel(template database.Template, level codersdk.WorkspaceAgentPortShareLevel) error { +func (EnterprisePortSharer) AuthorizedLevel(template database.Template, level codersdk.WorkspaceAgentPortShareLevel) error { max := codersdk.WorkspaceAgentPortShareLevel(template.MaxPortSharingLevel) switch level { case codersdk.WorkspaceAgentPortShareLevelPublic: @@ -31,10 +31,14 @@ func (EnterprisePortSharer) AuthorizedPortSharingLevel(template database.Templat return nil } -func (EnterprisePortSharer) ValidateTemplateMaxPortSharingLevel(level codersdk.WorkspaceAgentPortShareLevel) error { +func (EnterprisePortSharer) ValidateTemplateMaxLevel(level codersdk.WorkspaceAgentPortShareLevel) error { if !level.ValidMaxLevel() { return xerrors.New("invalid max port sharing level, value must be 'authenticated' 
or 'public'.") } return nil } + +func (EnterprisePortSharer) ConvertMaxLevel(level database.AppSharingLevel) codersdk.WorkspaceAgentPortShareLevel { + return codersdk.WorkspaceAgentPortShareLevel(level) +} From 9ced0015704e8b0911786a8237159035e28eee7d Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Mon, 13 May 2024 13:46:01 -0500 Subject: [PATCH 055/149] chore: add multi-org experiment for UI view toggling (#13260) * chore: Add multi-org experiment UI will use to toggle different views --- coderd/apidoc/docs.go | 9 ++++++--- coderd/apidoc/swagger.json | 11 ++++++++--- codersdk/deployment.go | 1 + docs/api/schemas.md | 1 + site/src/api/typesGenerated.ts | 11 +++++++++-- 5 files changed, 25 insertions(+), 8 deletions(-) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 3d14f4ec72726..22961a36df98a 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -9542,15 +9542,18 @@ const docTemplate = `{ "type": "string", "enum": [ "example", - "auto-fill-parameters" + "auto-fill-parameters", + "multi-organization" ], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", - "ExperimentExample": "This isn't used for anything." + "ExperimentExample": "This isn't used for anything.", + "ExperimentMultiOrganization": "Requires organization context for interactions, default org is assumed." 
}, "x-enum-varnames": [ "ExperimentExample", - "ExperimentAutoFillParameters" + "ExperimentAutoFillParameters", + "ExperimentMultiOrganization" ] }, "codersdk.ExternalAuth": { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 9f6a1833e995d..76b606e46bb8f 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -8541,12 +8541,17 @@ }, "codersdk.Experiment": { "type": "string", - "enum": ["example", "auto-fill-parameters"], + "enum": ["example", "auto-fill-parameters", "multi-organization"], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", - "ExperimentExample": "This isn't used for anything." + "ExperimentExample": "This isn't used for anything.", + "ExperimentMultiOrganization": "Requires organization context for interactions, default org is assumed." }, - "x-enum-varnames": ["ExperimentExample", "ExperimentAutoFillParameters"] + "x-enum-varnames": [ + "ExperimentExample", + "ExperimentAutoFillParameters", + "ExperimentMultiOrganization" + ] }, "codersdk.ExternalAuth": { "type": "object", diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 087ad660cbc68..8f8499e51f13b 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -2217,6 +2217,7 @@ const ( // Add new experiments here! ExperimentExample Experiment = "example" // This isn't used for anything. ExperimentAutoFillParameters Experiment = "auto-fill-parameters" // This should not be taken out of experiments until we have redesigned the feature. + ExperimentMultiOrganization Experiment = "multi-organization" // Requires organization context for interactions, default org is assumed. 
) // ExperimentsAll should include all experiments that are safe for diff --git a/docs/api/schemas.md b/docs/api/schemas.md index a6462a14ca29c..68ad8c8612733 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -2693,6 +2693,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | ---------------------- | | `example` | | `auto-fill-parameters` | +| `multi-organization` | ## codersdk.ExternalAuth diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index c2e9b51b96a11..8d49bc6ca7223 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -1907,8 +1907,15 @@ export const Entitlements: Entitlement[] = [ ]; // From codersdk/deployment.go -export type Experiment = "auto-fill-parameters" | "example"; -export const Experiments: Experiment[] = ["auto-fill-parameters", "example"]; +export type Experiment = + | "auto-fill-parameters" + | "example" + | "multi-organization"; +export const Experiments: Experiment[] = [ + "auto-fill-parameters", + "example", + "multi-organization", +]; // From codersdk/deployment.go export type FeatureName = From 2b29559984c66c5b060ec70e2560665684e66954 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Mon, 13 May 2024 14:41:45 -0600 Subject: [PATCH 056/149] chore: add setting to enable multi-organization ui (#13266) --- .../AccountPage/AccountPage.tsx | 38 ++++++++++++++++++- 1 file changed, 36 insertions(+), 2 deletions(-) diff --git a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx index da461e6193c28..38bac36ef6ed4 100644 --- a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx +++ b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx @@ -1,6 +1,8 @@ -import type { FC } from "react"; +import Button from "@mui/material/Button"; +import { type FC, useEffect, useState } from "react"; import { useQuery } from "react-query"; import { groupsForUser } from 
"api/queries/groups"; +import { DisabledBadge, EnabledBadge } from "components/Badges/Badges"; import { Stack } from "components/Stack/Stack"; import { useAuthContext } from "contexts/auth/AuthProvider"; import { useAuthenticated } from "contexts/auth/RequireAuth"; @@ -13,7 +15,7 @@ export const AccountPage: FC = () => { const { user: me, permissions, organizationId } = useAuthenticated(); const { updateProfile, updateProfileError, isUpdatingProfile } = useAuthContext(); - const { entitlements } = useDashboard(); + const { entitlements, experiments } = useDashboard(); const hasGroupsFeature = entitlements.features.user_role_management.enabled; const groupsQuery = useQuery({ @@ -21,6 +23,21 @@ export const AccountPage: FC = () => { enabled: hasGroupsFeature, }); + const multiOrgExperimentEnabled = experiments.includes("multi-organization"); + const [multiOrgUiEnabled, setMultiOrgUiEnabled] = useState( + () => + multiOrgExperimentEnabled && + Boolean(localStorage.getItem("enableMultiOrganizationUi")), + ); + + useEffect(() => { + if (multiOrgUiEnabled) { + localStorage.setItem("enableMultiOrganizationUi", "true"); + } else { + localStorage.removeItem("enableMultiOrganizationUi"); + } + }, [multiOrgUiEnabled]); + return (
@@ -41,6 +58,23 @@ export const AccountPage: FC = () => { error={groupsQuery.error} /> )} + + {multiOrgExperimentEnabled && ( +
Danger: enabling will break things in the UI. + } + > + + {multiOrgUiEnabled ? : } + + +
+ )} ); }; From 721ab2a1b4ade7eb077d4da0bf005cfa5bbcaf26 Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Tue, 14 May 2024 12:31:31 -0400 Subject: [PATCH 057/149] chore: add workspace activity linter (#13273) --- coderd/agentapi/activitybump.go | 1 + coderd/agentapi/stats.go | 1 + coderd/batchstats/batcher.go | 1 + coderd/workspaceagents.go | 1 + coderd/workspaceusage/tracker.go | 1 + enterprise/coderd/schedule/template.go | 1 + scripts/rules.go | 20 ++++++++++++++++++++ 7 files changed, 26 insertions(+) diff --git a/coderd/agentapi/activitybump.go b/coderd/agentapi/activitybump.go index 90afaf7e36111..a28ba695d018e 100644 --- a/coderd/agentapi/activitybump.go +++ b/coderd/agentapi/activitybump.go @@ -41,6 +41,7 @@ func ActivityBumpWorkspace(ctx context.Context, log slog.Logger, db database.Sto // low priority operations fail first. ctx, cancel := context.WithTimeout(ctx, time.Second*15) defer cancel() + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. err := db.ActivityBumpWorkspace(ctx, database.ActivityBumpWorkspaceParams{ NextAutostart: nextAutostart.UTC(), WorkspaceID: workspaceID, diff --git a/coderd/agentapi/stats.go b/coderd/agentapi/stats.go index fe5ccbe833022..e91a3624e915d 100644 --- a/coderd/agentapi/stats.go +++ b/coderd/agentapi/stats.go @@ -102,6 +102,7 @@ func (a *StatsAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsR return nil }) errGroup.Go(func() error { + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
err := a.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ ID: workspace.ID, LastUsedAt: now, diff --git a/coderd/batchstats/batcher.go b/coderd/batchstats/batcher.go index 34b76d239e7ef..bbff38b0413c0 100644 --- a/coderd/batchstats/batcher.go +++ b/coderd/batchstats/batcher.go @@ -240,6 +240,7 @@ func (b *Batcher) flush(ctx context.Context, forced bool, reason string) { b.buf.ConnectionsByProto = payload } + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. err = b.store.InsertWorkspaceAgentStats(ctx, *b.buf) elapsed := time.Since(start) if err != nil { diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index bf50971ea6cb8..03ca23dcc1162 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -1244,6 +1244,7 @@ func (api *API) workspaceAgentReportStats(rw http.ResponseWriter, r *http.Reques }) if req.SessionCount() > 0 { errGroup.Go(func() error { + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. err := api.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ ID: workspace.ID, LastUsedAt: now, diff --git a/coderd/workspaceusage/tracker.go b/coderd/workspaceusage/tracker.go index 6a3659a5008d8..118b021d71d52 100644 --- a/coderd/workspaceusage/tracker.go +++ b/coderd/workspaceusage/tracker.go @@ -130,6 +130,7 @@ func (tr *Tracker) flush(now time.Time) { authCtx := dbauthz.AsSystemRestricted(ctx) tr.flushLock.Lock() defer tr.flushLock.Unlock() + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
if err := tr.s.BatchUpdateWorkspaceLastUsedAt(authCtx, database.BatchUpdateWorkspaceLastUsedAtParams{ LastUsedAt: now, IDs: ids, diff --git a/enterprise/coderd/schedule/template.go b/enterprise/coderd/schedule/template.go index 1d246d344f026..824bcca6a1bcc 100644 --- a/enterprise/coderd/schedule/template.go +++ b/enterprise/coderd/schedule/template.go @@ -169,6 +169,7 @@ func (s *EnterpriseTemplateScheduleStore) Set(ctx context.Context, db database.S } if opts.UpdateWorkspaceLastUsedAt { + // nolint:gocritic // (#13146) Will be moved soon as part of refactor. err = tx.UpdateTemplateWorkspacesLastUsedAt(ctx, database.UpdateTemplateWorkspacesLastUsedAtParams{ TemplateID: tpl.ID, LastUsedAt: dbtime.Now(), diff --git a/scripts/rules.go b/scripts/rules.go index 246fe29381e7c..2ff2a503b8503 100644 --- a/scripts/rules.go +++ b/scripts/rules.go @@ -467,3 +467,23 @@ func withTimezoneUTC(m dsl.Matcher) { ).Report(`Setting database timezone to UTC may mask timezone-related bugs.`). At(m["tz"]) } + +// workspaceActivity ensures that updating workspace activity is only done in the workspaceapps package. 
+// +//nolint:unused,deadcode,varnamelen +func workspaceActivity(m dsl.Matcher) { + m.Import("github.com/coder/coder/v2/coderd/database") + m.Match( + `$_.ActivityBumpWorkspace($_, $_)`, + `$_.UpdateWorkspaceLastUsedAt($_, $_)`, + `$_.BatchUpdateWorkspaceLastUsedAt($_, $_)`, + `$_.UpdateTemplateWorkspacesLastUsedAt($_, $_)`, + `$_.InsertWorkspaceAgentStats($_, $_)`, + `$_.InsertWorkspaceAppStats($_, $_)`, + ).Where( + !m.File().PkgPath.Matches(`workspaceapps`) && + !m.File().PkgPath.Matches(`dbauthz$`) && + !m.File().PkgPath.Matches(`dbgen$`) && + !m.File().Name.Matches(`_test\.go$`), + ).Report("Updating workspace activity should always be done in the workspaceapps package.") +} From a8a0be98b824406d8772138e836e0b1c94c64ad2 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Tue, 14 May 2024 10:48:15 -0600 Subject: [PATCH 058/149] chore: expose all organization ids from AuthContext (#13268) --- site/src/contexts/auth/AuthProvider.tsx | 4 +- site/src/contexts/auth/RequireAuth.test.tsx | 3 +- site/src/contexts/auth/RequireAuth.tsx | 21 ++++++--- .../modules/dashboard/DashboardProvider.tsx | 31 ++++++++++++- .../WorkspaceStatusBadge.stories.tsx | 19 +------- .../DuplicateTemplateView.tsx | 6 +-- .../ImportStarterTemplateView.tsx | 6 +-- .../CreateTemplatePage/UploadTemplateView.tsx | 6 +-- .../pages/CreateUserPage/CreateUserPage.tsx | 4 +- .../CreateWorkspacePage.tsx | 4 +- site/src/pages/GroupsPage/CreateGroupPage.tsx | 4 +- site/src/pages/GroupsPage/GroupsPage.tsx | 4 +- .../StarterTemplatePage.tsx | 4 +- .../StarterTemplatesPage.tsx | 4 +- .../TemplateFilesPage/TemplateFilesPage.tsx | 4 +- .../src/pages/TemplatePage/TemplateLayout.tsx | 4 +- .../TemplateSettingsPage.tsx | 4 +- .../TemplatePermissionsPage.tsx | 4 +- .../TemplateSchedulePage.tsx | 4 +- .../TemplateSettingsLayout.tsx | 4 +- .../TemplateVariablesPage.tsx | 4 +- .../TemplateVersionEditorPage.tsx | 4 +- .../TemplateVersionPage.tsx | 3 +- .../src/pages/TemplatesPage/TemplatesPage.tsx | 5 ++- 
.../AccountPage/AccountPage.tsx | 4 +- site/src/pages/UsersPage/UsersPage.tsx | 6 +-- .../pages/WorkspacePage/Workspace.stories.tsx | 45 ++++++++----------- .../src/pages/WorkspacePage/WorkspacePage.tsx | 4 +- .../pages/WorkspacesPage/WorkspacesPage.tsx | 4 +- .../WorkspacesPageView.stories.tsx | 19 +------- site/src/testHelpers/storybook.tsx | 2 + 31 files changed, 120 insertions(+), 124 deletions(-) diff --git a/site/src/contexts/auth/AuthProvider.tsx b/site/src/contexts/auth/AuthProvider.tsx index 767606e8d605f..2925ac095aadd 100644 --- a/site/src/contexts/auth/AuthProvider.tsx +++ b/site/src/contexts/auth/AuthProvider.tsx @@ -30,7 +30,7 @@ export type AuthContextValue = { isUpdatingProfile: boolean; user: User | undefined; permissions: Permissions | undefined; - organizationId: string | undefined; + organizationIds: readonly string[] | undefined; signInError: unknown; updateProfileError: unknown; signOut: () => void; @@ -119,7 +119,7 @@ export const AuthProvider: FC = ({ children }) => { permissions: permissionsQuery.data as Permissions | undefined, signInError: loginMutation.error, updateProfileError: updateProfileMutation.error, - organizationId: userQuery.data?.organization_ids[0], + organizationIds: userQuery.data?.organization_ids, }} > {children} diff --git a/site/src/contexts/auth/RequireAuth.test.tsx b/site/src/contexts/auth/RequireAuth.test.tsx index 0f128417a96f9..e1194cb601cbc 100644 --- a/site/src/contexts/auth/RequireAuth.test.tsx +++ b/site/src/contexts/auth/RequireAuth.test.tsx @@ -45,7 +45,7 @@ const createAuthWrapper = (override: Partial) => { isUpdatingProfile: false, permissions: undefined, authMethods: undefined, - organizationId: undefined, + organizationIds: undefined, signInError: undefined, updateProfileError: undefined, signOut: jest.fn(), @@ -95,6 +95,7 @@ describe("useAuthenticated", () => { wrapper: createAuthWrapper({ user: MockUser, permissions: MockPermissions, + organizationIds: [], }), }); }).not.toThrow(); diff --git 
a/site/src/contexts/auth/RequireAuth.tsx b/site/src/contexts/auth/RequireAuth.tsx index 2d6b14d3db69f..b1def94fd9485 100644 --- a/site/src/contexts/auth/RequireAuth.tsx +++ b/site/src/contexts/auth/RequireAuth.tsx @@ -66,15 +66,18 @@ export const RequireAuth: FC = () => { ); }; +type RequireKeys = Omit & { + [K in keyof Pick]: NonNullable; +}; + // We can do some TS magic here but I would rather to be explicit on what // values are not undefined when authenticated -type NonNullableAuth = AuthContextValue & { - user: Exclude; - permissions: Exclude; - organizationId: Exclude; -}; +type AuthenticatedAuthContextValue = RequireKeys< + AuthContextValue, + "user" | "permissions" | "organizationIds" +>; -export const useAuthenticated = (): NonNullableAuth => { +export const useAuthenticated = (): AuthenticatedAuthContextValue => { const auth = useAuthContext(); if (!auth.user) { @@ -85,5 +88,9 @@ export const useAuthenticated = (): NonNullableAuth => { throw new Error("Permissions are not available."); } - return auth as NonNullableAuth; + if (!auth.organizationIds) { + throw new Error("Organization ID is not available."); + } + + return auth as AuthenticatedAuthContextValue; }; diff --git a/site/src/modules/dashboard/DashboardProvider.tsx b/site/src/modules/dashboard/DashboardProvider.tsx index 19daf886f02f8..a44a162b994dd 100644 --- a/site/src/modules/dashboard/DashboardProvider.tsx +++ b/site/src/modules/dashboard/DashboardProvider.tsx @@ -1,4 +1,9 @@ -import { createContext, type FC, type PropsWithChildren } from "react"; +import { + createContext, + type FC, + type PropsWithChildren, + useState, +} from "react"; import { useQuery } from "react-query"; import { appearance } from "api/queries/appearance"; import { entitlements } from "api/queries/entitlements"; @@ -9,9 +14,13 @@ import type { Experiments, } from "api/typesGenerated"; import { Loader } from "components/Loader/Loader"; +import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { 
useEffectEvent } from "hooks/hookPolyfills"; import { useEmbeddedMetadata } from "hooks/useEmbeddedMetadata"; export interface DashboardValue { + organizationId: string; + setOrganizationId: (id: string) => void; entitlements: Entitlements; experiments: Experiments; appearance: AppearanceConfig; @@ -23,6 +32,7 @@ export const DashboardContext = createContext( export const DashboardProvider: FC = ({ children }) => { const { metadata } = useEmbeddedMetadata(); + const { user, organizationIds } = useAuthenticated(); const entitlementsQuery = useQuery(entitlements(metadata.entitlements)); const experimentsQuery = useQuery(experiments(metadata.experiments)); const appearanceQuery = useQuery(appearance(metadata.appearance)); @@ -30,6 +40,23 @@ export const DashboardProvider: FC = ({ children }) => { const isLoading = !entitlementsQuery.data || !appearanceQuery.data || !experimentsQuery.data; + const lastUsedOrganizationId = localStorage.getItem( + `user:${user.id}.lastUsedOrganizationId`, + ); + const [activeOrganizationId, setActiveOrganizationId] = useState(() => + lastUsedOrganizationId && organizationIds.includes(lastUsedOrganizationId) + ? 
lastUsedOrganizationId + : organizationIds[0], + ); + + const setOrganizationId = useEffectEvent((id: string) => { + if (!organizationIds.includes(id)) { + throw new ReferenceError("Invalid organization ID"); + } + localStorage.setItem(`user:${user.id}.lastUsedOrganizationId`, id); + setActiveOrganizationId(id); + }); + if (isLoading) { return ; } @@ -37,6 +64,8 @@ export const DashboardProvider: FC = ({ children }) => { return ( = { @@ -29,19 +26,7 @@ const meta: Meta = { }, ], }, - decorators: [ - (Story) => ( - - - - ), - ], + decorators: [withDashboardProvider], }; export default meta; diff --git a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx index 91ac28acc9127..d108d89ca1ba0 100644 --- a/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/DuplicateTemplateView.tsx @@ -10,7 +10,6 @@ import { } from "api/queries/templates"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { useDashboard } from "modules/dashboard/useDashboard"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; @@ -24,7 +23,7 @@ export const DuplicateTemplateView: FC = ({ isCreating, }) => { const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); + const { entitlements, organizationId } = useDashboard(); const [searchParams] = useSearchParams(); const templateByNameQuery = useQuery( templateByName(organizationId, searchParams.get("fromTemplate")!), @@ -47,8 +46,7 @@ export const DuplicateTemplateView: FC = ({ templateVersionQuery.error || templateVersionVariablesQuery.error; - const dashboard = useDashboard(); - const formPermissions = getFormPermissions(dashboard.entitlements); + const formPermissions = getFormPermissions(entitlements); const isJobError 
= error instanceof JobError; const templateVersionLogsQuery = useQuery({ diff --git a/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx b/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx index a7212a1410d13..e62cda910f847 100644 --- a/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/ImportStarterTemplateView.tsx @@ -9,7 +9,6 @@ import { } from "api/queries/templates"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { useDashboard } from "modules/dashboard/useDashboard"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; @@ -27,7 +26,7 @@ export const ImportStarterTemplateView: FC = ({ isCreating, }) => { const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); + const { entitlements, organizationId } = useDashboard(); const [searchParams] = useSearchParams(); const templateExamplesQuery = useQuery(templateExamples(organizationId)); const templateExample = templateExamplesQuery.data?.find( @@ -37,8 +36,7 @@ export const ImportStarterTemplateView: FC = ({ const isLoading = templateExamplesQuery.isLoading; const loadingError = templateExamplesQuery.error; - const dashboard = useDashboard(); - const formPermissions = getFormPermissions(dashboard.entitlements); + const formPermissions = getFormPermissions(entitlements); const isJobError = error instanceof JobError; const templateVersionLogsQuery = useQuery({ diff --git a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx index ac650baff112b..b9f49d4a46b94 100644 --- a/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx +++ b/site/src/pages/CreateTemplatePage/UploadTemplateView.tsx @@ -7,7 +7,6 @@ import { JobError, templateVersionVariables, } from 
"api/queries/templates"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { useDashboard } from "modules/dashboard/useDashboard"; import { CreateTemplateForm } from "./CreateTemplateForm"; import type { CreateTemplatePageViewProps } from "./types"; @@ -21,10 +20,9 @@ export const UploadTemplateView: FC = ({ error, }) => { const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); - const dashboard = useDashboard(); - const formPermissions = getFormPermissions(dashboard.entitlements); + const { entitlements, organizationId } = useDashboard(); + const formPermissions = getFormPermissions(entitlements); const uploadFileMutation = useMutation(uploadFile()); const uploadedFile = uploadFileMutation.data; diff --git a/site/src/pages/CreateUserPage/CreateUserPage.tsx b/site/src/pages/CreateUserPage/CreateUserPage.tsx index 5de615241ebd0..bec3e7c637e05 100644 --- a/site/src/pages/CreateUserPage/CreateUserPage.tsx +++ b/site/src/pages/CreateUserPage/CreateUserPage.tsx @@ -5,7 +5,7 @@ import { useNavigate } from "react-router-dom"; import { authMethods, createUser } from "api/queries/users"; import { displaySuccess } from "components/GlobalSnackbar/utils"; import { Margins } from "components/Margins/Margins"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { CreateUserForm } from "./CreateUserForm"; @@ -14,7 +14,7 @@ export const Language = { }; export const CreateUserPage: FC = () => { - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const navigate = useNavigate(); const queryClient = useQueryClient(); const createUserMutation = useMutation(createUser(queryClient)); diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx index df0bb38891f03..c2cd9ab9da3ae 100644 --- 
a/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePage.tsx @@ -35,10 +35,10 @@ export type ExternalAuthPollingState = "idle" | "polling" | "abandoned"; const CreateWorkspacePage: FC = () => { const { template: templateName } = useParams() as { template: string }; - const { user: me, organizationId } = useAuthenticated(); + const { user: me } = useAuthenticated(); const navigate = useNavigate(); const [searchParams, setSearchParams] = useSearchParams(); - const { experiments } = useDashboard(); + const { experiments, organizationId } = useDashboard(); const customVersionId = searchParams.get("version") ?? undefined; const defaultName = searchParams.get("name"); diff --git a/site/src/pages/GroupsPage/CreateGroupPage.tsx b/site/src/pages/GroupsPage/CreateGroupPage.tsx index 9a08da1cd4d0b..d5fd2c1f73c01 100644 --- a/site/src/pages/GroupsPage/CreateGroupPage.tsx +++ b/site/src/pages/GroupsPage/CreateGroupPage.tsx @@ -3,14 +3,14 @@ import { Helmet } from "react-helmet-async"; import { useMutation, useQueryClient } from "react-query"; import { useNavigate } from "react-router-dom"; import { createGroup } from "api/queries/groups"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import CreateGroupPageView from "./CreateGroupPageView"; export const CreateGroupPage: FC = () => { const queryClient = useQueryClient(); const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const createGroupMutation = useMutation(createGroup(queryClient)); return ( diff --git a/site/src/pages/GroupsPage/GroupsPage.tsx b/site/src/pages/GroupsPage/GroupsPage.tsx index 8ad9c2a3b00f5..41303d533bbbe 100644 --- a/site/src/pages/GroupsPage/GroupsPage.tsx +++ b/site/src/pages/GroupsPage/GroupsPage.tsx @@ -5,12 +5,14 @@ import { getErrorMessage } from 
"api/errors"; import { groups } from "api/queries/groups"; import { displayError } from "components/GlobalSnackbar/utils"; import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; import { pageTitle } from "utils/page"; import GroupsPageView from "./GroupsPageView"; export const GroupsPage: FC = () => { - const { organizationId, permissions } = useAuthenticated(); + const { permissions } = useAuthenticated(); + const { organizationId } = useDashboard(); const { createGroup: canCreateGroup } = permissions; const { template_rbac: isTemplateRBACEnabled } = useFeatureVisibility(); const groupsQuery = useQuery(groups(organizationId)); diff --git a/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx b/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx index 2278461b40b83..ed7b5b1c9d92f 100644 --- a/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx +++ b/site/src/pages/StarterTemplatePage/StarterTemplatePage.tsx @@ -3,13 +3,13 @@ import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { useParams } from "react-router-dom"; import { templateExamples } from "api/queries/templates"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { StarterTemplatePageView } from "./StarterTemplatePageView"; const StarterTemplatePage: FC = () => { const { exampleId } = useParams() as { exampleId: string }; - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const templateExamplesQuery = useQuery(templateExamples(organizationId)); const starterTemplate = templateExamplesQuery.data?.find( (example) => example.id === exampleId, diff --git a/site/src/pages/StarterTemplatesPage/StarterTemplatesPage.tsx 
b/site/src/pages/StarterTemplatesPage/StarterTemplatesPage.tsx index 74b31388f614c..d52c92a12df82 100644 --- a/site/src/pages/StarterTemplatesPage/StarterTemplatesPage.tsx +++ b/site/src/pages/StarterTemplatesPage/StarterTemplatesPage.tsx @@ -3,13 +3,13 @@ import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { templateExamples } from "api/queries/templates"; import type { TemplateExample } from "api/typesGenerated"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { getTemplatesByTag } from "utils/starterTemplates"; import { StarterTemplatesPageView } from "./StarterTemplatesPageView"; const StarterTemplatesPage: FC = () => { - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const templateExamplesQuery = useQuery(templateExamples(organizationId)); const starterTemplatesByTag = templateExamplesQuery.data ? // Currently, the scratch template should not be displayed on the starter templates page. 
diff --git a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx index 9ee7161899d88..915241780c3fa 100644 --- a/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx +++ b/site/src/pages/TemplatePage/TemplateFilesPage/TemplateFilesPage.tsx @@ -3,13 +3,13 @@ import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { previousTemplateVersion, templateFiles } from "api/queries/templates"; import { Loader } from "components/Loader/Loader"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { TemplateFiles } from "modules/templates/TemplateFiles/TemplateFiles"; import { useTemplateLayoutContext } from "pages/TemplatePage/TemplateLayout"; import { getTemplatePageTitle } from "../utils"; const TemplateFilesPage: FC = () => { - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const { template, activeVersion } = useTemplateLayoutContext(); const { data: currentFiles } = useQuery( templateFiles(activeVersion.job.file_id), diff --git a/site/src/pages/TemplatePage/TemplateLayout.tsx b/site/src/pages/TemplatePage/TemplateLayout.tsx index e388c81feb27e..ec19d80c166cc 100644 --- a/site/src/pages/TemplatePage/TemplateLayout.tsx +++ b/site/src/pages/TemplatePage/TemplateLayout.tsx @@ -13,7 +13,7 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { TAB_PADDING_Y, TabLink, Tabs, TabsList } from "components/Tabs/Tabs"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { TemplatePageHeader } from "./TemplatePageHeader"; const templatePermissions = ( @@ -71,7 +71,7 @@ export const TemplateLayout: FC = ({ children = , }) => { 
const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const { template: templateName } = useParams() as { template: string }; const { data, error, isLoading } = useQuery({ queryKey: ["template", templateName], diff --git a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx index 4438cec0bea06..674505afd89e0 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateGeneralSettingsPage/TemplateSettingsPage.tsx @@ -6,7 +6,6 @@ import { API } from "api/api"; import { templateByNameKey } from "api/queries/templates"; import type { UpdateTemplateMeta } from "api/typesGenerated"; import { displaySuccess } from "components/GlobalSnackbar/utils"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { useTemplateSettings } from "../TemplateSettingsLayout"; @@ -15,10 +14,9 @@ import { TemplateSettingsPageView } from "./TemplateSettingsPageView"; export const TemplateSettingsPage: FC = () => { const { template: templateName } = useParams() as { template: string }; const navigate = useNavigate(); - const { organizationId } = useAuthenticated(); const { template } = useTemplateSettings(); const queryClient = useQueryClient(); - const { entitlements } = useDashboard(); + const { entitlements, organizationId } = useDashboard(); const accessControlEnabled = entitlements.features.access_control.enabled; const advancedSchedulingEnabled = entitlements.features.advanced_template_scheduling.enabled; diff --git a/site/src/pages/TemplateSettingsPage/TemplatePermissionsPage/TemplatePermissionsPage.tsx 
b/site/src/pages/TemplateSettingsPage/TemplatePermissionsPage/TemplatePermissionsPage.tsx index 79f0068147717..2e9aa072e699a 100644 --- a/site/src/pages/TemplateSettingsPage/TemplatePermissionsPage/TemplatePermissionsPage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplatePermissionsPage/TemplatePermissionsPage.tsx @@ -4,7 +4,7 @@ import { useMutation, useQuery, useQueryClient } from "react-query"; import { setGroupRole, setUserRole, templateACL } from "api/queries/templates"; import { displaySuccess } from "components/GlobalSnackbar/utils"; import { Paywall } from "components/Paywall/Paywall"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { useFeatureVisibility } from "modules/dashboard/useFeatureVisibility"; import { docs } from "utils/docs"; import { pageTitle } from "utils/page"; @@ -12,7 +12,7 @@ import { useTemplateSettings } from "../TemplateSettingsLayout"; import { TemplatePermissionsPageView } from "./TemplatePermissionsPageView"; export const TemplatePermissionsPage: FC = () => { - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const { template, permissions } = useTemplateSettings(); const { template_rbac: isTemplateRBACEnabled } = useFeatureVisibility(); const templateACLQuery = useQuery(templateACL(template.id)); diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx index db37ed32dbcc3..45f87bdda5a5b 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.tsx @@ -6,7 +6,6 @@ import { API } from "api/api"; import { templateByNameKey } from "api/queries/templates"; import type { UpdateTemplateMeta } from "api/typesGenerated"; import { displaySuccess } from 
"components/GlobalSnackbar/utils"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { useTemplateSettings } from "../TemplateSettingsLayout"; @@ -16,9 +15,8 @@ const TemplateSchedulePage: FC = () => { const { template: templateName } = useParams() as { template: string }; const navigate = useNavigate(); const queryClient = useQueryClient(); - const { organizationId } = useAuthenticated(); const { template } = useTemplateSettings(); - const { entitlements } = useDashboard(); + const { entitlements, organizationId } = useDashboard(); const allowAdvancedScheduling = entitlements.features["advanced_template_scheduling"].enabled; diff --git a/site/src/pages/TemplateSettingsPage/TemplateSettingsLayout.tsx b/site/src/pages/TemplateSettingsPage/TemplateSettingsLayout.tsx index 8e4bed6fb4b28..8e157dac3bd95 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSettingsLayout.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSettingsLayout.tsx @@ -9,7 +9,7 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { Stack } from "components/Stack/Stack"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { Sidebar } from "./Sidebar"; @@ -27,7 +27,7 @@ export function useTemplateSettings() { } export const TemplateSettingsLayout: FC = () => { - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const { template: templateName } = useParams() as { template: string }; const templateQuery = useQuery(templateByName(organizationId, templateName)); const permissionsQuery = useQuery({ diff --git a/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.tsx 
b/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.tsx index 94710daf4a98f..e717e24a2aab5 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateVariablesPage/TemplateVariablesPage.tsx @@ -16,7 +16,7 @@ import type { import { ErrorAlert } from "components/Alert/ErrorAlert"; import { displaySuccess } from "components/GlobalSnackbar/utils"; import { Loader } from "components/Loader/Loader"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { useTemplateSettings } from "../TemplateSettingsLayout"; import { TemplateVariablesPageView } from "./TemplateVariablesPageView"; @@ -26,7 +26,7 @@ export const TemplateVariablesPage: FC = () => { organization: string; template: string; }; - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const { template } = useTemplateSettings(); const navigate = useNavigate(); const queryClient = useQueryClient(); diff --git a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx index 3a622630cd770..fa9d5e25be527 100644 --- a/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx +++ b/site/src/pages/TemplateVersionEditorPage/TemplateVersionEditorPage.tsx @@ -18,7 +18,7 @@ import type { } from "api/typesGenerated"; import { displayError } from "components/GlobalSnackbar/utils"; import { Loader } from "components/Loader/Loader"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { useWatchVersionLogs } from "modules/templates/useWatchVersionLogs"; import { type FileTree, traverse } from "utils/filetree"; import { pageTitle } from "utils/page"; @@ -36,7 +36,7 @@ export const 
TemplateVersionEditorPage: FC = () => { const navigate = useNavigate(); const { version: versionName, template: templateName } = useParams() as Params; - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); const templateQuery = useQuery(templateByName(organizationId, templateName)); const templateVersionOptions = templateVersionByName( organizationId, diff --git a/site/src/pages/TemplateVersionPage/TemplateVersionPage.tsx b/site/src/pages/TemplateVersionPage/TemplateVersionPage.tsx index 673fd5b5e91dc..dba108caa750b 100644 --- a/site/src/pages/TemplateVersionPage/TemplateVersionPage.tsx +++ b/site/src/pages/TemplateVersionPage/TemplateVersionPage.tsx @@ -9,6 +9,7 @@ import { templateVersionByName, } from "api/queries/templates"; import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import TemplateVersionPageView from "./TemplateVersionPageView"; @@ -20,7 +21,7 @@ type Params = { export const TemplateVersionPage: FC = () => { const { version: versionName, template: templateName } = useParams() as Params; - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); /** * Template version files diff --git a/site/src/pages/TemplatesPage/TemplatesPage.tsx b/site/src/pages/TemplatesPage/TemplatesPage.tsx index 019f32b1d7656..75c98d5221320 100644 --- a/site/src/pages/TemplatesPage/TemplatesPage.tsx +++ b/site/src/pages/TemplatesPage/TemplatesPage.tsx @@ -3,11 +3,14 @@ import { Helmet } from "react-helmet-async"; import { useQuery } from "react-query"; import { templateExamples, templates } from "api/queries/templates"; import { useAuthenticated } from "contexts/auth/RequireAuth"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { pageTitle } from "utils/page"; import { TemplatesPageView } from "./TemplatesPageView"; export const TemplatesPage: FC = () => { - const { 
organizationId, permissions } = useAuthenticated(); + const { permissions } = useAuthenticated(); + const { organizationId } = useDashboard(); + const templatesQuery = useQuery(templates(organizationId)); const examplesQuery = useQuery({ ...templateExamples(organizationId), diff --git a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx index 38bac36ef6ed4..3a299e37b20aa 100644 --- a/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx +++ b/site/src/pages/UserSettingsPage/AccountPage/AccountPage.tsx @@ -12,10 +12,10 @@ import { AccountForm } from "./AccountForm"; import { AccountUserGroups } from "./AccountUserGroups"; export const AccountPage: FC = () => { - const { user: me, permissions, organizationId } = useAuthenticated(); + const { permissions, user: me } = useAuthenticated(); const { updateProfile, updateProfileError, isUpdatingProfile } = useAuthContext(); - const { entitlements, experiments } = useDashboard(); + const { entitlements, experiments, organizationId } = useDashboard(); const hasGroupsFeature = entitlements.features.user_role_management.enabled; const groupsQuery = useQuery({ diff --git a/site/src/pages/UsersPage/UsersPage.tsx b/site/src/pages/UsersPage/UsersPage.tsx index be864a8634079..8ddc42e630aff 100644 --- a/site/src/pages/UsersPage/UsersPage.tsx +++ b/site/src/pages/UsersPage/UsersPage.tsx @@ -35,15 +35,13 @@ export const UsersPage: FC = () => { const navigate = useNavigate(); const searchParamsResult = useSearchParams(); - const { entitlements } = useDashboard(); + const { entitlements, organizationId } = useDashboard(); const [searchParams] = searchParamsResult; - const { organizationId } = useAuthenticated(); const groupsByUserIdQuery = useQuery(groupsByUserId(organizationId)); const authMethodsQuery = useQuery(authMethods()); - const { user: me } = useAuthenticated(); - const { permissions } = useAuthenticated(); + const { permissions, user: me } = 
useAuthenticated(); const { updateUsers: canEditUsers, viewDeploymentValues } = permissions; const rolesQuery = useQuery(roles()); const { data: deploymentValues } = useQuery({ diff --git a/site/src/pages/WorkspacePage/Workspace.stories.tsx b/site/src/pages/WorkspacePage/Workspace.stories.tsx index c321366862264..eb00430a8b30c 100644 --- a/site/src/pages/WorkspacePage/Workspace.stories.tsx +++ b/site/src/pages/WorkspacePage/Workspace.stories.tsx @@ -2,8 +2,8 @@ import { action } from "@storybook/addon-actions"; import type { Meta, StoryObj } from "@storybook/react"; import type { ProvisionerJobLog } from "api/typesGenerated"; import { ProxyContext, getPreferredProxy } from "contexts/ProxyContext"; -import { DashboardContext } from "modules/dashboard/DashboardProvider"; import * as Mocks from "testHelpers/entities"; +import { withDashboardProvider } from "testHelpers/storybook"; import type { WorkspacePermissions } from "./permissions"; import { Workspace } from "./Workspace"; import { WorkspaceBuildLogsSection } from "./WorkspaceBuildLogsSection"; @@ -32,35 +32,28 @@ const meta: Meta = { ], }, decorators: [ + withDashboardProvider, (Story) => ( - { + return; + }, + setProxy: () => { + return; + }, + refetchProxyLatencies: (): Date => { + return new Date(); + }, }} > - { - return; - }, - setProxy: () => { - return; - }, - refetchProxyLatencies: (): Date => { - return new Date(); - }, - }} - > - - - + + ), ], }; diff --git a/site/src/pages/WorkspacePage/WorkspacePage.tsx b/site/src/pages/WorkspacePage/WorkspacePage.tsx index 0331f5290bb73..11869d6254f82 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.tsx @@ -10,10 +10,10 @@ import type { Workspace } from "api/typesGenerated"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; -import { useAuthenticated } from "contexts/auth/RequireAuth"; import { 
useEffectEvent } from "hooks/hookPolyfills"; import { Navbar } from "modules/dashboard/Navbar/Navbar"; import { NotificationBanners } from "modules/dashboard/NotificationBanners/NotificationBanners"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { workspaceChecks, type WorkspacePermissions } from "./permissions"; import { WorkspaceReadyPage } from "./WorkspaceReadyPage"; @@ -25,7 +25,7 @@ export const WorkspacePage: FC = () => { }; const workspaceName = params.workspace; const username = params.username.replace("@", ""); - const { organizationId } = useAuthenticated(); + const { organizationId } = useDashboard(); // Workspace const workspaceQueryOptions = workspaceByOwnerAndName( diff --git a/site/src/pages/WorkspacesPage/WorkspacesPage.tsx b/site/src/pages/WorkspacesPage/WorkspacesPage.tsx index bf959073aaeaa..277716f6a959c 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPage.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPage.tsx @@ -38,8 +38,9 @@ const WorkspacesPage: FC = () => { // each hook. 
const searchParamsResult = useSafeSearchParams(); const pagination = usePagination({ searchParamsResult }); + const { permissions } = useAuthenticated(); + const { entitlements, organizationId } = useDashboard(); - const { organizationId, permissions } = useAuthenticated(); const templatesQuery = useQuery(templates(organizationId, false)); const filterProps = useWorkspacesFilter({ @@ -61,7 +62,6 @@ const WorkspacesPage: FC = () => { "delete" | "update" | null >(null); const [urlSearchParams] = searchParamsResult; - const { entitlements } = useDashboard(); const canCheckWorkspaces = entitlements.features["workspace_batch_actions"].enabled; const batchActions = useBatchActions({ diff --git a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx index 11fc39b142448..ac8b854c5a29d 100644 --- a/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx +++ b/site/src/pages/WorkspacesPage/WorkspacesPageView.stories.tsx @@ -12,18 +12,15 @@ import { getDefaultFilterProps, } from "components/Filter/storyHelpers"; import { DEFAULT_RECORDS_PER_PAGE } from "components/PaginationWidget/utils"; -import { DashboardContext } from "modules/dashboard/DashboardProvider"; import { MockWorkspace, - MockAppearanceConfig, MockBuildInfo, - MockEntitlementsWithScheduling, - MockExperiments, mockApiError, MockUser, MockPendingProvisionerJob, MockTemplate, } from "testHelpers/entities"; +import { withDashboardProvider } from "testHelpers/storybook"; import { WorkspacesPageView } from "./WorkspacesPageView"; const createWorkspace = ( @@ -141,19 +138,7 @@ const meta: Meta = { }, ], }, - decorators: [ - (Story) => ( - - - - ), - ], + decorators: [withDashboardProvider], }; export default meta; diff --git a/site/src/testHelpers/storybook.tsx b/site/src/testHelpers/storybook.tsx index 4d601e0dd67ef..77c0305d1aa2b 100644 --- a/site/src/testHelpers/storybook.tsx +++ b/site/src/testHelpers/storybook.tsx @@ -26,6 +26,8 @@ export 
const withDashboardProvider = ( return ( {}, entitlements, experiments, appearance: MockAppearanceConfig, From f14927955d7361d9a50c633e69fbebca2d9946cb Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 14 May 2024 13:52:16 -0300 Subject: [PATCH 059/149] fix(site): fix group badge visual (#13263) --- .../UsersPage/UsersTable/UserGroupsCell.tsx | 17 ++++++----------- 1 file changed, 6 insertions(+), 11 deletions(-) diff --git a/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx b/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx index b5d1f799ee71e..071dc6c798b96 100644 --- a/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx +++ b/site/src/pages/UsersPage/UsersTable/UserGroupsCell.tsx @@ -1,6 +1,5 @@ import { useTheme } from "@emotion/react"; import GroupIcon from "@mui/icons-material/Group"; -import Button from "@mui/material/Button"; import List from "@mui/material/List"; import ListItem from "@mui/material/ListItem"; import TableCell from "@mui/material/TableCell"; @@ -31,18 +30,14 @@ export function UserGroupsCell({ userGroups }: GroupsCellProps) { ) : ( - + Date: Wed, 15 May 2024 09:46:35 -0500 Subject: [PATCH 060/149] chore: push rbac actions to policy package (#13274) Just moved `rbac.Action` -> `policy.Action`. This is for the stacked PR to not have circular dependencies when doing autogen. Without this, the autogen can produce broken golang code, which prevents the autogen from compiling. So just avoiding circular dependencies. Doing this in it's own PR to reduce LoC diffs in the primary PR, since this has 0 functional changes. 
--- coderd/apikey.go | 4 +- coderd/authorize.go | 11 +- coderd/coderd.go | 3 +- coderd/coderdtest/authorize.go | 29 +- coderd/coderdtest/authorize_test.go | 3 +- coderd/database/dbauthz/dbauthz.go | 521 ++++++++++---------- coderd/database/dbauthz/dbauthz_test.go | 625 ++++++++++++------------ coderd/database/dbauthz/setup_test.go | 21 +- coderd/database/dbgen/dbgen.go | 3 +- coderd/database/dbmetrics/dbmetrics.go | 4 +- coderd/database/types.go | 6 +- coderd/debug.go | 3 +- coderd/deployment.go | 5 +- coderd/insights.go | 3 +- coderd/insights_test.go | 5 +- coderd/rbac/astvalue.go | 18 +- coderd/rbac/authz.go | 47 +- coderd/rbac/authz_internal_test.go | 129 ++--- coderd/rbac/authz_test.go | 37 +- coderd/rbac/error.go | 5 +- coderd/rbac/object.go | 16 +- coderd/rbac/object_test.go | 55 ++- coderd/rbac/policy/policy.go | 11 + coderd/rbac/roles.go | 86 ++-- coderd/rbac/roles_internal_test.go | 36 +- coderd/rbac/roles_test.go | 57 +-- coderd/rbac/scopes.go | 8 +- coderd/roles.go | 5 +- coderd/templates.go | 7 +- coderd/templateversions.go | 11 +- coderd/templateversions_test.go | 5 +- coderd/users.go | 5 +- coderd/users_test.go | 5 +- coderd/workspaceagents.go | 4 +- coderd/workspaceapps.go | 4 +- coderd/workspaceapps/db.go | 5 +- coderd/workspacebuilds.go | 5 +- coderd/workspaces.go | 9 +- coderd/workspaces_test.go | 3 +- coderd/wsbuilder/wsbuilder.go | 17 +- enterprise/coderd/appearance.go | 3 +- enterprise/coderd/coderd.go | 5 +- enterprise/coderd/coderd_test.go | 3 +- enterprise/coderd/groups.go | 4 +- enterprise/coderd/licenses.go | 9 +- enterprise/coderd/provisionerdaemons.go | 5 +- enterprise/coderd/replicas.go | 3 +- enterprise/coderd/templates.go | 13 +- enterprise/coderd/workspaceproxy.go | 4 +- enterprise/coderd/workspacequota.go | 4 +- enterprise/tailnet/pgcoord.go | 3 +- support/support.go | 4 +- 52 files changed, 971 insertions(+), 925 deletions(-) create mode 100644 coderd/rbac/policy/policy.go diff --git a/coderd/apikey.go b/coderd/apikey.go index 
10a83a05f4a24..fe32b771e61ef 100644 --- a/coderd/apikey.go +++ b/coderd/apikey.go @@ -18,7 +18,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/codersdk" ) @@ -255,7 +255,7 @@ func (api *API) tokens(rw http.ResponseWriter, r *http.Request) { } } - keys, err = AuthorizeFilter(api.HTTPAuth, r, rbac.ActionRead, keys) + keys, err = AuthorizeFilter(api.HTTPAuth, r, policy.ActionRead, keys) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching keys.", diff --git a/coderd/authorize.go b/coderd/authorize.go index 764f573ecfdc7..9adff89769805 100644 --- a/coderd/authorize.go +++ b/coderd/authorize.go @@ -11,13 +11,14 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) // AuthorizeFilter takes a list of objects and returns the filtered list of // objects that the user is authorized to perform the given action on. // This is faster than calling Authorize() on each object. 
-func AuthorizeFilter[O rbac.Objecter](h *HTTPAuthorizer, r *http.Request, action rbac.Action, objects []O) ([]O, error) { +func AuthorizeFilter[O rbac.Objecter](h *HTTPAuthorizer, r *http.Request, action policy.Action, objects []O) ([]O, error) { roles := httpmw.UserAuthorization(r) objects, err := rbac.Filter(r.Context(), h.Authorizer, roles, action, objects) if err != nil { @@ -50,7 +51,7 @@ type HTTPAuthorizer struct { // httpapi.Forbidden(rw) // return // } -func (api *API) Authorize(r *http.Request, action rbac.Action, object rbac.Objecter) bool { +func (api *API) Authorize(r *http.Request, action policy.Action, object rbac.Objecter) bool { return api.HTTPAuth.Authorize(r, action, object) } @@ -63,7 +64,7 @@ func (api *API) Authorize(r *http.Request, action rbac.Action, object rbac.Objec // httpapi.Forbidden(rw) // return // } -func (h *HTTPAuthorizer) Authorize(r *http.Request, action rbac.Action, object rbac.Objecter) bool { +func (h *HTTPAuthorizer) Authorize(r *http.Request, action policy.Action, object rbac.Objecter) bool { roles := httpmw.UserAuthorization(r) err := h.Authorizer.Authorize(r.Context(), roles, action, object.RBACObject()) if err != nil { @@ -95,7 +96,7 @@ func (h *HTTPAuthorizer) Authorize(r *http.Request, action rbac.Action, object r // from postgres are already authorized, and the caller does not need to // call 'Authorize()' on the returned objects. // Note the authorization is only for the given action and object type. 
-func (h *HTTPAuthorizer) AuthorizeSQLFilter(r *http.Request, action rbac.Action, objectType string) (rbac.PreparedAuthorized, error) { +func (h *HTTPAuthorizer) AuthorizeSQLFilter(r *http.Request, action policy.Action, objectType string) (rbac.PreparedAuthorized, error) { roles := httpmw.UserAuthorization(r) prepared, err := h.Authorizer.Prepare(r.Context(), roles, action, objectType) if err != nil { @@ -219,7 +220,7 @@ func (api *API) checkAuthorization(rw http.ResponseWriter, r *http.Request) { obj = dbObj.RBACObject() } - err := api.Authorizer.Authorize(ctx, auth, rbac.Action(v.Action), obj) + err := api.Authorizer.Authorize(ctx, auth, policy.Action(v.Action), obj) response[k] = err == nil } diff --git a/coderd/coderd.go b/coderd/coderd.go index 9dcda1a71536e..c0631c0752c0c 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -60,6 +60,7 @@ import ( "github.com/coder/coder/v2/coderd/prometheusmetrics" "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/tracing" @@ -1106,7 +1107,7 @@ func New(options *Options) *API { // Ensure only owners can access debug endpoints. 
func(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceDebugInfo) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDebugInfo) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/coderdtest/authorize.go b/coderd/coderdtest/authorize.go index 01210f9a7056e..6c38063a0dbbe 100644 --- a/coderd/coderdtest/authorize.go +++ b/coderd/coderdtest/authorize.go @@ -20,6 +20,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/rbac/regosql" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/cryptorand" @@ -84,7 +85,7 @@ func (a RBACAsserter) AllCalls() []AuthCall { // AssertChecked will assert a given rbac check was performed. It does not care // about order of checks, or any other checks. This is useful when you do not // care about asserting every check that was performed. -func (a RBACAsserter) AssertChecked(t *testing.T, action rbac.Action, objects ...interface{}) { +func (a RBACAsserter) AssertChecked(t *testing.T, action policy.Action, objects ...interface{}) { converted := a.convertObjects(t, objects...) pairs := make([]ActionObjectPair, 0, len(converted)) for _, obj := range converted { @@ -95,7 +96,7 @@ func (a RBACAsserter) AssertChecked(t *testing.T, action rbac.Action, objects .. // AssertInOrder must be called in the correct order of authz checks. If the objects // or actions are not in the correct order, the test will fail. -func (a RBACAsserter) AssertInOrder(t *testing.T, action rbac.Action, objects ...interface{}) { +func (a RBACAsserter) AssertInOrder(t *testing.T, action policy.Action, objects ...interface{}) { converted := a.convertObjects(t, objects...) 
pairs := make([]ActionObjectPair, 0, len(converted)) for _, obj := range converted { @@ -155,13 +156,13 @@ type RecordingAuthorizer struct { } type ActionObjectPair struct { - Action rbac.Action + Action policy.Action Object rbac.Object } // Pair is on the RecordingAuthorizer to be easy to find and keep the pkg // interface smaller. -func (*RecordingAuthorizer) Pair(action rbac.Action, object rbac.Objecter) ActionObjectPair { +func (*RecordingAuthorizer) Pair(action policy.Action, object rbac.Objecter) ActionObjectPair { return ActionObjectPair{ Action: action, Object: object.RBACObject(), @@ -248,7 +249,7 @@ func (r *RecordingAuthorizer) AssertActor(t *testing.T, actor rbac.Subject, did } // recordAuthorize is the internal method that records the Authorize() call. -func (r *RecordingAuthorizer) recordAuthorize(subject rbac.Subject, action rbac.Action, object rbac.Object) { +func (r *RecordingAuthorizer) recordAuthorize(subject rbac.Subject, action policy.Action, object rbac.Object) { r.Lock() defer r.Unlock() @@ -283,7 +284,7 @@ func caller(skip int) string { return str } -func (r *RecordingAuthorizer) Authorize(ctx context.Context, subject rbac.Subject, action rbac.Action, object rbac.Object) error { +func (r *RecordingAuthorizer) Authorize(ctx context.Context, subject rbac.Subject, action policy.Action, object rbac.Object) error { r.recordAuthorize(subject, action, object) if r.Wrapped == nil { panic("Developer error: RecordingAuthorizer.Wrapped is nil") @@ -291,7 +292,7 @@ func (r *RecordingAuthorizer) Authorize(ctx context.Context, subject rbac.Subjec return r.Wrapped.Authorize(ctx, subject, action, object) } -func (r *RecordingAuthorizer) Prepare(ctx context.Context, subject rbac.Subject, action rbac.Action, objectType string) (rbac.PreparedAuthorized, error) { +func (r *RecordingAuthorizer) Prepare(ctx context.Context, subject rbac.Subject, action policy.Action, objectType string) (rbac.PreparedAuthorized, error) { r.RLock() defer r.RUnlock() if r.Wrapped == 
nil { @@ -325,7 +326,7 @@ type PreparedRecorder struct { rec *RecordingAuthorizer prepped rbac.PreparedAuthorized subject rbac.Subject - action rbac.Action + action policy.Action rw sync.Mutex usingSQL bool @@ -357,11 +358,11 @@ type FakeAuthorizer struct { var _ rbac.Authorizer = (*FakeAuthorizer)(nil) -func (d *FakeAuthorizer) Authorize(_ context.Context, _ rbac.Subject, _ rbac.Action, _ rbac.Object) error { +func (d *FakeAuthorizer) Authorize(_ context.Context, _ rbac.Subject, _ policy.Action, _ rbac.Object) error { return d.AlwaysReturn } -func (d *FakeAuthorizer) Prepare(_ context.Context, subject rbac.Subject, action rbac.Action, _ string) (rbac.PreparedAuthorized, error) { +func (d *FakeAuthorizer) Prepare(_ context.Context, subject rbac.Subject, action policy.Action, _ string) (rbac.PreparedAuthorized, error) { return &fakePreparedAuthorizer{ Original: d, Subject: subject, @@ -377,7 +378,7 @@ type fakePreparedAuthorizer struct { sync.RWMutex Original *FakeAuthorizer Subject rbac.Subject - Action rbac.Action + Action policy.Action } func (f *fakePreparedAuthorizer) Authorize(ctx context.Context, object rbac.Object) error { @@ -392,7 +393,7 @@ func (*fakePreparedAuthorizer) CompileToSQL(_ context.Context, _ regosql.Convert // Random rbac helper funcs -func RandomRBACAction() rbac.Action { +func RandomRBACAction() policy.Action { all := rbac.AllActions() return all[must(cryptorand.Intn(len(all)))] } @@ -403,10 +404,10 @@ func RandomRBACObject() rbac.Object { Owner: uuid.NewString(), OrgID: uuid.NewString(), Type: randomRBACType(), - ACLUserList: map[string][]rbac.Action{ + ACLUserList: map[string][]policy.Action{ namesgenerator.GetRandomName(1): {RandomRBACAction()}, }, - ACLGroupList: map[string][]rbac.Action{ + ACLGroupList: map[string][]policy.Action{ namesgenerator.GetRandomName(1): {RandomRBACAction()}, }, } diff --git a/coderd/coderdtest/authorize_test.go b/coderd/coderdtest/authorize_test.go index 13a04200a9d2f..5cdcd26869cf3 100644 --- 
a/coderd/coderdtest/authorize_test.go +++ b/coderd/coderdtest/authorize_test.go @@ -9,6 +9,7 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" ) func TestAuthzRecorder(t *testing.T) { @@ -101,7 +102,7 @@ func TestAuthzRecorder(t *testing.T) { } // fuzzAuthzPrep has same action and object types for all calls. -func fuzzAuthzPrep(t *testing.T, prep rbac.PreparedAuthorized, n int, action rbac.Action, objectType string) []coderdtest.ActionObjectPair { +func fuzzAuthzPrep(t *testing.T, prep rbac.PreparedAuthorized, n int, action policy.Action, objectType string) []coderdtest.ActionObjectPair { t.Helper() pairs := make([]coderdtest.ActionObjectPair, 0, n) diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index aaf623c7a70b5..3d9129928c811 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -21,6 +21,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi/httpapiconstraints" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/provisionersdk" ) @@ -130,7 +131,7 @@ func (q *querier) Wrappers() []string { } // authorizeContext is a helper function to authorize an action on an object. -func (q *querier) authorizeContext(ctx context.Context, action rbac.Action, object rbac.Objecter) error { +func (q *querier) authorizeContext(ctx context.Context, action policy.Action, object rbac.Objecter) error { act, ok := ActorFromContext(ctx) if !ok { return NoActorError @@ -161,20 +162,20 @@ var ( { Name: "provisionerd", DisplayName: "Provisioner Daemon", - Site: rbac.Permissions(map[string][]rbac.Action{ + Site: rbac.Permissions(map[string][]policy.Action{ // TODO: Add ProvisionerJob resource type. 
- rbac.ResourceFile.Type: {rbac.ActionRead}, + rbac.ResourceFile.Type: {policy.ActionRead}, rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceTemplate.Type: {rbac.ActionRead, rbac.ActionUpdate}, - rbac.ResourceUser.Type: {rbac.ActionRead}, - rbac.ResourceWorkspace.Type: {rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, - rbac.ResourceWorkspaceBuild.Type: {rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, - rbac.ResourceUserData.Type: {rbac.ActionRead, rbac.ActionUpdate}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceUser.Type: {policy.ActionRead}, + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceWorkspaceBuild.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceUserData.Type: {policy.ActionRead, policy.ActionUpdate}, rbac.ResourceAPIKey.Type: {rbac.WildcardSymbol}, // When org scoped provisioner credentials are implemented, // this can be reduced to read a specific org. 
- rbac.ResourceOrganization.Type: {rbac.ActionRead}, - rbac.ResourceGroup.Type: {rbac.ActionRead}, + rbac.ResourceOrganization.Type: {policy.ActionRead}, + rbac.ResourceGroup.Type: {policy.ActionRead}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -190,12 +191,12 @@ var ( { Name: "autostart", DisplayName: "Autostart Daemon", - Site: rbac.Permissions(map[string][]rbac.Action{ + Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceTemplate.Type: {rbac.ActionRead, rbac.ActionUpdate}, - rbac.ResourceWorkspace.Type: {rbac.ActionRead, rbac.ActionUpdate}, - rbac.ResourceWorkspaceBuild.Type: {rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, - rbac.ResourceUser.Type: {rbac.ActionRead}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceWorkspaceBuild.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceUser.Type: {policy.ActionRead}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -212,10 +213,10 @@ var ( { Name: "hangdetector", DisplayName: "Hang Detector Daemon", - Site: rbac.Permissions(map[string][]rbac.Action{ + Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceTemplate.Type: {rbac.ActionRead}, - rbac.ResourceWorkspace.Type: {rbac.ActionRead, rbac.ActionUpdate}, + rbac.ResourceTemplate.Type: {policy.ActionRead}, + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -231,22 +232,22 @@ var ( { Name: "system", DisplayName: "Coder", - Site: rbac.Permissions(map[string][]rbac.Action{ - rbac.ResourceWildcard.Type: {rbac.ActionRead}, - rbac.ResourceAPIKey.Type: {rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, - rbac.ResourceGroup.Type: {rbac.ActionCreate, 
rbac.ActionUpdate}, - rbac.ResourceRoleAssignment.Type: {rbac.ActionCreate, rbac.ActionDelete}, + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWildcard.Type: {policy.ActionRead}, + rbac.ResourceAPIKey.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceGroup.Type: {policy.ActionCreate, policy.ActionUpdate}, + rbac.ResourceRoleAssignment.Type: {policy.ActionCreate, policy.ActionDelete}, rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceOrganization.Type: {rbac.ActionCreate, rbac.ActionRead}, - rbac.ResourceOrganizationMember.Type: {rbac.ActionCreate}, - rbac.ResourceOrgRoleAssignment.Type: {rbac.ActionCreate}, - rbac.ResourceProvisionerDaemon.Type: {rbac.ActionCreate, rbac.ActionUpdate}, - rbac.ResourceUser.Type: {rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, - rbac.ResourceUserData.Type: {rbac.ActionCreate, rbac.ActionUpdate}, - rbac.ResourceWorkspace.Type: {rbac.ActionUpdate}, - rbac.ResourceWorkspaceBuild.Type: {rbac.ActionUpdate}, - rbac.ResourceWorkspaceExecution.Type: {rbac.ActionCreate}, - rbac.ResourceWorkspaceProxy.Type: {rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + rbac.ResourceOrganization.Type: {policy.ActionCreate, policy.ActionRead}, + rbac.ResourceOrganizationMember.Type: {policy.ActionCreate}, + rbac.ResourceOrgRoleAssignment.Type: {policy.ActionCreate}, + rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionUpdate}, + rbac.ResourceUser.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceUserData.Type: {policy.ActionCreate, policy.ActionUpdate}, + rbac.ResourceWorkspace.Type: {policy.ActionUpdate}, + rbac.ResourceWorkspaceBuild.Type: {policy.ActionUpdate}, + rbac.ResourceWorkspaceExecution.Type: {policy.ActionCreate}, + rbac.ResourceWorkspaceProxy.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -302,7 +303,7 @@ 
func As(ctx context.Context, actor rbac.Subject) context.Context { // Generic functions used to implement the database.Store methods. // -// insert runs an rbac.ActionCreate on the rbac object argument before +// insert runs an policy.ActionCreate on the rbac object argument before // running the insertFunc. The insertFunc is expected to return the object that // was inserted. func insert[ @@ -323,7 +324,7 @@ func insert[ } // Authorize the action - err = authorizer.Authorize(ctx, act, rbac.ActionCreate, object.RBACObject()) + err = authorizer.Authorize(ctx, act, policy.ActionCreate, object.RBACObject()) if err != nil { return empty, logNotAuthorizedError(ctx, logger, err) } @@ -345,7 +346,7 @@ func deleteQ[ deleteFunc Delete, ) Delete { return fetchAndExec(logger, authorizer, - rbac.ActionDelete, fetchFunc, deleteFunc) + policy.ActionDelete, fetchFunc, deleteFunc) } func updateWithReturn[ @@ -359,7 +360,7 @@ func updateWithReturn[ fetchFunc Fetch, updateQuery UpdateQuery, ) UpdateQuery { - return fetchAndQuery(logger, authorizer, rbac.ActionUpdate, fetchFunc, updateQuery) + return fetchAndQuery(logger, authorizer, policy.ActionUpdate, fetchFunc, updateQuery) } func update[ @@ -373,7 +374,7 @@ func update[ fetchFunc Fetch, updateExec Exec, ) Exec { - return fetchAndExec(logger, authorizer, rbac.ActionUpdate, fetchFunc, updateExec) + return fetchAndExec(logger, authorizer, policy.ActionUpdate, fetchFunc, updateExec) } // fetch is a generic function that wraps a database @@ -406,7 +407,7 @@ func fetch[ } // Authorize the action - err = authorizer.Authorize(ctx, act, rbac.ActionRead, object.RBACObject()) + err = authorizer.Authorize(ctx, act, policy.ActionRead, object.RBACObject()) if err != nil { return empty, logNotAuthorizedError(ctx, logger, err) } @@ -426,7 +427,7 @@ func fetchAndExec[ ]( logger slog.Logger, authorizer rbac.Authorizer, - action rbac.Action, + action policy.Action, fetchFunc Fetch, execFunc Exec, ) Exec { @@ -452,7 +453,7 @@ func fetchAndQuery[ ]( 
logger slog.Logger, authorizer rbac.Authorizer, - action rbac.Action, + action policy.Action, fetchFunc Fetch, queryFunc Query, ) Query { @@ -503,13 +504,13 @@ func fetchWithPostFilter[ } // Authorize the action - return rbac.Filter(ctx, authorizer, act, rbac.ActionRead, objects) + return rbac.Filter(ctx, authorizer, act, policy.ActionRead, objects) } } // prepareSQLFilter is a helper function that prepares a SQL filter using the // given authorization context. -func prepareSQLFilter(ctx context.Context, authorizer rbac.Authorizer, action rbac.Action, resourceType string) (rbac.PreparedAuthorized, error) { +func prepareSQLFilter(ctx context.Context, authorizer rbac.Authorizer, action policy.Action, resourceType string) (rbac.PreparedAuthorized, error) { act, ok := ActorFromContext(ctx) if !ok { return nil, NoActorError @@ -543,7 +544,7 @@ func (q *querier) authorizeUpdateFileTemplate(ctx context.Context, file database // 1, so check them all. for _, tpl := range tpls { // If the user has update access to any template, they have read access to the file. 
- if err := q.authorizeContext(ctx, rbac.ActionUpdate, tpl); err == nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, tpl); err == nil { return nil } } @@ -584,13 +585,13 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r } if len(added) > 0 { - if err := q.authorizeContext(ctx, rbac.ActionCreate, roleAssign); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, roleAssign); err != nil { return err } } if len(removed) > 0 { - if err := q.authorizeContext(ctx, rbac.ActionDelete, roleAssign); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, roleAssign); err != nil { return err } } @@ -660,7 +661,7 @@ func (q *querier) AcquireLock(ctx context.Context, id int64) error { // TODO: We need to create a ProvisionerJob resource type func (q *querier) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { // return database.ProvisionerJob{}, err // } return q.db.AcquireProvisionerJob(ctx, arg) @@ -676,7 +677,7 @@ func (q *querier) ActivityBumpWorkspace(ctx context.Context, arg database.Activi func (q *querier) AllUserIDs(ctx context.Context) ([]uuid.UUID, error) { // Although this technically only reads users, only system-related functions should be // allowed to call this. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.AllUserIDs(ctx) @@ -687,7 +688,7 @@ func (q *querier) ArchiveUnusedTemplateVersions(ctx context.Context, arg databas if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, tpl); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, tpl); err != nil { return nil, err } return q.db.ArchiveUnusedTemplateVersions(ctx, arg) @@ -696,28 +697,28 @@ func (q *querier) ArchiveUnusedTemplateVersions(ctx context.Context, arg databas func (q *querier) BatchUpdateWorkspaceLastUsedAt(ctx context.Context, arg database.BatchUpdateWorkspaceLastUsedAtParams) error { // Could be any workspace and checking auth to each workspace is overkill for the purpose // of this function. - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceWorkspace.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceWorkspace.All()); err != nil { return err } return q.db.BatchUpdateWorkspaceLastUsedAt(ctx, arg) } func (q *querier) CleanTailnetCoordinators(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.CleanTailnetCoordinators(ctx) } func (q *querier) CleanTailnetLostPeers(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.CleanTailnetLostPeers(ctx) } func (q *querier) CleanTailnetTunnels(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, 
rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.CleanTailnetTunnels(ctx) @@ -729,7 +730,7 @@ func (q *querier) DeleteAPIKeyByID(ctx context.Context, id string) error { func (q *querier) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { // TODO: This is not 100% correct because it omits apikey IDs. - err := q.authorizeContext(ctx, rbac.ActionDelete, + err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAPIKey.WithOwner(userID.String())) if err != nil { return err @@ -738,14 +739,14 @@ func (q *querier) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) e } func (q *querier) DeleteAllTailnetClientSubscriptions(ctx context.Context, arg database.DeleteAllTailnetClientSubscriptionsParams) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.DeleteAllTailnetClientSubscriptions(ctx, arg) } func (q *querier) DeleteAllTailnetTunnels(ctx context.Context, arg database.DeleteAllTailnetTunnelsParams) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.DeleteAllTailnetTunnels(ctx, arg) @@ -753,7 +754,7 @@ func (q *querier) DeleteAllTailnetTunnels(ctx context.Context, arg database.Dele func (q *querier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { // TODO: This is not 100% correct because it omits apikey IDs. 
- err := q.authorizeContext(ctx, rbac.ActionDelete, + err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceAPIKey.WithOwner(userID.String())) if err != nil { return err @@ -762,7 +763,7 @@ func (q *querier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, u } func (q *querier) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.DeleteCoordinator(ctx, id) @@ -803,7 +804,7 @@ func (q *querier) DeleteLicense(ctx context.Context, id int32) (int32, error) { } func (q *querier) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderApp); err != nil { return err } return q.db.DeleteOAuth2ProviderAppByID(ctx, id) @@ -814,14 +815,14 @@ func (q *querier) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.U if err != nil { return err } - if err := q.authorizeContext(ctx, rbac.ActionDelete, code); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, code); err != nil { return err } return q.db.DeleteOAuth2ProviderAppCodeByID(ctx, id) } func (q *querier) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { return err } @@ -829,14 +830,14 @@ func (q *querier) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context } func (q *querier) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { - if 
err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderAppSecret); err != nil { return err } return q.db.DeleteOAuth2ProviderAppSecretByID(ctx, id) } func (q *querier) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { return err } @@ -844,63 +845,63 @@ func (q *querier) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Contex } func (q *querier) DeleteOldProvisionerDaemons(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err } return q.db.DeleteOldProvisionerDaemons(ctx) } func (q *querier) DeleteOldWorkspaceAgentLogs(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err } return q.db.DeleteOldWorkspaceAgentLogs(ctx) } func (q *querier) DeleteOldWorkspaceAgentStats(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err } return q.db.DeleteOldWorkspaceAgentStats(ctx) } func (q *querier) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err } 
return q.db.DeleteReplicasUpdatedBefore(ctx, updatedAt) } func (q *querier) DeleteTailnetAgent(ctx context.Context, arg database.DeleteTailnetAgentParams) (database.DeleteTailnetAgentRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return database.DeleteTailnetAgentRow{}, err } return q.db.DeleteTailnetAgent(ctx, arg) } func (q *querier) DeleteTailnetClient(ctx context.Context, arg database.DeleteTailnetClientParams) (database.DeleteTailnetClientRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return database.DeleteTailnetClientRow{}, err } return q.db.DeleteTailnetClient(ctx, arg) } func (q *querier) DeleteTailnetClientSubscription(ctx context.Context, arg database.DeleteTailnetClientSubscriptionParams) error { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.DeleteTailnetClientSubscription(ctx, arg) } func (q *querier) DeleteTailnetPeer(ctx context.Context, arg database.DeleteTailnetPeerParams) (database.DeleteTailnetPeerRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return database.DeleteTailnetPeerRow{}, err } return q.db.DeleteTailnetPeer(ctx, arg) } func (q *querier) DeleteTailnetTunnel(ctx context.Context, arg database.DeleteTailnetTunnelParams) (database.DeleteTailnetTunnelRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionDelete, rbac.ResourceTailnetCoordinator); 
err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceTailnetCoordinator); err != nil { return database.DeleteTailnetTunnelRow{}, err } return q.db.DeleteTailnetTunnel(ctx, arg) @@ -913,7 +914,7 @@ func (q *querier) DeleteWorkspaceAgentPortShare(ctx context.Context, arg databas } // deleting a workspace port share is more akin to just updating the workspace. - if err = q.authorizeContext(ctx, rbac.ActionUpdate, w.RBACObject()); err != nil { + if err = q.authorizeContext(ctx, policy.ActionUpdate, w.RBACObject()); err != nil { return xerrors.Errorf("authorize context: %w", err) } @@ -926,7 +927,7 @@ func (q *querier) DeleteWorkspaceAgentPortSharesByTemplate(ctx context.Context, return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return err } @@ -961,7 +962,7 @@ func (q *querier) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Tim } func (q *querier) GetActiveUserCount(ctx context.Context) (int64, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return 0, err } return q.db.GetActiveUserCount(ctx) @@ -969,35 +970,35 @@ func (q *querier) GetActiveUserCount(ctx context.Context) (int64, error) { func (q *querier) GetActiveWorkspaceBuildsByTemplateID(ctx context.Context, templateID uuid.UUID) ([]database.WorkspaceBuild, error) { // This is a system-only function. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return []database.WorkspaceBuild{}, err } return q.db.GetActiveWorkspaceBuildsByTemplateID(ctx, templateID) } func (q *querier) GetAllTailnetAgents(ctx context.Context) ([]database.TailnetAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return []database.TailnetAgent{}, err } return q.db.GetAllTailnetAgents(ctx) } func (q *querier) GetAllTailnetCoordinators(ctx context.Context) ([]database.TailnetCoordinator, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetAllTailnetCoordinators(ctx) } func (q *querier) GetAllTailnetPeers(ctx context.Context) ([]database.TailnetPeer, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetAllTailnetPeers(ctx) } func (q *querier) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetTunnel, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetAllTailnetTunnels(ctx) @@ -1017,28 +1018,28 @@ func (q *querier) GetAuditLogsOffset(ctx context.Context, arg database.GetAuditL // To optimize audit logs, we only check the global audit log permission once. 
// This is because we expect a large unbounded set of audit logs, and applying a SQL // filter would slow down the query for no benefit. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceAuditLog); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAuditLog); err != nil { return nil, err } return q.db.GetAuditLogsOffset(ctx, arg) } func (q *querier) GetAuthorizationUserRoles(ctx context.Context, userID uuid.UUID) (database.GetAuthorizationUserRolesRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.GetAuthorizationUserRolesRow{}, err } return q.db.GetAuthorizationUserRoles(ctx, userID) } func (q *querier) GetDBCryptKeys(ctx context.Context) ([]database.DBCryptKey, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetDBCryptKeys(ctx) } func (q *querier) GetDERPMeshKey(ctx context.Context) (string, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return "", err } return q.db.GetDERPMeshKey(ctx) @@ -1057,7 +1058,7 @@ func (q *querier) GetDefaultProxyConfig(ctx context.Context) (database.GetDefaul // Only used by metrics cache. 
func (q *querier) GetDeploymentDAUs(ctx context.Context, tzOffset int32) ([]database.GetDeploymentDAUsRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetDeploymentDAUs(ctx, tzOffset) @@ -1089,7 +1090,7 @@ func (q *querier) GetFileByHashAndCreator(ctx context.Context, arg database.GetF if err != nil { return database.File{}, err } - err = q.authorizeContext(ctx, rbac.ActionRead, file) + err = q.authorizeContext(ctx, policy.ActionRead, file) if err != nil { // Check the user's access to the file's templates. if q.authorizeUpdateFileTemplate(ctx, file) != nil { @@ -1105,7 +1106,7 @@ func (q *querier) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, if err != nil { return database.File{}, err } - err = q.authorizeContext(ctx, rbac.ActionRead, file) + err = q.authorizeContext(ctx, policy.ActionRead, file) if err != nil { // Check the user's access to the file's templates. 
if q.authorizeUpdateFileTemplate(ctx, file) != nil { @@ -1117,7 +1118,7 @@ func (q *querier) GetFileByID(ctx context.Context, id uuid.UUID) (database.File, } func (q *querier) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]database.GetFileTemplatesRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetFileTemplates(ctx, fileID) @@ -1157,7 +1158,7 @@ func (q *querier) GetHealthSettings(ctx context.Context) (string, error) { // TODO: We need to create a ProvisionerJob resource type func (q *querier) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { // return nil, err // } return q.db.GetHungProvisionerJobs(ctx, hungSince) @@ -1171,7 +1172,7 @@ func (q *querier) GetJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg } func (q *querier) GetLastUpdateCheck(ctx context.Context) (string, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return "", err } return q.db.GetLastUpdateCheck(ctx) @@ -1189,7 +1190,7 @@ func (q *querier) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.Work // This is because we need to query for all related workspaces to the returned builds. // This is a very inefficient method of fetching the latest workspace builds. // We should just join the rbac properties. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetLatestWorkspaceBuilds(ctx) @@ -1197,7 +1198,7 @@ func (q *querier) GetLatestWorkspaceBuilds(ctx context.Context) ([]database.Work func (q *querier) GetLatestWorkspaceBuildsByWorkspaceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceBuild, error) { // This function is a system function until we implement a join for workspace builds. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } @@ -1226,7 +1227,7 @@ func (q *querier) GetNotificationBanners(ctx context.Context) (string, error) { } func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.GetOAuth2ProviderAppByID(ctx, id) @@ -1241,7 +1242,7 @@ func (q *querier) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPr } func (q *querier) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.GetOAuth2ProviderAppSecretByID(ctx, id) @@ -1252,7 +1253,7 @@ func (q *querier) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secret } func (q *querier) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID 
uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { return []database.OAuth2ProviderAppSecret{}, err } return q.db.GetOAuth2ProviderAppSecretsByAppID(ctx, appID) @@ -1268,14 +1269,14 @@ func (q *querier) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPre if err != nil { return database.OAuth2ProviderAppToken{}, err } - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { return database.OAuth2ProviderAppToken{}, err } return token, nil } func (q *querier) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { return []database.OAuth2ProviderApp{}, err } return q.db.GetOAuth2ProviderApps(ctx) @@ -1283,7 +1284,7 @@ func (q *querier) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2P func (q *querier) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { // This authz check is to make sure the caller can read all their own tokens. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(userID.String())); err != nil { return []database.GetOAuth2ProviderAppsByUserIDRow{}, err } @@ -1291,7 +1292,7 @@ func (q *querier) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid } func (q *querier) GetOAuthSigningKey(ctx context.Context) (string, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return "", err } return q.db.GetOAuthSigningKey(ctx) @@ -1344,7 +1345,7 @@ func (q *querier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUI object = version.RBACObject(tpl) } - err = q.authorizeContext(ctx, rbac.ActionRead, object) + err = q.authorizeContext(ctx, policy.ActionRead, object) if err != nil { return nil, err } @@ -1355,7 +1356,7 @@ func (q *querier) GetPreviousTemplateVersion(ctx context.Context, arg database.G // An actor can read the previous template version if they can read the related template. // If no linked template exists, we check if the actor can read *a* template. 
if !arg.TemplateID.Valid { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplate.InOrg(arg.OrganizationID)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplate.InOrg(arg.OrganizationID)); err != nil { return database.TemplateVersion{}, err } } @@ -1401,7 +1402,7 @@ func (q *querier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (data // TODO: we need to add a provisioner job resource func (q *querier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { // return nil, err // } return q.db.GetProvisionerJobsByIDs(ctx, ids) @@ -1414,7 +1415,7 @@ func (q *querier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, // TODO: We need to create a ProvisionerJob resource type func (q *querier) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { // return nil, err // } return q.db.GetProvisionerJobsCreatedAfter(ctx, createdAt) @@ -1430,7 +1431,7 @@ func (q *querier) GetProvisionerLogsAfterID(ctx context.Context, arg database.Ge } func (q *querier) GetQuotaAllowanceForUser(ctx context.Context, userID uuid.UUID) (int64, error) { - err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceUserObject(userID)) + err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceUserObject(userID)) if err != nil { return -1, err } @@ -1438,7 +1439,7 @@ func (q *querier) GetQuotaAllowanceForUser(ctx context.Context, userID uuid.UUID } func (q *querier) GetQuotaConsumedForUser(ctx context.Context, userID uuid.UUID) (int64, error) { - err 
:= q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceUserObject(userID)) + err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceUserObject(userID)) if err != nil { return -1, err } @@ -1446,49 +1447,49 @@ func (q *querier) GetQuotaConsumedForUser(ctx context.Context, userID uuid.UUID) } func (q *querier) GetReplicaByID(ctx context.Context, id uuid.UUID) (database.Replica, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.Replica{}, err } return q.db.GetReplicaByID(ctx, id) } func (q *querier) GetReplicasUpdatedAfter(ctx context.Context, updatedAt time.Time) ([]database.Replica, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetReplicasUpdatedAfter(ctx, updatedAt) } func (q *querier) GetTailnetAgents(ctx context.Context, id uuid.UUID) ([]database.TailnetAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetTailnetAgents(ctx, id) } func (q *querier) GetTailnetClientsForAgent(ctx context.Context, agentID uuid.UUID) ([]database.TailnetClient, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetTailnetClientsForAgent(ctx, agentID) } func (q *querier) GetTailnetPeers(ctx context.Context, id uuid.UUID) ([]database.TailnetPeer, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := 
q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetTailnetPeers(ctx, id) } func (q *querier) GetTailnetTunnelPeerBindings(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerBindingsRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetTailnetTunnelPeerBindings(ctx, srcID) } func (q *querier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) ([]database.GetTailnetTunnelPeerIDsRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTailnetCoordinator); err != nil { return nil, err } return q.db.GetTailnetTunnelPeerIDs(ctx, srcID) @@ -1497,19 +1498,19 @@ func (q *querier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) func (q *querier) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { // Used by TemplateAppInsights endpoint // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1519,7 +1520,7 @@ func (q *querier) GetTemplateAppInsights(ctx context.Context, arg database.GetTe func (q *querier) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) ([]database.GetTemplateAppInsightsByTemplateRow, error) { // Only used by prometheus metrics, so we don't strictly need to check update template perms. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { return nil, err } return q.db.GetTemplateAppInsightsByTemplate(ctx, arg) @@ -1527,7 +1528,7 @@ func (q *querier) GetTemplateAppInsightsByTemplate(ctx context.Context, arg data // Only used by metrics cache. 
func (q *querier) GetTemplateAverageBuildTime(ctx context.Context, arg database.GetTemplateAverageBuildTimeParams) (database.GetTemplateAverageBuildTimeRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.GetTemplateAverageBuildTimeRow{}, err } return q.db.GetTemplateAverageBuildTime(ctx, arg) @@ -1543,7 +1544,7 @@ func (q *querier) GetTemplateByOrganizationAndName(ctx context.Context, arg data // Only used by metrics cache. func (q *querier) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateDAUsParams) ([]database.GetTemplateDAUsRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetTemplateDAUs(ctx, arg) @@ -1552,19 +1553,19 @@ func (q *querier) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateD func (q *querier) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { // Used by TemplateInsights endpoint // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return database.GetTemplateInsightsRow{}, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return database.GetTemplateInsightsRow{}, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return database.GetTemplateInsightsRow{}, err } } @@ -1575,19 +1576,19 @@ func (q *querier) GetTemplateInsights(ctx context.Context, arg database.GetTempl func (q *querier) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { // Used by TemplateInsights endpoint // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1597,7 +1598,7 @@ func (q *querier) GetTemplateInsightsByInterval(ctx context.Context, arg databas func (q *querier) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { // Only used by prometheus metrics collector. No need to check update template perms. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { return nil, err } return q.db.GetTemplateInsightsByTemplate(ctx, arg) @@ -1606,19 +1607,19 @@ func (q *querier) GetTemplateInsightsByTemplate(ctx context.Context, arg databas func (q *querier) GetTemplateParameterInsights(ctx context.Context, arg database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { // Used by both insights endpoint and prometheus collector. // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1629,19 +1630,19 @@ func (q *querier) GetTemplateParameterInsights(ctx context.Context, arg database func (q *querier) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { // Used by dbrollup tests, use same safe-guard as other insights endpoints. // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1656,7 +1657,7 @@ func (q *querier) GetTemplateVersionByID(ctx context.Context, tvid uuid.UUID) (d } if !tv.TemplateID.Valid { // If no linked template exists, check if the actor can read a template in the organization. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { return database.TemplateVersion{}, err } } else if _, err := q.GetTemplateByID(ctx, tv.TemplateID.UUID); err != nil { @@ -1673,7 +1674,7 @@ func (q *querier) GetTemplateVersionByJobID(ctx context.Context, jobID uuid.UUID } if !tv.TemplateID.Valid { // If no linked template exists, check if the actor can read a template in the organization. 
- if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { return database.TemplateVersion{}, err } } else if _, err := q.GetTemplateByID(ctx, tv.TemplateID.UUID); err != nil { @@ -1690,7 +1691,7 @@ func (q *querier) GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg } if !tv.TemplateID.Valid { // If no linked template exists, check if the actor can read a template in the organization. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplate.InOrg(tv.OrganizationID)); err != nil { return database.TemplateVersion{}, err } } else if _, err := q.GetTemplateByID(ctx, tv.TemplateID.UUID); err != nil { @@ -1718,7 +1719,7 @@ func (q *querier) GetTemplateVersionParameters(ctx context.Context, templateVers object = tv.RBACObject(template) } - if err := q.authorizeContext(ctx, rbac.ActionRead, object); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, object); err != nil { return nil, err } return q.db.GetTemplateVersionParameters(ctx, templateVersionID) @@ -1741,7 +1742,7 @@ func (q *querier) GetTemplateVersionVariables(ctx context.Context, templateVersi object = tv.RBACObject(template) } - if err := q.authorizeContext(ctx, rbac.ActionRead, object); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, object); err != nil { return nil, err } return q.db.GetTemplateVersionVariables(ctx, templateVersionID) @@ -1750,7 +1751,7 @@ func (q *querier) GetTemplateVersionVariables(ctx context.Context, templateVersi // GetTemplateVersionsByIDs is only used for workspace build data. // The workspace is already fetched. 
func (q *querier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetTemplateVersionsByIDs(ctx, ids) @@ -1763,7 +1764,7 @@ func (q *querier) GetTemplateVersionsByTemplateID(ctx context.Context, arg datab return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionRead, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, template); err != nil { return nil, err } @@ -1772,21 +1773,21 @@ func (q *querier) GetTemplateVersionsByTemplateID(ctx context.Context, arg datab func (q *querier) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.TemplateVersion, error) { // An actor can read execute this query if they can read all templates. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplate.All()); err != nil { return nil, err } return q.db.GetTemplateVersionsCreatedAfter(ctx, createdAt) } func (q *querier) GetTemplates(ctx context.Context) ([]database.Template, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetTemplates(ctx) } func (q *querier) GetTemplatesWithFilter(ctx context.Context, arg database.GetTemplatesWithFilterParams) ([]database.Template, error) { - prep, err := prepareSQLFilter(ctx, q.auth, rbac.ActionRead, rbac.ResourceTemplate.Type) + prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceTemplate.Type) if err != nil { return nil, xerrors.Errorf("(dev error) prepare sql filter: %w", err) } @@ -1794,7 +1795,7 @@ func (q *querier) 
GetTemplatesWithFilter(ctx context.Context, arg database.GetTe } func (q *querier) GetUnexpiredLicenses(ctx context.Context) ([]database.License, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetUnexpiredLicenses(ctx) @@ -1802,19 +1803,19 @@ func (q *querier) GetUnexpiredLicenses(ctx context.Context) ([]database.License, func (q *querier) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { // Used by insights endpoints. Need to check both for auditors and for regular users with template acl perms. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1831,7 +1832,7 @@ func (q *querier) GetUserByID(ctx context.Context, id uuid.UUID) (database.User, } func (q *querier) GetUserCount(ctx context.Context) (int64, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return 0, err } return q.db.GetUserCount(ctx) @@ -1839,19 +1840,19 @@ func (q *querier) GetUserCount(ctx context.Context) 
(int64, error) { func (q *querier) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { // Used by insights endpoints. Need to check both for auditors and for regular users with template acl perms. - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1860,21 +1861,21 @@ func (q *querier) GetUserLatencyInsights(ctx context.Context, arg database.GetUs } func (q *querier) GetUserLinkByLinkedID(ctx context.Context, linkedID string) (database.UserLink, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.UserLink{}, err } return q.db.GetUserLinkByLinkedID(ctx, linkedID) } func (q *querier) GetUserLinkByUserIDLoginType(ctx context.Context, arg database.GetUserLinkByUserIDLoginTypeParams) (database.UserLink, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.UserLink{}, err } return q.db.GetUserLinkByUserIDLoginType(ctx, arg) } func (q *querier) GetUserLinksByUserID(ctx context.Context, userID 
uuid.UUID) ([]database.UserLink, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetUserLinksByUserID(ctx, userID) @@ -1885,7 +1886,7 @@ func (q *querier) GetUserWorkspaceBuildParameters(ctx context.Context, params da if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionRead, u.UserWorkspaceBuildParametersObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, u.UserWorkspaceBuildParametersObject()); err != nil { return nil, err } return q.db.GetUserWorkspaceBuildParameters(ctx, params) @@ -1893,7 +1894,7 @@ func (q *querier) GetUserWorkspaceBuildParameters(ctx context.Context, params da func (q *querier) GetUsers(ctx context.Context, arg database.GetUsersParams) ([]database.GetUsersRow, error) { // This does the filtering in SQL. - prep, err := prepareSQLFilter(ctx, q.auth, rbac.ActionRead, rbac.ResourceUser.Type) + prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceUser.Type) if err != nil { return nil, xerrors.Errorf("(dev error) prepare sql filter: %w", err) } @@ -1905,7 +1906,7 @@ func (q *querier) GetUsers(ctx context.Context, arg database.GetUsersParams) ([] // itself. 
func (q *querier) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]database.User, error) { for _, uid := range ids { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceUserObject(uid)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceUserObject(uid)); err != nil { return nil, err } } @@ -1914,7 +1915,7 @@ func (q *querier) GetUsersByIDs(ctx context.Context, ids []uuid.UUID) ([]databas func (q *querier) GetWorkspaceAgentAndLatestBuildByAuthToken(ctx context.Context, authToken uuid.UUID) (database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow, error) { // This is a system function - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.GetWorkspaceAgentAndLatestBuildByAuthTokenRow{}, err } return q.db.GetWorkspaceAgentAndLatestBuildByAuthToken(ctx, authToken) @@ -1952,7 +1953,7 @@ func (q *querier) GetWorkspaceAgentLifecycleStateByID(ctx context.Context, id uu } func (q *querier) GetWorkspaceAgentLogSourcesByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentLogSource, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAgentLogSourcesByAgentIDs(ctx, ids) @@ -1972,7 +1973,7 @@ func (q *querier) GetWorkspaceAgentMetadata(ctx context.Context, arg database.Ge return nil, err } - err = q.authorizeContext(ctx, rbac.ActionRead, workspace) + err = q.authorizeContext(ctx, policy.ActionRead, workspace) if err != nil { return nil, err } @@ -1987,7 +1988,7 @@ func (q *querier) GetWorkspaceAgentPortShare(ctx context.Context, arg database.G } // reading a workspace port share is more akin to just reading the workspace. 
- if err = q.authorizeContext(ctx, rbac.ActionRead, w.RBACObject()); err != nil { + if err = q.authorizeContext(ctx, policy.ActionRead, w.RBACObject()); err != nil { return database.WorkspaceAgentPortShare{}, xerrors.Errorf("authorize context: %w", err) } @@ -1995,7 +1996,7 @@ func (q *querier) GetWorkspaceAgentPortShare(ctx context.Context, arg database.G } func (q *querier) GetWorkspaceAgentScriptsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgentScript, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAgentScriptsByAgentIDs(ctx, ids) @@ -2012,14 +2013,14 @@ func (q *querier) GetWorkspaceAgentStatsAndLabels(ctx context.Context, createdAf // GetWorkspaceAgentsByResourceIDs // The workspace/job is already fetched. func (q *querier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAgentsByResourceIDs(ctx, ids) } func (q *querier) GetWorkspaceAgentsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAgentsCreatedAfter(ctx, createdAt) @@ -2053,14 +2054,14 @@ func (q *querier) GetWorkspaceAppsByAgentID(ctx context.Context, agentID uuid.UU // GetWorkspaceAppsByAgentIDs // The workspace/job is already fetched. 
func (q *querier) GetWorkspaceAppsByAgentIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAppsByAgentIDs(ctx, ids) } func (q *querier) GetWorkspaceAppsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceAppsCreatedAfter(ctx, createdAt) @@ -2119,7 +2120,7 @@ func (q *querier) GetWorkspaceBuildsByWorkspaceID(ctx context.Context, arg datab // telemetry data. Never called by a user. func (q *querier) GetWorkspaceBuildsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceBuild, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceBuildsCreatedAfter(ctx, createdAt) @@ -2148,7 +2149,7 @@ func (q *querier) GetWorkspaceProxies(ctx context.Context) ([]database.Workspace } func (q *querier) GetWorkspaceProxyByHostname(ctx context.Context, params database.GetWorkspaceProxyByHostnameParams) (database.WorkspaceProxy, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return database.WorkspaceProxy{}, err } return q.db.GetWorkspaceProxyByHostname(ctx, params) @@ -2180,14 +2181,14 @@ func (q *querier) GetWorkspaceResourceByID(ctx context.Context, id uuid.UUID) (d // GetWorkspaceResourceMetadataByResourceIDs is only used for build data. 
// The workspace/job is already fetched. func (q *querier) GetWorkspaceResourceMetadataByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResourceMetadatum, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceResourceMetadataByResourceIDs(ctx, ids) } func (q *querier) GetWorkspaceResourceMetadataCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResourceMetadatum, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceResourceMetadataCreatedAfter(ctx, createdAt) @@ -2232,7 +2233,7 @@ func (q *querier) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.U return nil, xerrors.Errorf("unknown job type: %s", job.Type) } - if err := q.authorizeContext(ctx, rbac.ActionRead, obj); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, obj); err != nil { return nil, err } return q.db.GetWorkspaceResourcesByJobID(ctx, jobID) @@ -2242,28 +2243,28 @@ func (q *querier) GetWorkspaceResourcesByJobID(ctx context.Context, jobID uuid.U // The workspace is already fetched. // TODO: Find a way to replace this with proper authz. 
func (q *querier) GetWorkspaceResourcesByJobIDs(ctx context.Context, ids []uuid.UUID) ([]database.WorkspaceResource, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceResourcesByJobIDs(ctx, ids) } func (q *querier) GetWorkspaceResourcesCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.WorkspaceResource, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceResourcesCreatedAfter(ctx, createdAt) } func (q *querier) GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx context.Context, templateIds []uuid.UUID) ([]database.GetWorkspaceUniqueOwnerCountByTemplateIDsRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionRead, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { return nil, err } return q.db.GetWorkspaceUniqueOwnerCountByTemplateIDs(ctx, templateIds) } func (q *querier) GetWorkspaces(ctx context.Context, arg database.GetWorkspacesParams) ([]database.GetWorkspacesRow, error) { - prep, err := prepareSQLFilter(ctx, q.auth, rbac.ActionRead, rbac.ResourceWorkspace.Type) + prep, err := prepareSQLFilter(ctx, q.auth, policy.ActionRead, rbac.ResourceWorkspace.Type) if err != nil { return nil, xerrors.Errorf("(dev error) prepare sql filter: %w", err) } @@ -2290,21 +2291,21 @@ func (q *querier) InsertAuditLog(ctx context.Context, arg database.InsertAuditLo } func (q *querier) InsertDBCryptKey(ctx context.Context, arg database.InsertDBCryptKeyParams) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil 
{ return err } return q.db.InsertDBCryptKey(ctx, arg) } func (q *querier) InsertDERPMeshKey(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err } return q.db.InsertDERPMeshKey(ctx, value) } func (q *querier) InsertDeploymentID(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err } return q.db.InsertDeploymentID(ctx, value) @@ -2334,28 +2335,28 @@ func (q *querier) InsertGroupMember(ctx context.Context, arg database.InsertGrou } func (q *querier) InsertLicense(ctx context.Context, arg database.InsertLicenseParams) (database.License, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceLicense); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceLicense); err != nil { return database.License{}, err } return q.db.InsertLicense(ctx, arg) } func (q *querier) InsertMissingGroups(ctx context.Context, arg database.InsertMissingGroupsParams) ([]database.Group, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return nil, err } return q.db.InsertMissingGroups(ctx, arg) } func (q *querier) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderApp); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.InsertOAuth2ProviderApp(ctx, arg) } func (q *querier) 
InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { return database.OAuth2ProviderAppCode{}, err } @@ -2363,7 +2364,7 @@ func (q *querier) InsertOAuth2ProviderAppCode(ctx context.Context, arg database. } func (q *querier) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.InsertOAuth2ProviderAppSecret(ctx, arg) @@ -2374,7 +2375,7 @@ func (q *querier) InsertOAuth2ProviderAppToken(ctx context.Context, arg database if err != nil { return database.OAuth2ProviderAppToken{}, err } - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { return database.OAuth2ProviderAppToken{}, err } return q.db.InsertOAuth2ProviderAppToken(ctx, arg) @@ -2398,7 +2399,7 @@ func (q *querier) InsertOrganizationMember(ctx context.Context, arg database.Ins // TODO: We need to create a ProvisionerJob resource type func (q *querier) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != 
nil { // return database.ProvisionerJob{}, err // } return q.db.InsertProvisionerJob(ctx, arg) @@ -2406,14 +2407,14 @@ func (q *querier) InsertProvisionerJob(ctx context.Context, arg database.InsertP // TODO: We need to create a ProvisionerJob resource type func (q *querier) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) { - // if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { // return nil, err // } return q.db.InsertProvisionerJobLogs(ctx, arg) } func (q *querier) InsertReplica(ctx context.Context, arg database.InsertReplicaParams) (database.Replica, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.Replica{}, err } return q.db.InsertReplica(ctx, arg) @@ -2421,7 +2422,7 @@ func (q *querier) InsertReplica(ctx context.Context, arg database.InsertReplicaP func (q *querier) InsertTemplate(ctx context.Context, arg database.InsertTemplateParams) error { obj := rbac.ResourceTemplate.InOrg(arg.OrganizationID) - if err := q.authorizeContext(ctx, rbac.ActionCreate, obj); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, obj); err != nil { return err } return q.db.InsertTemplate(ctx, arg) @@ -2430,7 +2431,7 @@ func (q *querier) InsertTemplate(ctx context.Context, arg database.InsertTemplat func (q *querier) InsertTemplateVersion(ctx context.Context, arg database.InsertTemplateVersionParams) error { if !arg.TemplateID.Valid { // Making a new template version is the same permission as creating a new template. 
- err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceTemplate.InOrg(arg.OrganizationID)) + err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceTemplate.InOrg(arg.OrganizationID)) if err != nil { return err } @@ -2441,7 +2442,7 @@ func (q *querier) InsertTemplateVersion(ctx context.Context, arg database.Insert return err } // Check the create permission on the template. - err = q.authorizeContext(ctx, rbac.ActionCreate, tpl) + err = q.authorizeContext(ctx, policy.ActionCreate, tpl) if err != nil { return err } @@ -2451,14 +2452,14 @@ func (q *querier) InsertTemplateVersion(ctx context.Context, arg database.Insert } func (q *querier) InsertTemplateVersionParameter(ctx context.Context, arg database.InsertTemplateVersionParameterParams) (database.TemplateVersionParameter, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.TemplateVersionParameter{}, err } return q.db.InsertTemplateVersionParameter(ctx, arg) } func (q *querier) InsertTemplateVersionVariable(ctx context.Context, arg database.InsertTemplateVersionVariableParams) (database.TemplateVersionVariable, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.TemplateVersionVariable{}, err } return q.db.InsertTemplateVersionVariable(ctx, arg) @@ -2487,7 +2488,7 @@ func (q *querier) InsertUserGroupsByName(ctx context.Context, arg database.Inser // TODO: Should this be in system.go? 
func (q *querier) InsertUserLink(ctx context.Context, arg database.InsertUserLinkParams) (database.UserLink, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceUserObject(arg.UserID)); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceUserObject(arg.UserID)); err != nil { return database.UserLink{}, err } return q.db.InsertUserLink(ctx, arg) @@ -2499,7 +2500,7 @@ func (q *querier) InsertWorkspace(ctx context.Context, arg database.InsertWorksp } func (q *querier) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.WorkspaceAgent{}, err } return q.db.InsertWorkspaceAgent(ctx, arg) @@ -2518,7 +2519,7 @@ func (q *querier) InsertWorkspaceAgentLogs(ctx context.Context, arg database.Ins func (q *querier) InsertWorkspaceAgentMetadata(ctx context.Context, arg database.InsertWorkspaceAgentMetadataParams) error { // We don't check for workspace ownership here since the agent metadata may // be associated with an orphaned agent used by a dry run build. 
- if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err } @@ -2526,14 +2527,14 @@ func (q *querier) InsertWorkspaceAgentMetadata(ctx context.Context, arg database } func (q *querier) InsertWorkspaceAgentScripts(ctx context.Context, arg database.InsertWorkspaceAgentScriptsParams) ([]database.WorkspaceAgentScript, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return []database.WorkspaceAgentScript{}, err } return q.db.InsertWorkspaceAgentScripts(ctx, arg) } func (q *querier) InsertWorkspaceAgentStats(ctx context.Context, arg database.InsertWorkspaceAgentStatsParams) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err } @@ -2541,14 +2542,14 @@ func (q *querier) InsertWorkspaceAgentStats(ctx context.Context, arg database.In } func (q *querier) InsertWorkspaceApp(ctx context.Context, arg database.InsertWorkspaceAppParams) (database.WorkspaceApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.WorkspaceApp{}, err } return q.db.InsertWorkspaceApp(ctx, arg) } func (q *querier) InsertWorkspaceAppStats(ctx context.Context, arg database.InsertWorkspaceAppStatsParams) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return err } return q.db.InsertWorkspaceAppStats(ctx, arg) @@ -2560,9 +2561,9 @@ func (q *querier) InsertWorkspaceBuild(ctx context.Context, arg 
database.InsertW return xerrors.Errorf("get workspace by id: %w", err) } - var action rbac.Action = rbac.ActionUpdate + var action policy.Action = policy.ActionUpdate if arg.Transition == database.WorkspaceTransitionDelete { - action = rbac.ActionDelete + action = policy.ActionDelete } if err = q.authorizeContext(ctx, action, w.WorkspaceBuildRBAC(arg.Transition)); err != nil { @@ -2583,7 +2584,7 @@ func (q *querier) InsertWorkspaceBuild(ctx context.Context, arg database.InsertW // to use a non-active version then we must fail the request. if accessControl.RequireActiveVersion { if arg.TemplateVersionID != t.ActiveVersionID { - if err = q.authorizeContext(ctx, rbac.ActionUpdate, t); err != nil { + if err = q.authorizeContext(ctx, policy.ActionUpdate, t); err != nil { return xerrors.Errorf("cannot use non-active version: %w", err) } } @@ -2605,7 +2606,7 @@ func (q *querier) InsertWorkspaceBuildParameters(ctx context.Context, arg databa return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace) if err != nil { return err } @@ -2618,14 +2619,14 @@ func (q *querier) InsertWorkspaceProxy(ctx context.Context, arg database.InsertW } func (q *querier) InsertWorkspaceResource(ctx context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return database.WorkspaceResource{}, err } return q.db.InsertWorkspaceResource(ctx, arg) } func (q *querier) InsertWorkspaceResourceMetadata(ctx context.Context, arg database.InsertWorkspaceResourceMetadataParams) ([]database.WorkspaceResourceMetadatum, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { 
return nil, err } return q.db.InsertWorkspaceResourceMetadata(ctx, arg) @@ -2638,7 +2639,7 @@ func (q *querier) ListWorkspaceAgentPortShares(ctx context.Context, workspaceID } // listing port shares is more akin to reading the workspace. - if err := q.authorizeContext(ctx, rbac.ActionRead, workspace); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, workspace); err != nil { return nil, err } @@ -2651,7 +2652,7 @@ func (q *querier) ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate(ctx co return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return err } @@ -2667,14 +2668,14 @@ func (q *querier) RegisterWorkspaceProxy(ctx context.Context, arg database.Regis func (q *querier) RemoveUserFromAllGroups(ctx context.Context, userID uuid.UUID) error { // This is a system function to clear user groups in group sync. - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.RemoveUserFromAllGroups(ctx, userID) } func (q *querier) RevokeDBCryptKey(ctx context.Context, activeKeyDigest string) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.RevokeDBCryptKey(ctx, activeKeyDigest) @@ -2694,7 +2695,7 @@ func (q *querier) UnarchiveTemplateVersion(ctx context.Context, arg database.Una if err != nil { return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, tpl); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, tpl); err != nil { return err } return q.db.UnarchiveTemplateVersion(ctx, arg) @@ -2736,7 +2737,7 @@ func (q *querier) UpdateGroupByID(ctx context.Context, arg database.UpdateGroupB } func (q 
*querier) UpdateInactiveUsersToDormant(ctx context.Context, lastSeenAfter database.UpdateInactiveUsersToDormantParams) ([]database.UpdateInactiveUsersToDormantRow, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { return nil, err } return q.db.UpdateInactiveUsersToDormant(ctx, lastSeenAfter) @@ -2764,21 +2765,21 @@ func (q *querier) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemb } func (q *querier) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOAuth2ProviderApp); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.UpdateOAuth2ProviderAppByID(ctx, arg) } func (q *querier) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.UpdateOAuth2ProviderAppSecretByID(ctx, arg) } func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceProvisionerDaemon); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerDaemon); err != nil { return err } return q.db.UpdateProvisionerDaemonLastSeenAt(ctx, arg) @@ -2786,7 +2787,7 @@ func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg dat 
// TODO: We need to create a ProvisionerJob resource type func (q *querier) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error { - // if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { // return err // } return q.db.UpdateProvisionerJobByID(ctx, arg) @@ -2827,7 +2828,7 @@ func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg da } } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace) if err != nil { return err } @@ -2843,12 +2844,12 @@ func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg da if err != nil { return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, templateVersion.RBACObject(template)) + err = q.authorizeContext(ctx, policy.ActionUpdate, templateVersion.RBACObject(template)) if err != nil { return err } } else { - err = q.authorizeContext(ctx, rbac.ActionUpdate, templateVersion.RBACObjectNoTemplate()) + err = q.authorizeContext(ctx, policy.ActionUpdate, templateVersion.RBACObjectNoTemplate()) if err != nil { return err } @@ -2861,14 +2862,14 @@ func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg da // TODO: We need to create a ProvisionerJob resource type func (q *querier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error { - // if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { // return err // } return q.db.UpdateProvisionerJobWithCompleteByID(ctx, arg) } func (q *querier) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, 
rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return database.Replica{}, err } return q.db.UpdateReplica(ctx, arg) @@ -2880,7 +2881,7 @@ func (q *querier) UpdateTemplateACLByID(ctx context.Context, arg database.Update } // UpdateTemplateACL uses the ActionCreate action. Only users that can create the template // may update the ACL. - return fetchAndExec(q.log, q.auth, rbac.ActionCreate, fetch, q.db.UpdateTemplateACLByID)(ctx, arg) + return fetchAndExec(q.log, q.auth, policy.ActionCreate, fetch, q.db.UpdateTemplateACLByID)(ctx, arg) } func (q *querier) UpdateTemplateAccessControlByID(ctx context.Context, arg database.UpdateTemplateAccessControlByIDParams) error { @@ -2932,7 +2933,7 @@ func (q *querier) UpdateTemplateVersionByID(ctx context.Context, arg database.Up } obj = tpl } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, obj); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } return q.db.UpdateTemplateVersionByID(ctx, arg) @@ -2954,7 +2955,7 @@ func (q *querier) UpdateTemplateVersionDescriptionByJobID(ctx context.Context, a } obj = tpl } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, obj); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } return q.db.UpdateTemplateVersionDescriptionByJobID(ctx, arg) @@ -2976,7 +2977,7 @@ func (q *querier) UpdateTemplateVersionExternalAuthProvidersByJobID(ctx context. 
} obj = tpl } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, obj); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, obj); err != nil { return err } return q.db.UpdateTemplateVersionExternalAuthProvidersByJobID(ctx, arg) @@ -2987,7 +2988,7 @@ func (q *querier) UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, arg da return q.db.GetTemplateByID(ctx, arg.TemplateID) } - return fetchAndExec(q.log, q.auth, rbac.ActionUpdate, fetch, q.db.UpdateTemplateWorkspacesLastUsedAt)(ctx, arg) + return fetchAndExec(q.log, q.auth, policy.ActionUpdate, fetch, q.db.UpdateTemplateWorkspacesLastUsedAt)(ctx, arg) } func (q *querier) UpdateUserAppearanceSettings(ctx context.Context, arg database.UpdateUserAppearanceSettingsParams) (database.User, error) { @@ -2995,7 +2996,7 @@ func (q *querier) UpdateUserAppearanceSettings(ctx context.Context, arg database if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { return database.User{}, err } return q.db.UpdateUserAppearanceSettings(ctx, arg) @@ -3011,10 +3012,10 @@ func (q *querier) UpdateUserHashedPassword(ctx context.Context, arg database.Upd return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, user.UserDataRBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, user.UserDataRBACObject()) if err != nil { // Admins can update passwords for other users. 
- err = q.authorizeContext(ctx, rbac.ActionUpdate, user.RBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, user.RBACObject()) if err != nil { return err } @@ -3041,14 +3042,14 @@ func (q *querier) UpdateUserLink(ctx context.Context, arg database.UpdateUserLin } func (q *querier) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return database.UserLink{}, err } return q.db.UpdateUserLinkedID(ctx, arg) } func (q *querier) UpdateUserLoginType(ctx context.Context, arg database.UpdateUserLoginTypeParams) (database.User, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return database.User{}, err } return q.db.UpdateUserLoginType(ctx, arg) @@ -3059,7 +3060,7 @@ func (q *querier) UpdateUserProfile(ctx context.Context, arg database.UpdateUser if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { return database.User{}, err } return q.db.UpdateUserProfile(ctx, arg) @@ -3070,7 +3071,7 @@ func (q *querier) UpdateUserQuietHoursSchedule(ctx context.Context, arg database if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { return database.User{}, err } return q.db.UpdateUserQuietHoursSchedule(ctx, arg) @@ -3114,7 +3115,7 @@ func (q *querier) UpdateWorkspace(ctx context.Context, arg database.UpdateWorksp } func (q *querier) 
UpdateWorkspaceAgentConnectionByID(ctx context.Context, arg database.UpdateWorkspaceAgentConnectionByIDParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpdateWorkspaceAgentConnectionByID(ctx, arg) @@ -3126,7 +3127,7 @@ func (q *querier) UpdateWorkspaceAgentLifecycleStateByID(ctx context.Context, ar return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, workspace); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, workspace); err != nil { return err } @@ -3144,7 +3145,7 @@ func (q *querier) UpdateWorkspaceAgentLogOverflowByID(ctx context.Context, arg d return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, workspace); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, workspace); err != nil { return err } @@ -3157,7 +3158,7 @@ func (q *querier) UpdateWorkspaceAgentMetadata(ctx context.Context, arg database return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace) if err != nil { return err } @@ -3176,7 +3177,7 @@ func (q *querier) UpdateWorkspaceAgentStartupByID(ctx context.Context, arg datab return err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, workspace); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, workspace); err != nil { return err } @@ -3190,7 +3191,7 @@ func (q *querier) UpdateWorkspaceAppHealthByID(ctx context.Context, arg database return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace.RBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace.RBACObject()) if err != nil { return err } @@ -3203,7 +3204,7 @@ func (q *querier) UpdateWorkspaceAutomaticUpdates(ctx context.Context, arg datab return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, 
workspace.RBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace.RBACObject()) if err != nil { return err } @@ -3219,7 +3220,7 @@ func (q *querier) UpdateWorkspaceAutostart(ctx context.Context, arg database.Upd // UpdateWorkspaceBuildCostByID is used by the provisioning system to update the cost of a workspace build. func (q *querier) UpdateWorkspaceBuildCostByID(ctx context.Context, arg database.UpdateWorkspaceBuildCostByIDParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpdateWorkspaceBuildCostByID(ctx, arg) @@ -3236,7 +3237,7 @@ func (q *querier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg data return err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace.RBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace.RBACObject()) if err != nil { return err } @@ -3244,7 +3245,7 @@ func (q *querier) UpdateWorkspaceBuildDeadlineByID(ctx context.Context, arg data } func (q *querier) UpdateWorkspaceBuildProvisionerStateByID(ctx context.Context, arg database.UpdateWorkspaceBuildProvisionerStateByIDParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpdateWorkspaceBuildProvisionerStateByID(ctx, arg) @@ -3300,7 +3301,7 @@ func (q *querier) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Cont return q.db.GetTemplateByID(ctx, arg.TemplateID) } - return fetchAndExec(q.log, q.auth, rbac.ActionUpdate, fetch, q.db.UpdateWorkspacesDormantDeletingAtByTemplateID)(ctx, arg) + return fetchAndExec(q.log, q.auth, policy.ActionUpdate, fetch, q.db.UpdateWorkspacesDormantDeletingAtByTemplateID)(ctx, arg) } func (q *querier) UpsertAppSecurityKey(ctx context.Context, data 
string) error { @@ -3309,21 +3310,21 @@ func (q *querier) UpsertAppSecurityKey(ctx context.Context, data string) error { } func (q *querier) UpsertApplicationName(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { return err } return q.db.UpsertApplicationName(ctx, value) } func (q *querier) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpsertDefaultProxy(ctx, arg) } func (q *querier) UpsertHealthSettings(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { return err } return q.db.UpsertHealthSettings(ctx, value) @@ -3344,35 +3345,35 @@ func (q *querier) UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, // Only template admins should be able to write JFrog Xray scans to a workspace. // We don't want this to be a workspace-level permission because then users // could overwrite their own results. 
- if err := q.authorizeContext(ctx, rbac.ActionCreate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, template); err != nil { return err } return q.db.UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx, arg) } func (q *querier) UpsertLastUpdateCheck(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpsertLastUpdateCheck(ctx, value) } func (q *querier) UpsertLogoURL(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { return err } return q.db.UpsertLogoURL(ctx, value) } func (q *querier) UpsertNotificationBanners(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { return err } return q.db.UpsertNotificationBanners(ctx, value) } func (q *querier) UpsertOAuthSigningKey(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpsertOAuthSigningKey(ctx, value) @@ -3383,56 +3384,56 @@ func (q *querier) UpsertProvisionerDaemon(ctx context.Context, arg database.Upse if arg.Tags[provisionersdk.TagScope] == provisionersdk.ScopeUser { res.Owner = arg.Tags[provisionersdk.TagOwner] } - if err := q.authorizeContext(ctx, rbac.ActionCreate, res); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, res); err != nil { return database.ProvisionerDaemon{}, err } return 
q.db.UpsertProvisionerDaemon(ctx, arg) } func (q *querier) UpsertTailnetAgent(ctx context.Context, arg database.UpsertTailnetAgentParams) (database.TailnetAgent, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetAgent{}, err } return q.db.UpsertTailnetAgent(ctx, arg) } func (q *querier) UpsertTailnetClient(ctx context.Context, arg database.UpsertTailnetClientParams) (database.TailnetClient, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetClient{}, err } return q.db.UpsertTailnetClient(ctx, arg) } func (q *querier) UpsertTailnetClientSubscription(ctx context.Context, arg database.UpsertTailnetClientSubscriptionParams) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return err } return q.db.UpsertTailnetClientSubscription(ctx, arg) } func (q *querier) UpsertTailnetCoordinator(ctx context.Context, id uuid.UUID) (database.TailnetCoordinator, error) { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetCoordinator{}, err } return q.db.UpsertTailnetCoordinator(ctx, id) } func (q *querier) UpsertTailnetPeer(ctx context.Context, arg database.UpsertTailnetPeerParams) (database.TailnetPeer, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, 
rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetPeer{}, err } return q.db.UpsertTailnetPeer(ctx, arg) } func (q *querier) UpsertTailnetTunnel(ctx context.Context, arg database.UpsertTailnetTunnelParams) (database.TailnetTunnel, error) { - if err := q.authorizeContext(ctx, rbac.ActionCreate, rbac.ResourceTailnetCoordinator); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceTailnetCoordinator); err != nil { return database.TailnetTunnel{}, err } return q.db.UpsertTailnetTunnel(ctx, arg) } func (q *querier) UpsertTemplateUsageStats(ctx context.Context) error { - if err := q.authorizeContext(ctx, rbac.ActionUpdate, rbac.ResourceSystem); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err } return q.db.UpsertTemplateUsageStats(ctx) @@ -3444,7 +3445,7 @@ func (q *querier) UpsertWorkspaceAgentPortShare(ctx context.Context, arg databas return database.WorkspaceAgentPortShare{}, err } - err = q.authorizeContext(ctx, rbac.ActionUpdate, workspace) + err = q.authorizeContext(ctx, policy.ActionUpdate, workspace) if err != nil { return database.WorkspaceAgentPortShare{}, err } @@ -3463,7 +3464,7 @@ func (q *querier) GetTemplateGroupRoles(ctx context.Context, id uuid.UUID) ([]da if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } return q.db.GetTemplateGroupRoles(ctx, id) @@ -3475,7 +3476,7 @@ func (q *querier) GetTemplateUserRoles(ctx context.Context, id uuid.UUID) ([]dat if err != nil { return nil, err } - if err := q.authorizeContext(ctx, rbac.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { return nil, err } return q.db.GetTemplateUserRoles(ctx, id) diff --git a/coderd/database/dbauthz/dbauthz_test.go 
b/coderd/database/dbauthz/dbauthz_test.go index 48435a0141c64..92dbbb8e7bce1 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -13,6 +13,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" @@ -119,7 +120,7 @@ func TestNew(t *testing.T) { require.NoError(t, err, "must not error") require.Equal(t, exp, w, "must be equal") - rec.AssertActor(t, subj, rec.Pair(rbac.ActionRead, exp)) + rec.AssertActor(t, subj, rec.Pair(policy.ActionRead, exp)) require.NoError(t, rec.AllAsserted(), "should only be 1 rbac call") } @@ -167,11 +168,11 @@ func must[T any](value T, err error) T { func (s *MethodTestSuite) TestAPIKey() { s.Run("DeleteAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { key, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) - check.Args(key.ID).Asserts(key, rbac.ActionDelete).Returns() + check.Args(key.ID).Asserts(key, policy.ActionDelete).Returns() })) s.Run("GetAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { key, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) - check.Args(key.ID).Asserts(key, rbac.ActionRead).Returns(key) + check.Args(key.ID).Asserts(key, policy.ActionRead).Returns(key) })) s.Run("GetAPIKeyByName", s.Subtest(func(db database.Store, check *expects) { key, _ := dbgen.APIKey(s.T(), db, database.APIKey{ @@ -181,14 +182,14 @@ func (s *MethodTestSuite) TestAPIKey() { check.Args(database.GetAPIKeyByNameParams{ TokenName: key.TokenName, UserID: key.UserID, - }).Asserts(key, rbac.ActionRead).Returns(key) + }).Asserts(key, policy.ActionRead).Returns(key) })) s.Run("GetAPIKeysByLoginType", s.Subtest(func(db database.Store, check *expects) { a, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword}) b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LoginType: database.LoginTypePassword}) _, _ = dbgen.APIKey(s.T(), 
db, database.APIKey{LoginType: database.LoginTypeGithub}) check.Args(database.LoginTypePassword). - Asserts(a, rbac.ActionRead, b, rbac.ActionRead). + Asserts(a, policy.ActionRead, b, policy.ActionRead). Returns(slice.New(a, b)) })) s.Run("GetAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { @@ -200,7 +201,7 @@ func (s *MethodTestSuite) TestAPIKey() { _, _ = dbgen.APIKey(s.T(), db, database.APIKey{UserID: idC, LoginType: database.LoginTypeToken}) check.Args(database.GetAPIKeysByUserIDParams{LoginType: database.LoginTypeToken, UserID: idAB}). - Asserts(keyA, rbac.ActionRead, keyB, rbac.ActionRead). + Asserts(keyA, policy.ActionRead, keyB, policy.ActionRead). Returns(slice.New(keyA, keyB)) })) s.Run("GetAPIKeysLastUsedAfter", s.Subtest(func(db database.Store, check *expects) { @@ -208,7 +209,7 @@ func (s *MethodTestSuite) TestAPIKey() { b, _ := dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(time.Hour)}) _, _ = dbgen.APIKey(s.T(), db, database.APIKey{LastUsed: time.Now().Add(-time.Hour)}) check.Args(time.Now()). - Asserts(a, rbac.ActionRead, b, rbac.ActionRead). + Asserts(a, policy.ActionRead, b, policy.ActionRead). 
Returns(slice.New(a, b)) })) s.Run("InsertAPIKey", s.Subtest(func(db database.Store, check *expects) { @@ -217,33 +218,33 @@ func (s *MethodTestSuite) TestAPIKey() { UserID: u.ID, LoginType: database.LoginTypePassword, Scope: database.APIKeyScopeAll, - }).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), rbac.ActionCreate) + }).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), policy.ActionCreate) })) s.Run("UpdateAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { a, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) check.Args(database.UpdateAPIKeyByIDParams{ ID: a.ID, - }).Asserts(a, rbac.ActionUpdate).Returns() + }).Asserts(a, policy.ActionUpdate).Returns() })) s.Run("DeleteApplicationConnectAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { a, _ := dbgen.APIKey(s.T(), db, database.APIKey{ Scope: database.APIKeyScopeApplicationConnect, }) - check.Args(a.UserID).Asserts(rbac.ResourceAPIKey.WithOwner(a.UserID.String()), rbac.ActionDelete).Returns() + check.Args(a.UserID).Asserts(rbac.ResourceAPIKey.WithOwner(a.UserID.String()), policy.ActionDelete).Returns() })) s.Run("DeleteExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) check.Args(database.DeleteExternalAuthLinkParams{ ProviderID: a.ProviderID, UserID: a.UserID, - }).Asserts(a, rbac.ActionDelete).Returns() + }).Asserts(a, policy.ActionDelete).Returns() })) s.Run("GetExternalAuthLinksByUserID", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) b := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{ UserID: a.UserID, }) - check.Args(a.UserID).Asserts(a, rbac.ActionRead, b, rbac.ActionRead) + check.Args(a.UserID).Asserts(a, policy.ActionRead, b, policy.ActionRead) })) } @@ -252,14 +253,14 @@ func (s *MethodTestSuite) TestAuditLogs() { check.Args(database.InsertAuditLogParams{ ResourceType: 
database.ResourceTypeOrganization, Action: database.AuditActionCreate, - }).Asserts(rbac.ResourceAuditLog, rbac.ActionCreate) + }).Asserts(rbac.ResourceAuditLog, policy.ActionCreate) })) s.Run("GetAuditLogsOffset", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) _ = dbgen.AuditLog(s.T(), db, database.AuditLog{}) check.Args(database.GetAuditLogsOffsetParams{ Limit: 10, - }).Asserts(rbac.ResourceAuditLog, rbac.ActionRead) + }).Asserts(rbac.ResourceAuditLog, policy.ActionRead) })) } @@ -269,24 +270,24 @@ func (s *MethodTestSuite) TestFile() { check.Args(database.GetFileByHashAndCreatorParams{ Hash: f.Hash, CreatedBy: f.CreatedBy, - }).Asserts(f, rbac.ActionRead).Returns(f) + }).Asserts(f, policy.ActionRead).Returns(f) })) s.Run("GetFileByID", s.Subtest(func(db database.Store, check *expects) { f := dbgen.File(s.T(), db, database.File{}) - check.Args(f.ID).Asserts(f, rbac.ActionRead).Returns(f) + check.Args(f.ID).Asserts(f, policy.ActionRead).Returns(f) })) s.Run("InsertFile", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertFileParams{ CreatedBy: u.ID, - }).Asserts(rbac.ResourceFile.WithOwner(u.ID.String()), rbac.ActionCreate) + }).Asserts(rbac.ResourceFile.WithOwner(u.ID.String()), policy.ActionCreate) })) } func (s *MethodTestSuite) TestGroup() { s.Run("DeleteGroupByID", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(g.ID).Asserts(g, rbac.ActionDelete).Returns() + check.Args(g.ID).Asserts(g, policy.ActionDelete).Returns() })) s.Run("DeleteGroupMemberFromGroup", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) @@ -296,23 +297,23 @@ func (s *MethodTestSuite) TestGroup() { check.Args(database.DeleteGroupMemberFromGroupParams{ UserID: m.UserID, GroupID: g.ID, - }).Asserts(g, rbac.ActionUpdate).Returns() + }).Asserts(g, 
policy.ActionUpdate).Returns() })) s.Run("GetGroupByID", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) - check.Args(g.ID).Asserts(g, rbac.ActionRead).Returns(g) + check.Args(g.ID).Asserts(g, policy.ActionRead).Returns(g) })) s.Run("GetGroupByOrgAndName", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) check.Args(database.GetGroupByOrgAndNameParams{ OrganizationID: g.OrganizationID, Name: g.Name, - }).Asserts(g, rbac.ActionRead).Returns(g) + }).Asserts(g, policy.ActionRead).Returns(g) })) s.Run("GetGroupMembers", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) _ = dbgen.GroupMember(s.T(), db, database.GroupMember{}) - check.Args(g.ID).Asserts(g, rbac.ActionRead) + check.Args(g.ID).Asserts(g, policy.ActionRead) })) s.Run("GetGroupsByOrganizationAndUserID", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) @@ -320,25 +321,25 @@ func (s *MethodTestSuite) TestGroup() { check.Args(database.GetGroupsByOrganizationAndUserIDParams{ OrganizationID: g.OrganizationID, UserID: gm.UserID, - }).Asserts(g, rbac.ActionRead) + }).Asserts(g, policy.ActionRead) })) s.Run("InsertAllUsersGroup", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) - check.Args(o.ID).Asserts(rbac.ResourceGroup.InOrg(o.ID), rbac.ActionCreate) + check.Args(o.ID).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate) })) s.Run("InsertGroup", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) check.Args(database.InsertGroupParams{ OrganizationID: o.ID, Name: "test", - }).Asserts(rbac.ResourceGroup.InOrg(o.ID), rbac.ActionCreate) + }).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionCreate) })) s.Run("InsertGroupMember", s.Subtest(func(db database.Store, check *expects) { g := 
dbgen.Group(s.T(), db, database.Group{}) check.Args(database.InsertGroupMemberParams{ UserID: uuid.New(), GroupID: g.ID, - }).Asserts(g, rbac.ActionUpdate).Returns() + }).Asserts(g, policy.ActionUpdate).Returns() })) s.Run("InsertUserGroupsByName", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -350,7 +351,7 @@ func (s *MethodTestSuite) TestGroup() { OrganizationID: o.ID, UserID: u1.ID, GroupNames: slice.New(g1.Name, g2.Name), - }).Asserts(rbac.ResourceGroup.InOrg(o.ID), rbac.ActionUpdate).Returns() + }).Asserts(rbac.ResourceGroup.InOrg(o.ID), policy.ActionUpdate).Returns() })) s.Run("RemoveUserFromAllGroups", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -359,13 +360,13 @@ func (s *MethodTestSuite) TestGroup() { g2 := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) _ = dbgen.GroupMember(s.T(), db, database.GroupMember{GroupID: g1.ID, UserID: u1.ID}) _ = dbgen.GroupMember(s.T(), db, database.GroupMember{GroupID: g2.ID, UserID: u1.ID}) - check.Args(u1.ID).Asserts(rbac.ResourceSystem, rbac.ActionUpdate).Returns() + check.Args(u1.ID).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() })) s.Run("UpdateGroupByID", s.Subtest(func(db database.Store, check *expects) { g := dbgen.Group(s.T(), db, database.Group{}) check.Args(database.UpdateGroupByIDParams{ ID: g.ID, - }).Asserts(g, rbac.ActionUpdate) + }).Asserts(g, policy.ActionUpdate) })) } @@ -388,7 +389,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { TemplateID: tpl.ID, TemplateVersionID: uuid.Nil, JobStatus: database.NullProvisionerJobStatus{}, - }).Asserts(v.RBACObject(tpl), rbac.ActionUpdate) + }).Asserts(v.RBACObject(tpl), policy.ActionUpdate) })) s.Run("UnarchiveTemplateVersion", s.Subtest(func(db database.Store, check *expects) { j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ @@ -403,7 +404,7 @@ func (s *MethodTestSuite) 
TestProvisionerJob() { check.Args(database.UnarchiveTemplateVersionParams{ UpdatedAt: dbtime.Now(), TemplateVersionID: v.ID, - }).Asserts(v.RBACObject(tpl), rbac.ActionUpdate) + }).Asserts(v.RBACObject(tpl), policy.ActionUpdate) })) s.Run("Build/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -411,7 +412,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { Type: database.ProvisionerJobTypeWorkspaceBuild, }) _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID}) - check.Args(j.ID).Asserts(w, rbac.ActionRead).Returns(j) + check.Args(j.ID).Asserts(w, policy.ActionRead).Returns(j) })) s.Run("TemplateVersion/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) { j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ @@ -422,7 +423,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, JobID: j.ID, }) - check.Args(j.ID).Asserts(v.RBACObject(tpl), rbac.ActionRead).Returns(j) + check.Args(j.ID).Asserts(v.RBACObject(tpl), policy.ActionRead).Returns(j) })) s.Run("TemplateVersionDryRun/GetProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -435,7 +436,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { TemplateVersionID uuid.UUID `json:"template_version_id"` }{TemplateVersionID: v.ID})), }) - check.Args(j.ID).Asserts(v.RBACObject(tpl), rbac.ActionRead).Returns(j) + check.Args(j.ID).Asserts(v.RBACObject(tpl), policy.ActionRead).Returns(j) })) s.Run("Build/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{AllowUserCancelWorkspaceJobs: true}) @@ -444,7 +445,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { Type: database.ProvisionerJobTypeWorkspaceBuild, }) _ = dbgen.WorkspaceBuild(s.T(), db, 
database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID}) - check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, rbac.ActionUpdate).Returns() + check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, policy.ActionUpdate).Returns() })) s.Run("BuildFalseCancel/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{AllowUserCancelWorkspaceJobs: false}) @@ -453,7 +454,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { Type: database.ProvisionerJobTypeWorkspaceBuild, }) _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID}) - check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, rbac.ActionUpdate).Returns() + check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}).Asserts(w, policy.ActionUpdate).Returns() })) s.Run("TemplateVersion/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) { j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ @@ -465,7 +466,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { JobID: j.ID, }) check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}). - Asserts(v.RBACObject(tpl), []rbac.Action{rbac.ActionRead, rbac.ActionUpdate}).Returns() + Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) s.Run("TemplateVersionNoTemplate/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) { j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ @@ -476,7 +477,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { JobID: j.ID, }) check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}). 
- Asserts(v.RBACObjectNoTemplate(), []rbac.Action{rbac.ActionRead, rbac.ActionUpdate}).Returns() + Asserts(v.RBACObjectNoTemplate(), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) s.Run("TemplateVersionDryRun/UpdateProvisionerJobWithCancelByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -490,7 +491,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { }{TemplateVersionID: v.ID})), }) check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}). - Asserts(v.RBACObject(tpl), []rbac.Action{rbac.ActionRead, rbac.ActionUpdate}).Returns() + Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) @@ -505,7 +506,7 @@ func (s *MethodTestSuite) TestProvisionerJob() { _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{JobID: j.ID, WorkspaceID: w.ID}) check.Args(database.GetProvisionerLogsAfterIDParams{ JobID: j.ID, - }).Asserts(w, rbac.ActionRead).Returns([]database.ProvisionerJobLog{}) + }).Asserts(w, policy.ActionRead).Returns([]database.ProvisionerJobLog{}) })) } @@ -515,32 +516,32 @@ func (s *MethodTestSuite) TestLicense() { UUID: uuid.New(), }) require.NoError(s.T(), err) - check.Args().Asserts(l, rbac.ActionRead). + check.Args().Asserts(l, policy.ActionRead). Returns([]database.License{l}) })) s.Run("InsertLicense", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertLicenseParams{}). 
- Asserts(rbac.ResourceLicense, rbac.ActionCreate) + Asserts(rbac.ResourceLicense, policy.ActionCreate) })) s.Run("UpsertLogoURL", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) + check.Args("value").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) })) s.Run("UpsertNotificationBanners", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) + check.Args("value").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) })) s.Run("GetLicenseByID", s.Subtest(func(db database.Store, check *expects) { l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ UUID: uuid.New(), }) require.NoError(s.T(), err) - check.Args(l.ID).Asserts(l, rbac.ActionRead).Returns(l) + check.Args(l.ID).Asserts(l, policy.ActionRead).Returns(l) })) s.Run("DeleteLicense", s.Subtest(func(db database.Store, check *expects) { l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ UUID: uuid.New(), }) require.NoError(s.T(), err) - check.Args(l.ID).Asserts(l, rbac.ActionDelete) + check.Args(l.ID).Asserts(l, policy.ActionDelete) })) s.Run("GetDeploymentID", s.Subtest(func(db database.Store, check *expects) { check.Args().Asserts().Returns("") @@ -568,20 +569,20 @@ func (s *MethodTestSuite) TestOrganization() { o := dbgen.Organization(s.T(), db, database.Organization{}) a := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) b := dbgen.Group(s.T(), db, database.Group{OrganizationID: o.ID}) - check.Args(o.ID).Asserts(a, rbac.ActionRead, b, rbac.ActionRead). + check.Args(o.ID).Asserts(a, policy.ActionRead, b, policy.ActionRead). 
Returns([]database.Group{a, b}) })) s.Run("GetOrganizationByID", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) - check.Args(o.ID).Asserts(o, rbac.ActionRead).Returns(o) + check.Args(o.ID).Asserts(o, policy.ActionRead).Returns(o) })) s.Run("GetDefaultOrganization", s.Subtest(func(db database.Store, check *expects) { o, _ := db.GetDefaultOrganization(context.Background()) - check.Args().Asserts(o, rbac.ActionRead).Returns(o) + check.Args().Asserts(o, policy.ActionRead).Returns(o) })) s.Run("GetOrganizationByName", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) - check.Args(o.Name).Asserts(o, rbac.ActionRead).Returns(o) + check.Args(o.Name).Asserts(o, policy.ActionRead).Returns(o) })) s.Run("GetOrganizationIDsByMemberIDs", s.Subtest(func(db database.Store, check *expects) { oa := dbgen.Organization(s.T(), db, database.Organization{}) @@ -589,26 +590,26 @@ func (s *MethodTestSuite) TestOrganization() { ma := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: oa.ID}) mb := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{OrganizationID: ob.ID}) check.Args([]uuid.UUID{ma.UserID, mb.UserID}). 
- Asserts(rbac.ResourceUserObject(ma.UserID), rbac.ActionRead, rbac.ResourceUserObject(mb.UserID), rbac.ActionRead) + Asserts(rbac.ResourceUserObject(ma.UserID), policy.ActionRead, rbac.ResourceUserObject(mb.UserID), policy.ActionRead) })) s.Run("GetOrganizationMemberByUserID", s.Subtest(func(db database.Store, check *expects) { mem := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{}) check.Args(database.GetOrganizationMemberByUserIDParams{ OrganizationID: mem.OrganizationID, UserID: mem.UserID, - }).Asserts(mem, rbac.ActionRead).Returns(mem) + }).Asserts(mem, policy.ActionRead).Returns(mem) })) s.Run("GetOrganizationMembershipsByUserID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) a := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID}) b := dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID}) - check.Args(u.ID).Asserts(a, rbac.ActionRead, b, rbac.ActionRead).Returns(slice.New(a, b)) + check.Args(u.ID).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b)) })) s.Run("GetOrganizations", s.Subtest(func(db database.Store, check *expects) { def, _ := db.GetDefaultOrganization(context.Background()) a := dbgen.Organization(s.T(), db, database.Organization{}) b := dbgen.Organization(s.T(), db, database.Organization{}) - check.Args().Asserts(def, rbac.ActionRead, a, rbac.ActionRead, b, rbac.ActionRead).Returns(slice.New(def, a, b)) + check.Args().Asserts(def, policy.ActionRead, a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(def, a, b)) })) s.Run("GetOrganizationsByUserID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -616,13 +617,13 @@ func (s *MethodTestSuite) TestOrganization() { _ = dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID, OrganizationID: a.ID}) b := dbgen.Organization(s.T(), db, database.Organization{}) _ = 
dbgen.OrganizationMember(s.T(), db, database.OrganizationMember{UserID: u.ID, OrganizationID: b.ID}) - check.Args(u.ID).Asserts(a, rbac.ActionRead, b, rbac.ActionRead).Returns(slice.New(a, b)) + check.Args(u.ID).Asserts(a, policy.ActionRead, b, policy.ActionRead).Returns(slice.New(a, b)) })) s.Run("InsertOrganization", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertOrganizationParams{ ID: uuid.New(), Name: "random", - }).Asserts(rbac.ResourceOrganization, rbac.ActionCreate) + }).Asserts(rbac.ResourceOrganization, policy.ActionCreate) })) s.Run("InsertOrganizationMember", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -633,8 +634,8 @@ func (s *MethodTestSuite) TestOrganization() { UserID: u.ID, Roles: []string{rbac.RoleOrgAdmin(o.ID)}, }).Asserts( - rbac.ResourceRoleAssignment.InOrg(o.ID), rbac.ActionCreate, - rbac.ResourceOrganizationMember.InOrg(o.ID).WithID(u.ID), rbac.ActionCreate) + rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionCreate, + rbac.ResourceOrganizationMember.InOrg(o.ID).WithID(u.ID), policy.ActionCreate) })) s.Run("UpdateMemberRoles", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) @@ -652,9 +653,9 @@ func (s *MethodTestSuite) TestOrganization() { UserID: u.ID, OrgID: o.ID, }).Asserts( - mem, rbac.ActionRead, - rbac.ResourceRoleAssignment.InOrg(o.ID), rbac.ActionCreate, // org-mem - rbac.ResourceRoleAssignment.InOrg(o.ID), rbac.ActionDelete, // org-admin + mem, policy.ActionRead, + rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionCreate, // org-mem + rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionDelete, // org-admin ).Returns(out) })) } @@ -663,39 +664,39 @@ func (s *MethodTestSuite) TestWorkspaceProxy() { s.Run("InsertWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertWorkspaceProxyParams{ ID: uuid.New(), - 
}).Asserts(rbac.ResourceWorkspaceProxy, rbac.ActionCreate) + }).Asserts(rbac.ResourceWorkspaceProxy, policy.ActionCreate) })) s.Run("RegisterWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) check.Args(database.RegisterWorkspaceProxyParams{ ID: p.ID, - }).Asserts(p, rbac.ActionUpdate) + }).Asserts(p, policy.ActionUpdate) })) s.Run("GetWorkspaceProxyByID", s.Subtest(func(db database.Store, check *expects) { p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args(p.ID).Asserts(p, rbac.ActionRead).Returns(p) + check.Args(p.ID).Asserts(p, policy.ActionRead).Returns(p) })) s.Run("GetWorkspaceProxyByName", s.Subtest(func(db database.Store, check *expects) { p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args(p.Name).Asserts(p, rbac.ActionRead).Returns(p) + check.Args(p.Name).Asserts(p, policy.ActionRead).Returns(p) })) s.Run("UpdateWorkspaceProxyDeleted", s.Subtest(func(db database.Store, check *expects) { p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) check.Args(database.UpdateWorkspaceProxyDeletedParams{ ID: p.ID, Deleted: true, - }).Asserts(p, rbac.ActionDelete) + }).Asserts(p, policy.ActionDelete) })) s.Run("UpdateWorkspaceProxy", s.Subtest(func(db database.Store, check *expects) { p, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) check.Args(database.UpdateWorkspaceProxyParams{ ID: p.ID, - }).Asserts(p, rbac.ActionUpdate) + }).Asserts(p, policy.ActionUpdate) })) s.Run("GetWorkspaceProxies", s.Subtest(func(db database.Store, check *expects) { p1, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) p2, _ := dbgen.WorkspaceProxy(s.T(), db, database.WorkspaceProxy{}) - check.Args().Asserts(p1, rbac.ActionRead, p2, rbac.ActionRead).Returns(slice.New(p1, p2)) + check.Args().Asserts(p1, policy.ActionRead, p2, policy.ActionRead).Returns(slice.New(p1, p2)) })) } @@ -725,11 +726,11 @@ func (s 
*MethodTestSuite) TestTemplate() { Name: t1.Name, OrganizationID: o1.ID, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }).Asserts(t1, rbac.ActionRead).Returns(b) + }).Asserts(t1, policy.ActionRead).Returns(b) })) s.Run("GetTemplateByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(t1.ID).Asserts(t1, rbac.ActionRead).Returns(t1) + check.Args(t1.ID).Asserts(t1, policy.ActionRead).Returns(t1) })) s.Run("GetTemplateByOrganizationAndName", s.Subtest(func(db database.Store, check *expects) { o1 := dbgen.Organization(s.T(), db, database.Organization{}) @@ -739,14 +740,14 @@ func (s *MethodTestSuite) TestTemplate() { check.Args(database.GetTemplateByOrganizationAndNameParams{ Name: t1.Name, OrganizationID: o1.ID, - }).Asserts(t1, rbac.ActionRead).Returns(t1) + }).Asserts(t1, policy.ActionRead).Returns(t1) })) s.Run("GetTemplateVersionByJobID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, }) - check.Args(tv.JobID).Asserts(t1, rbac.ActionRead).Returns(tv) + check.Args(tv.JobID).Asserts(t1, policy.ActionRead).Returns(tv) })) s.Run("GetTemplateVersionByTemplateIDAndName", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) @@ -756,14 +757,14 @@ func (s *MethodTestSuite) TestTemplate() { check.Args(database.GetTemplateVersionByTemplateIDAndNameParams{ Name: tv.Name, TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, - }).Asserts(t1, rbac.ActionRead).Returns(tv) + }).Asserts(t1, policy.ActionRead).Returns(tv) })) s.Run("GetTemplateVersionParameters", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, 
}) - check.Args(tv.ID).Asserts(t1, rbac.ActionRead).Returns([]database.TemplateVersionParameter{}) + check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionParameter{}) })) s.Run("GetTemplateVersionVariables", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) @@ -773,22 +774,22 @@ func (s *MethodTestSuite) TestTemplate() { tvv1 := dbgen.TemplateVersionVariable(s.T(), db, database.TemplateVersionVariable{ TemplateVersionID: tv.ID, }) - check.Args(tv.ID).Asserts(t1, rbac.ActionRead).Returns([]database.TemplateVersionVariable{tvv1}) + check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionVariable{tvv1}) })) s.Run("GetTemplateGroupRoles", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(t1.ID).Asserts(t1, rbac.ActionUpdate) + check.Args(t1.ID).Asserts(t1, policy.ActionUpdate) })) s.Run("GetTemplateUserRoles", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(t1.ID).Asserts(t1, rbac.ActionUpdate) + check.Args(t1.ID).Asserts(t1, policy.ActionUpdate) })) s.Run("GetTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, }) - check.Args(tv.ID).Asserts(t1, rbac.ActionRead).Returns(tv) + check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns(tv) })) s.Run("GetTemplateVersionsByTemplateID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) @@ -800,7 +801,7 @@ func (s *MethodTestSuite) TestTemplate() { }) check.Args(database.GetTemplateVersionsByTemplateIDParams{ TemplateID: t1.ID, - }).Asserts(t1, rbac.ActionRead). + }).Asserts(t1, policy.ActionRead). 
Returns(slice.New(a, b)) })) s.Run("GetTemplateVersionsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { @@ -814,7 +815,7 @@ func (s *MethodTestSuite) TestTemplate() { TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, CreatedAt: now.Add(-2 * time.Hour), }) - check.Args(now.Add(-time.Hour)).Asserts(rbac.ResourceTemplate.All(), rbac.ActionRead) + check.Args(now.Add(-time.Hour)).Asserts(rbac.ResourceTemplate.All(), policy.ActionRead) })) s.Run("GetTemplatesWithFilter", s.Subtest(func(db database.Store, check *expects) { a := dbgen.Template(s.T(), db, database.Template{}) @@ -835,48 +836,48 @@ func (s *MethodTestSuite) TestTemplate() { Provisioner: "echo", OrganizationID: orgID, MaxPortSharingLevel: database.AppSharingLevelOwner, - }).Asserts(rbac.ResourceTemplate.InOrg(orgID), rbac.ActionCreate) + }).Asserts(rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate) })) s.Run("InsertTemplateVersion", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.InsertTemplateVersionParams{ TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, OrganizationID: t1.OrganizationID, - }).Asserts(t1, rbac.ActionRead, t1, rbac.ActionCreate) + }).Asserts(t1, policy.ActionRead, t1, policy.ActionCreate) })) s.Run("SoftDeleteTemplateByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) - check.Args(t1.ID).Asserts(t1, rbac.ActionDelete) + check.Args(t1.ID).Asserts(t1, policy.ActionDelete) })) s.Run("UpdateTemplateACLByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateTemplateACLByIDParams{ ID: t1.ID, - }).Asserts(t1, rbac.ActionCreate) + }).Asserts(t1, policy.ActionCreate) })) s.Run("UpdateTemplateAccessControlByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) 
check.Args(database.UpdateTemplateAccessControlByIDParams{ ID: t1.ID, - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateTemplateScheduleByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateTemplateScheduleByIDParams{ ID: t1.ID, - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateTemplateWorkspacesLastUsedAt", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateTemplateWorkspacesLastUsedAtParams{ TemplateID: t1.ID, - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateWorkspacesDormantDeletingAtByTemplateID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateWorkspacesDormantDeletingAtByTemplateIDParams{ TemplateID: t1.ID, - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateTemplateActiveVersionByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{ @@ -889,21 +890,21 @@ func (s *MethodTestSuite) TestTemplate() { check.Args(database.UpdateTemplateActiveVersionByIDParams{ ID: t1.ID, ActiveVersionID: tv.ID, - }).Asserts(t1, rbac.ActionUpdate).Returns() + }).Asserts(t1, policy.ActionUpdate).Returns() })) s.Run("UpdateTemplateDeletedByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateTemplateDeletedByIDParams{ ID: t1.ID, Deleted: true, - }).Asserts(t1, rbac.ActionDelete).Returns() + }).Asserts(t1, policy.ActionDelete).Returns() })) s.Run("UpdateTemplateMetaByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(database.UpdateTemplateMetaByIDParams{ ID: t1.ID, 
MaxPortSharingLevel: "owner", - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateTemplateVersionByID", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) @@ -915,7 +916,7 @@ func (s *MethodTestSuite) TestTemplate() { TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, Name: tv.Name, UpdatedAt: tv.UpdatedAt, - }).Asserts(t1, rbac.ActionUpdate) + }).Asserts(t1, policy.ActionUpdate) })) s.Run("UpdateTemplateVersionDescriptionByJobID", s.Subtest(func(db database.Store, check *expects) { jobID := uuid.New() @@ -927,7 +928,7 @@ func (s *MethodTestSuite) TestTemplate() { check.Args(database.UpdateTemplateVersionDescriptionByJobIDParams{ JobID: jobID, Readme: "foo", - }).Asserts(t1, rbac.ActionUpdate).Returns() + }).Asserts(t1, policy.ActionUpdate).Returns() })) s.Run("UpdateTemplateVersionExternalAuthProvidersByJobID", s.Subtest(func(db database.Store, check *expects) { jobID := uuid.New() @@ -938,37 +939,37 @@ func (s *MethodTestSuite) TestTemplate() { }) check.Args(database.UpdateTemplateVersionExternalAuthProvidersByJobIDParams{ JobID: jobID, - }).Asserts(t1, rbac.ActionUpdate).Returns() + }).Asserts(t1, policy.ActionUpdate).Returns() })) s.Run("GetTemplateInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetUserLatencyInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetUserActivityInsights", s.Subtest(func(db database.Store, check *expects) { - 
check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead).Errors(sql.ErrNoRows) + check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("GetTemplateParameterInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetTemplateInsightsByInterval", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByIntervalParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateInsightsByIntervalParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetTemplateInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetTemplateAppInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateAppInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetTemplateAppInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead) + check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) })) s.Run("GetTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { - 
check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplateInsights, rbac.ActionRead).Errors(sql.ErrNoRows) + check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("UpsertTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { - check.Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + check.Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) } @@ -981,32 +982,32 @@ func (s *MethodTestSuite) TestUser() { })) s.Run("DeleteAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), rbac.ActionDelete).Returns() + check.Args(u.ID).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), policy.ActionDelete).Returns() })) s.Run("GetQuotaAllowanceForUser", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(u, rbac.ActionRead).Returns(int64(0)) + check.Args(u.ID).Asserts(u, policy.ActionRead).Returns(int64(0)) })) s.Run("GetQuotaConsumedForUser", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(u, rbac.ActionRead).Returns(int64(0)) + check.Args(u.ID).Asserts(u, policy.ActionRead).Returns(int64(0)) })) s.Run("GetUserByEmailOrUsername", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.GetUserByEmailOrUsernameParams{ Username: u.Username, Email: u.Email, - }).Asserts(u, rbac.ActionRead).Returns(u) + }).Asserts(u, policy.ActionRead).Returns(u) })) s.Run("GetUserByID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(u, rbac.ActionRead).Returns(u) + check.Args(u.ID).Asserts(u, policy.ActionRead).Returns(u) })) s.Run("GetUsersByIDs", s.Subtest(func(db 
database.Store, check *expects) { a := dbgen.User(s.T(), db, database.User{CreatedAt: dbtime.Now().Add(-time.Hour)}) b := dbgen.User(s.T(), db, database.User{CreatedAt: dbtime.Now()}) check.Args([]uuid.UUID{a.ID, b.ID}). - Asserts(a, rbac.ActionRead, b, rbac.ActionRead). + Asserts(a, policy.ActionRead, b, policy.ActionRead). Returns(slice.New(a, b)) })) s.Run("GetUsers", s.Subtest(func(db database.Store, check *expects) { @@ -1020,30 +1021,30 @@ func (s *MethodTestSuite) TestUser() { check.Args(database.InsertUserParams{ ID: uuid.New(), LoginType: database.LoginTypePassword, - }).Asserts(rbac.ResourceRoleAssignment, rbac.ActionCreate, rbac.ResourceUser, rbac.ActionCreate) + }).Asserts(rbac.ResourceRoleAssignment, policy.ActionCreate, rbac.ResourceUser, policy.ActionCreate) })) s.Run("InsertUserLink", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertUserLinkParams{ UserID: u.ID, LoginType: database.LoginTypeOIDC, - }).Asserts(u, rbac.ActionUpdate) + }).Asserts(u, policy.ActionUpdate) })) s.Run("UpdateUserDeletedByID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(u, rbac.ActionDelete).Returns() + check.Args(u.ID).Asserts(u, policy.ActionDelete).Returns() })) s.Run("UpdateUserHashedPassword", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.UpdateUserHashedPasswordParams{ ID: u.ID, - }).Asserts(u.UserDataRBACObject(), rbac.ActionUpdate).Returns() + }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns() })) s.Run("UpdateUserQuietHoursSchedule", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.UpdateUserQuietHoursScheduleParams{ ID: u.ID, - }).Asserts(u.UserDataRBACObject(), rbac.ActionUpdate) + }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate) })) 
s.Run("UpdateUserLastSeenAt", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1051,7 +1052,7 @@ func (s *MethodTestSuite) TestUser() { ID: u.ID, UpdatedAt: u.UpdatedAt, LastSeenAt: u.LastSeenAt, - }).Asserts(u, rbac.ActionUpdate).Returns(u) + }).Asserts(u, policy.ActionUpdate).Returns(u) })) s.Run("UpdateUserProfile", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1060,7 +1061,7 @@ func (s *MethodTestSuite) TestUser() { Email: u.Email, Username: u.Username, UpdatedAt: u.UpdatedAt, - }).Asserts(u.UserDataRBACObject(), rbac.ActionUpdate).Returns(u) + }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns(u) })) s.Run("GetUserWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1069,7 +1070,7 @@ func (s *MethodTestSuite) TestUser() { OwnerID: u.ID, TemplateID: uuid.UUID{}, }, - ).Asserts(u.UserWorkspaceBuildParametersObject(), rbac.ActionRead).Returns( + ).Asserts(u.UserWorkspaceBuildParametersObject(), policy.ActionRead).Returns( []database.GetUserWorkspaceBuildParametersRow{}, ) })) @@ -1079,7 +1080,7 @@ func (s *MethodTestSuite) TestUser() { ID: u.ID, ThemePreference: u.ThemePreference, UpdatedAt: u.UpdatedAt, - }).Asserts(u.UserDataRBACObject(), rbac.ActionUpdate).Returns(u) + }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns(u) })) s.Run("UpdateUserStatus", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1087,42 +1088,42 @@ func (s *MethodTestSuite) TestUser() { ID: u.ID, Status: u.Status, UpdatedAt: u.UpdatedAt, - }).Asserts(u, rbac.ActionUpdate).Returns(u) + }).Asserts(u, policy.ActionUpdate).Returns(u) })) s.Run("DeleteGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) - check.Args(key.UserID).Asserts(key, rbac.ActionDelete).Returns() + 
check.Args(key.UserID).Asserts(key, policy.ActionDelete).Returns() })) s.Run("GetGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) - check.Args(key.UserID).Asserts(key, rbac.ActionRead).Returns(key) + check.Args(key.UserID).Asserts(key, policy.ActionRead).Returns(key) })) s.Run("InsertGitSSHKey", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertGitSSHKeyParams{ UserID: u.ID, - }).Asserts(rbac.ResourceUserData.WithID(u.ID).WithOwner(u.ID.String()), rbac.ActionCreate) + }).Asserts(rbac.ResourceUserData.WithID(u.ID).WithOwner(u.ID.String()), policy.ActionCreate) })) s.Run("UpdateGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) check.Args(database.UpdateGitSSHKeyParams{ UserID: key.UserID, UpdatedAt: key.UpdatedAt, - }).Asserts(key, rbac.ActionUpdate).Returns(key) + }).Asserts(key, policy.ActionUpdate).Returns(key) })) s.Run("GetExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) check.Args(database.GetExternalAuthLinkParams{ ProviderID: link.ProviderID, UserID: link.UserID, - }).Asserts(link, rbac.ActionRead).Returns(link) + }).Asserts(link, policy.ActionRead).Returns(link) })) s.Run("InsertExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertExternalAuthLinkParams{ ProviderID: uuid.NewString(), UserID: u.ID, - }).Asserts(rbac.ResourceUserData.WithOwner(u.ID.String()).WithID(u.ID), rbac.ActionCreate) + }).Asserts(rbac.ResourceUserData.WithOwner(u.ID.String()).WithID(u.ID), policy.ActionCreate) })) s.Run("UpdateExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) @@ -1133,7 +1134,7 @@ func (s 
*MethodTestSuite) TestUser() { OAuthRefreshToken: link.OAuthRefreshToken, OAuthExpiry: link.OAuthExpiry, UpdatedAt: link.UpdatedAt, - }).Asserts(link, rbac.ActionUpdate).Returns(link) + }).Asserts(link, policy.ActionUpdate).Returns(link) })) s.Run("UpdateUserLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.UserLink(s.T(), db, database.UserLink{}) @@ -1144,7 +1145,7 @@ func (s *MethodTestSuite) TestUser() { UserID: link.UserID, LoginType: link.LoginType, DebugContext: json.RawMessage("{}"), - }).Asserts(link, rbac.ActionUpdate).Returns(link) + }).Asserts(link, policy.ActionUpdate).Returns(link) })) s.Run("UpdateUserRoles", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{RBACRoles: []string{rbac.RoleTemplateAdmin()}}) @@ -1154,22 +1155,22 @@ func (s *MethodTestSuite) TestUser() { GrantedRoles: []string{rbac.RoleUserAdmin()}, ID: u.ID, }).Asserts( - u, rbac.ActionRead, - rbac.ResourceRoleAssignment, rbac.ActionCreate, - rbac.ResourceRoleAssignment, rbac.ActionDelete, + u, policy.ActionRead, + rbac.ResourceRoleAssignment, policy.ActionCreate, + rbac.ResourceRoleAssignment, policy.ActionDelete, ).Returns(o) })) s.Run("AllUserIDs", s.Subtest(func(db database.Store, check *expects) { a := dbgen.User(s.T(), db, database.User{}) b := dbgen.User(s.T(), db, database.User{}) - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(slice.New(a.ID, b.ID)) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(a.ID, b.ID)) })) } func (s *MethodTestSuite) TestWorkspace() { s.Run("GetWorkspaceByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) - check.Args(ws.ID).Asserts(ws, rbac.ActionRead) + check.Args(ws.ID).Asserts(ws, policy.ActionRead) })) s.Run("GetWorkspaces", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1186,7 +1187,7 @@ func (s 
*MethodTestSuite) TestWorkspace() { s.Run("GetLatestWorkspaceBuildByWorkspaceID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID}) - check.Args(ws.ID).Asserts(ws, rbac.ActionRead).Returns(b) + check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns(b) })) s.Run("GetWorkspaceAgentByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1196,7 +1197,7 @@ func (s *MethodTestSuite) TestWorkspace() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - check.Args(agt.ID).Asserts(ws, rbac.ActionRead).Returns(agt) + check.Args(agt.ID).Asserts(ws, policy.ActionRead).Returns(agt) })) s.Run("GetWorkspaceAgentLifecycleStateByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1206,7 +1207,7 @@ func (s *MethodTestSuite) TestWorkspace() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - check.Args(agt.ID).Asserts(ws, rbac.ActionRead) + check.Args(agt.ID).Asserts(ws, policy.ActionRead) })) s.Run("GetWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1224,7 +1225,7 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.GetWorkspaceAgentMetadataParams{ WorkspaceAgentID: agt.ID, Keys: []string{"test"}, - }).Asserts(ws, rbac.ActionRead) + }).Asserts(ws, policy.ActionRead) })) 
s.Run("GetWorkspaceAgentByInstanceID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1234,7 +1235,7 @@ func (s *MethodTestSuite) TestWorkspace() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - check.Args(agt.AuthInstanceID.String).Asserts(ws, rbac.ActionRead).Returns(agt) + check.Args(agt.AuthInstanceID.String).Asserts(ws, policy.ActionRead).Returns(agt) })) s.Run("UpdateWorkspaceAgentLifecycleStateByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1247,7 +1248,7 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.UpdateWorkspaceAgentLifecycleStateByIDParams{ ID: agt.ID, LifecycleState: database.WorkspaceAgentLifecycleStateCreated, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("UpdateWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1259,7 +1260,7 @@ func (s *MethodTestSuite) TestWorkspace() { agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) check.Args(database.UpdateWorkspaceAgentMetadataParams{ WorkspaceAgentID: agt.ID, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("UpdateWorkspaceAgentLogOverflowByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1272,7 +1273,7 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.UpdateWorkspaceAgentLogOverflowByIDParams{ ID: agt.ID, LogsOverflowed: true, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() 
})) s.Run("UpdateWorkspaceAgentStartupByID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1287,7 +1288,7 @@ func (s *MethodTestSuite) TestWorkspace() { Subsystems: []database.WorkspaceAgentSubsystem{ database.WorkspaceAgentSubsystemEnvbox, }, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("GetWorkspaceAgentLogsAfter", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1299,7 +1300,7 @@ func (s *MethodTestSuite) TestWorkspace() { agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) check.Args(database.GetWorkspaceAgentLogsAfterParams{ AgentID: agt.ID, - }).Asserts(ws, rbac.ActionRead).Returns([]database.WorkspaceAgentLog{}) + }).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceAgentLog{}) })) s.Run("GetWorkspaceAppByAgentIDAndSlug", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1314,7 +1315,7 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.GetWorkspaceAppByAgentIDAndSlugParams{ AgentID: agt.ID, Slug: app.Slug, - }).Asserts(ws, rbac.ActionRead).Returns(app) + }).Asserts(ws, policy.ActionRead).Returns(app) })) s.Run("GetWorkspaceAppsByAgentID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1327,17 +1328,17 @@ func (s *MethodTestSuite) TestWorkspace() { a := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID}) b := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID}) - check.Args(agt.ID).Asserts(ws, rbac.ActionRead).Returns(slice.New(a, b)) + check.Args(agt.ID).Asserts(ws, policy.ActionRead).Returns(slice.New(a, b)) })) s.Run("GetWorkspaceBuildByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) build := 
dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID}) - check.Args(build.ID).Asserts(ws, rbac.ActionRead).Returns(build) + check.Args(build.ID).Asserts(ws, policy.ActionRead).Returns(build) })) s.Run("GetWorkspaceBuildByJobID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID}) - check.Args(build.JobID).Asserts(ws, rbac.ActionRead).Returns(build) + check.Args(build.JobID).Asserts(ws, policy.ActionRead).Returns(build) })) s.Run("GetWorkspaceBuildByWorkspaceIDAndBuildNumber", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1345,12 +1346,12 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.GetWorkspaceBuildByWorkspaceIDAndBuildNumberParams{ WorkspaceID: ws.ID, BuildNumber: build.BuildNumber, - }).Asserts(ws, rbac.ActionRead).Returns(build) + }).Asserts(ws, policy.ActionRead).Returns(build) })) s.Run("GetWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID}) - check.Args(build.ID).Asserts(ws, rbac.ActionRead). + check.Args(build.ID).Asserts(ws, policy.ActionRead). 
Returns([]database.WorkspaceBuildParameter{}) })) s.Run("GetWorkspaceBuildsByWorkspaceID", s.Subtest(func(db database.Store, check *expects) { @@ -1358,7 +1359,7 @@ func (s *MethodTestSuite) TestWorkspace() { _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 1}) _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 2}) _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, BuildNumber: 3}) - check.Args(database.GetWorkspaceBuildsByWorkspaceIDParams{WorkspaceID: ws.ID}).Asserts(ws, rbac.ActionRead) // ordering + check.Args(database.GetWorkspaceBuildsByWorkspaceIDParams{WorkspaceID: ws.ID}).Asserts(ws, policy.ActionRead) // ordering })) s.Run("GetWorkspaceByAgentID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -1368,7 +1369,7 @@ func (s *MethodTestSuite) TestWorkspace() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - check.Args(agt.ID).Asserts(ws, rbac.ActionRead).Returns(database.GetWorkspaceByAgentIDRow{ + check.Args(agt.ID).Asserts(ws, policy.ActionRead).Returns(database.GetWorkspaceByAgentIDRow{ Workspace: ws, TemplateName: tpl.Name, }) @@ -1381,7 +1382,7 @@ func (s *MethodTestSuite) TestWorkspace() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) - check.Args(ws.ID).Asserts(ws, rbac.ActionRead) + check.Args(ws.ID).Asserts(ws, policy.ActionRead) })) s.Run("GetWorkspaceByOwnerIDAndName", s.Subtest(func(db database.Store, check *expects) { ws := 
dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1389,26 +1390,26 @@ func (s *MethodTestSuite) TestWorkspace() { OwnerID: ws.OwnerID, Deleted: ws.Deleted, Name: ws.Name, - }).Asserts(ws, rbac.ActionRead).Returns(ws) + }).Asserts(ws, policy.ActionRead).Returns(ws) })) s.Run("GetWorkspaceResourceByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) _ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild}) res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) - check.Args(res.ID).Asserts(ws, rbac.ActionRead).Returns(res) + check.Args(res.ID).Asserts(ws, policy.ActionRead).Returns(res) })) s.Run("Build/GetWorkspaceResourcesByJobID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild}) - check.Args(job.ID).Asserts(ws, rbac.ActionRead).Returns([]database.WorkspaceResource{}) + check.Args(job.ID).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceResource{}) })) s.Run("Template/GetWorkspaceResourcesByJobID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, JobID: uuid.New()}) job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: v.JobID, Type: database.ProvisionerJobTypeTemplateVersionImport}) - check.Args(job.ID).Asserts(v.RBACObject(tpl), []rbac.Action{rbac.ActionRead, 
rbac.ActionRead}).Returns([]database.WorkspaceResource{}) + check.Args(job.ID).Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionRead}).Returns([]database.WorkspaceResource{}) })) s.Run("InsertWorkspace", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1418,7 +1419,7 @@ func (s *MethodTestSuite) TestWorkspace() { OwnerID: u.ID, OrganizationID: o.ID, AutomaticUpdates: database.AutomaticUpdatesNever, - }).Asserts(rbac.ResourceWorkspace.WithOwner(u.ID.String()).InOrg(o.ID), rbac.ActionCreate) + }).Asserts(rbac.ResourceWorkspace.WithOwner(u.ID.String()).InOrg(o.ID), policy.ActionCreate) })) s.Run("Start/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { t := dbgen.Template(s.T(), db, database.Template{}) @@ -1429,7 +1430,7 @@ func (s *MethodTestSuite) TestWorkspace() { WorkspaceID: w.ID, Transition: database.WorkspaceTransitionStart, Reason: database.BuildReasonInitiator, - }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), rbac.ActionUpdate) + }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate) })) s.Run("Start/RequireActiveVersion/VersionMismatch/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { t := dbgen.Template(s.T(), db, database.Template{}) @@ -1451,8 +1452,8 @@ func (s *MethodTestSuite) TestWorkspace() { Reason: database.BuildReasonInitiator, TemplateVersionID: v.ID, }).Asserts( - w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), rbac.ActionUpdate, - t, rbac.ActionUpdate, + w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate, + t, policy.ActionUpdate, ) })) s.Run("Start/RequireActiveVersion/VersionsMatch/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { @@ -1479,7 +1480,7 @@ func (s *MethodTestSuite) TestWorkspace() { Reason: database.BuildReasonInitiator, TemplateVersionID: v.ID, }).Asserts( - 
w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), rbac.ActionUpdate, + w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate, ) })) s.Run("Delete/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { @@ -1488,7 +1489,7 @@ func (s *MethodTestSuite) TestWorkspace() { WorkspaceID: w.ID, Transition: database.WorkspaceTransitionDelete, Reason: database.BuildReasonInitiator, - }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionDelete), rbac.ActionDelete) + }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionDelete), policy.ActionDelete) })) s.Run("InsertWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1497,7 +1498,7 @@ func (s *MethodTestSuite) TestWorkspace() { WorkspaceBuildID: b.ID, Name: []string{"foo", "bar"}, Value: []string{"baz", "qux"}, - }).Asserts(w, rbac.ActionUpdate) + }).Asserts(w, policy.ActionUpdate) })) s.Run("UpdateWorkspace", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1505,20 +1506,20 @@ func (s *MethodTestSuite) TestWorkspace() { expected.Name = "" check.Args(database.UpdateWorkspaceParams{ ID: w.ID, - }).Asserts(w, rbac.ActionUpdate).Returns(expected) + }).Asserts(w, policy.ActionUpdate).Returns(expected) })) s.Run("UpdateWorkspaceDormantDeletingAt", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.UpdateWorkspaceDormantDeletingAtParams{ ID: w.ID, - }).Asserts(w, rbac.ActionUpdate) + }).Asserts(w, policy.ActionUpdate) })) s.Run("UpdateWorkspaceAutomaticUpdates", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.UpdateWorkspaceAutomaticUpdatesParams{ ID: w.ID, AutomaticUpdates: database.AutomaticUpdatesAlways, - }).Asserts(w, rbac.ActionUpdate) + }).Asserts(w, policy.ActionUpdate) 
})) s.Run("UpdateWorkspaceAppHealthByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1529,13 +1530,13 @@ func (s *MethodTestSuite) TestWorkspace() { check.Args(database.UpdateWorkspaceAppHealthByIDParams{ ID: app.ID, Health: database.WorkspaceAppHealthDisabled, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("UpdateWorkspaceAutostart", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.UpdateWorkspaceAutostartParams{ ID: ws.ID, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("UpdateWorkspaceBuildDeadlineByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1544,38 +1545,38 @@ func (s *MethodTestSuite) TestWorkspace() { ID: build.ID, UpdatedAt: build.UpdatedAt, Deadline: build.Deadline, - }).Asserts(ws, rbac.ActionUpdate) + }).Asserts(ws, policy.ActionUpdate) })) s.Run("SoftDeleteWorkspaceByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) ws.Deleted = true - check.Args(ws.ID).Asserts(ws, rbac.ActionDelete).Returns() + check.Args(ws.ID).Asserts(ws, policy.ActionDelete).Returns() })) s.Run("UpdateWorkspaceDeletedByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{Deleted: true}) check.Args(database.UpdateWorkspaceDeletedByIDParams{ ID: ws.ID, Deleted: true, - }).Asserts(ws, rbac.ActionDelete).Returns() + }).Asserts(ws, policy.ActionDelete).Returns() })) s.Run("UpdateWorkspaceLastUsedAt", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.UpdateWorkspaceLastUsedAtParams{ ID: ws.ID, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, 
policy.ActionUpdate).Returns() })) s.Run("BatchUpdateWorkspaceLastUsedAt", s.Subtest(func(db database.Store, check *expects) { ws1 := dbgen.Workspace(s.T(), db, database.Workspace{}) ws2 := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.BatchUpdateWorkspaceLastUsedAtParams{ IDs: []uuid.UUID{ws1.ID, ws2.ID}, - }).Asserts(rbac.ResourceWorkspace.All(), rbac.ActionUpdate).Returns() + }).Asserts(rbac.ResourceWorkspace.All(), policy.ActionUpdate).Returns() })) s.Run("UpdateWorkspaceTTL", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) check.Args(database.UpdateWorkspaceTTLParams{ ID: ws.ID, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("GetWorkspaceByWorkspaceAppID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1583,7 +1584,7 @@ func (s *MethodTestSuite) TestWorkspace() { res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) app := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: agt.ID}) - check.Args(app.ID).Asserts(ws, rbac.ActionRead).Returns(ws) + check.Args(app.ID).Asserts(ws, policy.ActionRead).Returns(ws) })) s.Run("ActivityBumpWorkspace", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1591,17 +1592,17 @@ func (s *MethodTestSuite) TestWorkspace() { dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild}) check.Args(database.ActivityBumpWorkspaceParams{ WorkspaceID: ws.ID, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("FavoriteWorkspace", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) ws := 
dbgen.Workspace(s.T(), db, database.Workspace{OwnerID: u.ID}) - check.Args(ws.ID).Asserts(ws, rbac.ActionUpdate).Returns() + check.Args(ws.ID).Asserts(ws, policy.ActionUpdate).Returns() })) s.Run("UnfavoriteWorkspace", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) ws := dbgen.Workspace(s.T(), db, database.Workspace{OwnerID: u.ID}) - check.Args(ws.ID).Asserts(ws, rbac.ActionUpdate).Returns() + check.Args(ws.ID).Asserts(ws, policy.ActionUpdate).Returns() })) } @@ -1617,7 +1618,7 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() { Port: ps.Port, ShareLevel: ps.ShareLevel, Protocol: ps.Protocol, - }).Asserts(ws, rbac.ActionUpdate).Returns(ps) + }).Asserts(ws, policy.ActionUpdate).Returns(ps) })) s.Run("GetWorkspaceAgentPortShare", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1627,13 +1628,13 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() { WorkspaceID: ps.WorkspaceID, AgentName: ps.AgentName, Port: ps.Port, - }).Asserts(ws, rbac.ActionRead).Returns(ps) + }).Asserts(ws, policy.ActionRead).Returns(ps) })) s.Run("ListWorkspaceAgentPortShares", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) ws := dbgen.Workspace(s.T(), db, database.Workspace{OwnerID: u.ID}) ps := dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID}) - check.Args(ws.ID).Asserts(ws, rbac.ActionRead).Returns([]database.WorkspaceAgentPortShare{ps}) + check.Args(ws.ID).Asserts(ws, policy.ActionRead).Returns([]database.WorkspaceAgentPortShare{ps}) })) s.Run("DeleteWorkspaceAgentPortShare", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1643,21 +1644,21 @@ func (s *MethodTestSuite) TestWorkspacePortSharing() { WorkspaceID: ps.WorkspaceID, AgentName: ps.AgentName, Port: ps.Port, - }).Asserts(ws, rbac.ActionUpdate).Returns() + }).Asserts(ws, 
policy.ActionUpdate).Returns() })) s.Run("DeleteWorkspaceAgentPortSharesByTemplate", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) t := dbgen.Template(s.T(), db, database.Template{}) ws := dbgen.Workspace(s.T(), db, database.Workspace{OwnerID: u.ID, TemplateID: t.ID}) _ = dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID}) - check.Args(t.ID).Asserts(t, rbac.ActionUpdate).Returns() + check.Args(t.ID).Asserts(t, policy.ActionUpdate).Returns() })) s.Run("ReduceWorkspaceAgentShareLevelToAuthenticatedByTemplate", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) t := dbgen.Template(s.T(), db, database.Template{}) ws := dbgen.Workspace(s.T(), db, database.Workspace{OwnerID: u.ID, TemplateID: t.ID}) _ = dbgen.WorkspaceAgentPortShare(s.T(), db, database.WorkspaceAgentPortShare{WorkspaceID: ws.ID}) - check.Args(t.ID).Asserts(t, rbac.ActionUpdate).Returns() + check.Args(t.ID).Asserts(t, policy.ActionUpdate).Returns() })) } @@ -1669,7 +1670,7 @@ func (s *MethodTestSuite) TestExtraMethods() { }), }) s.NoError(err, "insert provisioner daemon") - check.Args().Asserts(d, rbac.ActionRead) + check.Args().Asserts(d, policy.ActionRead) })) s.Run("DeleteOldProvisionerDaemons", s.Subtest(func(db database.Store, check *expects) { _, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{ @@ -1678,7 +1679,7 @@ func (s *MethodTestSuite) TestExtraMethods() { }), }) s.NoError(err, "insert provisioner daemon") - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionDelete) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete) })) s.Run("UpdateProvisionerDaemonLastSeenAt", s.Subtest(func(db database.Store, check *expects) { d, err := db.UpsertProvisionerDaemon(context.Background(), database.UpsertProvisionerDaemonParams{ @@ -1690,7 +1691,7 @@ func (s *MethodTestSuite) TestExtraMethods() { 
check.Args(database.UpdateProvisionerDaemonLastSeenAtParams{ ID: d.ID, LastSeenAt: sql.NullTime{Time: dbtime.Now(), Valid: true}, - }).Asserts(rbac.ResourceProvisionerDaemon, rbac.ActionUpdate) + }).Asserts(rbac.ResourceProvisionerDaemon, policy.ActionUpdate) })) } @@ -1699,134 +1700,134 @@ func (s *MethodTestSuite) TestExtraMethods() { func (s *MethodTestSuite) TestTailnetFunctions() { s.Run("CleanTailnetCoordinators", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("CleanTailnetLostPeers", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("CleanTailnetTunnels", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteAllTailnetClientSubscriptions", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteAllTailnetClientSubscriptionsParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteAllTailnetTunnels", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteAllTailnetTunnelsParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteCoordinator", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). 
+ Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteTailnetAgent", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteTailnetAgentParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionUpdate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteTailnetClient", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteTailnetClientParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteTailnetClientSubscription", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteTailnetClientSubscriptionParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteTailnetPeer", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteTailnetPeerParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("DeleteTailnetTunnel", s.Subtest(func(db database.Store, check *expects) { check.Args(database.DeleteTailnetTunnelParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionDelete). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionDelete). Errors(dbmem.ErrUnimplemented) })) s.Run("GetAllTailnetAgents", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetTailnetAgents", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). 
- Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetTailnetClientsForAgent", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetTailnetPeers", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetTailnetTunnelPeerBindings", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetTailnetTunnelPeerIDs", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetAllTailnetCoordinators", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetAllTailnetPeers", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). Errors(dbmem.ErrUnimplemented) })) s.Run("GetAllTailnetTunnels", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionRead). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionRead). 
Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetAgent", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpsertTailnetAgentParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionUpdate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate). Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetClient", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpsertTailnetClientParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionUpdate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate). Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetClientSubscription", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpsertTailnetClientSubscriptionParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionUpdate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate). Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetCoordinator", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionUpdate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionUpdate). Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetPeer", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpsertTailnetPeerParams{ Status: database.TailnetStatusOk, }). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionCreate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionCreate). Errors(dbmem.ErrUnimplemented) })) s.Run("UpsertTailnetTunnel", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpsertTailnetTunnelParams{}). - Asserts(rbac.ResourceTailnetCoordinator, rbac.ActionCreate). + Asserts(rbac.ResourceTailnetCoordinator, policy.ActionCreate). 
Errors(dbmem.ErrUnimplemented) })) } @@ -1834,12 +1835,12 @@ func (s *MethodTestSuite) TestTailnetFunctions() { func (s *MethodTestSuite) TestDBCrypt() { s.Run("GetDBCryptKeys", s.Subtest(func(db database.Store, check *expects) { check.Args(). - Asserts(rbac.ResourceSystem, rbac.ActionRead). + Asserts(rbac.ResourceSystem, policy.ActionRead). Returns([]database.DBCryptKey{}) })) s.Run("InsertDBCryptKey", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertDBCryptKeyParams{}). - Asserts(rbac.ResourceSystem, rbac.ActionCreate). + Asserts(rbac.ResourceSystem, policy.ActionCreate). Returns() })) s.Run("RevokeDBCryptKey", s.Subtest(func(db database.Store, check *expects) { @@ -1848,7 +1849,7 @@ func (s *MethodTestSuite) TestDBCrypt() { }) s.NoError(err) check.Args("revoke me"). - Asserts(rbac.ResourceSystem, rbac.ActionUpdate). + Asserts(rbac.ResourceSystem, policy.ActionUpdate). Returns() })) } @@ -1861,56 +1862,56 @@ func (s *MethodTestSuite) TestSystemFunctions() { UserID: u.ID, LinkedID: l.LinkedID, LoginType: database.LoginTypeGithub, - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate).Returns(l) + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns(l) })) s.Run("GetLatestWorkspaceBuildsByWorkspaceIDs", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID}) - check.Args([]uuid.UUID{ws.ID}).Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(slice.New(b)) + check.Args([]uuid.UUID{ws.ID}).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(b)) })) s.Run("UpsertDefaultProxy", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.UpsertDefaultProxyParams{}).Asserts(rbac.ResourceSystem, rbac.ActionUpdate).Returns() + check.Args(database.UpsertDefaultProxyParams{}).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() })) s.Run("GetUserLinkByLinkedID", 
s.Subtest(func(db database.Store, check *expects) { l := dbgen.UserLink(s.T(), db, database.UserLink{}) - check.Args(l.LinkedID).Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(l) + check.Args(l.LinkedID).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(l) })) s.Run("GetUserLinkByUserIDLoginType", s.Subtest(func(db database.Store, check *expects) { l := dbgen.UserLink(s.T(), db, database.UserLink{}) check.Args(database.GetUserLinkByUserIDLoginTypeParams{ UserID: l.UserID, LoginType: l.LoginType, - }).Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(l) + }).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(l) })) s.Run("GetLatestWorkspaceBuilds", s.Subtest(func(db database.Store, check *expects) { dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{}) dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{}) - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetActiveUserCount", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(int64(0)) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(int64(0)) })) s.Run("GetUnexpiredLicenses", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetAuthorizationUserRoles", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(u.ID).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetDERPMeshKey", s.Subtest(func(db database.Store, check *expects) { db.InsertDERPMeshKey(context.Background(), "testing") - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("InsertDERPMeshKey", s.Subtest(func(db 
database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceSystem, rbac.ActionCreate).Returns() + check.Args("value").Asserts(rbac.ResourceSystem, policy.ActionCreate).Returns() })) s.Run("InsertDeploymentID", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceSystem, rbac.ActionCreate).Returns() + check.Args("value").Asserts(rbac.ResourceSystem, policy.ActionCreate).Returns() })) s.Run("InsertReplica", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertReplicaParams{ ID: uuid.New(), - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("UpdateReplica", s.Subtest(func(db database.Store, check *expects) { replica, err := db.InsertReplica(context.Background(), database.InsertReplicaParams{ID: uuid.New()}) @@ -1918,24 +1919,24 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.UpdateReplicaParams{ ID: replica.ID, DatabaseLatency: 100, - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("DeleteReplicasUpdatedBefore", s.Subtest(func(db database.Store, check *expects) { _, err := db.InsertReplica(context.Background(), database.InsertReplicaParams{ID: uuid.New(), UpdatedAt: time.Now()}) require.NoError(s.T(), err) - check.Args(time.Now().Add(time.Hour)).Asserts(rbac.ResourceSystem, rbac.ActionDelete) + check.Args(time.Now().Add(time.Hour)).Asserts(rbac.ResourceSystem, policy.ActionDelete) })) s.Run("GetReplicasUpdatedAfter", s.Subtest(func(db database.Store, check *expects) { _, err := db.InsertReplica(context.Background(), database.InsertReplicaParams{ID: uuid.New(), UpdatedAt: time.Now()}) require.NoError(s.T(), err) - check.Args(time.Now().Add(time.Hour*-1)).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now().Add(time.Hour*-1)).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetUserCount", 
s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(int64(0)) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(int64(0)) })) s.Run("GetTemplates", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.Template(s.T(), db, database.Template{}) - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("UpdateWorkspaceBuildCostByID", s.Subtest(func(db database.Store, check *expects) { b := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{}) @@ -1944,7 +1945,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.UpdateWorkspaceBuildCostByIDParams{ ID: b.ID, DailyCost: 10, - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("UpdateWorkspaceBuildProvisionerStateByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -1952,43 +1953,43 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.UpdateWorkspaceBuildProvisionerStateByIDParams{ ID: build.ID, ProvisionerState: []byte("testing"), - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("UpsertLastUpdateCheck", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + check.Args("value").Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("GetLastUpdateCheck", s.Subtest(func(db database.Store, check *expects) { err := db.UpsertLastUpdateCheck(context.Background(), "value") require.NoError(s.T(), err) - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceBuildsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { _ = 
dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceAgentsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceAppsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceResourcesCreatedAfter", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceResourceMetadataCreatedAfter", s.Subtest(func(db database.Store, check *expects) { _ = dbgen.WorkspaceResourceMetadatums(s.T(), db, database.WorkspaceResourceMetadatum{}) - check.Args(time.Now()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(time.Now()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("DeleteOldWorkspaceAgentStats", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionDelete) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete) })) s.Run("GetProvisionerJobsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { // TODO: add provisioner job resource type _ = dbgen.ProvisionerJob(s.T(), db, 
nil, database.ProvisionerJob{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts( /*rbac.ResourceSystem, rbac.ActionRead*/ ) + check.Args(time.Now()).Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ ) })) s.Run("GetTemplateVersionsByIDs", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) @@ -2003,7 +2004,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { TemplateID: uuid.NullUUID{UUID: t2.ID, Valid: true}, }) check.Args([]uuid.UUID{tv1.ID, tv2.ID, tv3.ID}). - Asserts(rbac.ResourceSystem, rbac.ActionRead). + Asserts(rbac.ResourceSystem, policy.ActionRead). Returns(slice.New(tv1, tv2, tv3)) })) s.Run("GetParameterSchemasByJobID", s.Subtest(func(db database.Store, check *expects) { @@ -2013,7 +2014,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { }) job := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: tv.JobID}) check.Args(job.ID). - Asserts(tpl, rbac.ActionRead).Errors(sql.ErrNoRows) + Asserts(tpl, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("GetWorkspaceAppsByAgentIDs", s.Subtest(func(db database.Store, check *expects) { aWs := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -2029,7 +2030,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { b := dbgen.WorkspaceApp(s.T(), db, database.WorkspaceApp{AgentID: bAgt.ID}) check.Args([]uuid.UUID{a.AgentID, b.AgentID}). - Asserts(rbac.ResourceSystem, rbac.ActionRead). + Asserts(rbac.ResourceSystem, policy.ActionRead). Returns([]database.WorkspaceApp{a, b}) })) s.Run("GetWorkspaceResourcesByJobIDs", s.Subtest(func(db database.Store, check *expects) { @@ -2041,7 +2042,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { build := dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: uuid.New()}) wJob := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: build.JobID, Type: database.ProvisionerJobTypeWorkspaceBuild}) check.Args([]uuid.UUID{tJob.ID, wJob.ID}). 
- Asserts(rbac.ResourceSystem, rbac.ActionRead). + Asserts(rbac.ResourceSystem, policy.ActionRead). Returns([]database.WorkspaceResource{}) })) s.Run("GetWorkspaceResourceMetadataByResourceIDs", s.Subtest(func(db database.Store, check *expects) { @@ -2051,7 +2052,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { a := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) b := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) check.Args([]uuid.UUID{a.ID, b.ID}). - Asserts(rbac.ResourceSystem, rbac.ActionRead) + Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceAgentsByResourceIDs", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -2059,7 +2060,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: build.JobID}) agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) check.Args([]uuid.UUID{res.ID}). - Asserts(rbac.ResourceSystem, rbac.ActionRead). + Asserts(rbac.ResourceSystem, policy.ActionRead). Returns([]database.WorkspaceAgent{agt}) })) s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) { @@ -2067,25 +2068,25 @@ func (s *MethodTestSuite) TestSystemFunctions() { a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args([]uuid.UUID{a.ID, b.ID}). - Asserts( /*rbac.ResourceSystem, rbac.ActionRead*/ ). + Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ ). 
Returns(slice.New(a, b)) })) s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertWorkspaceAgentParams{ ID: uuid.New(), - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceApp", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertWorkspaceAppParams{ ID: uuid.New(), Health: database.WorkspaceAppHealthDisabled, SharingLevel: database.AppSharingLevelOwner, - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceResourceMetadata", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertWorkspaceResourceMetadataParams{ WorkspaceResourceID: uuid.New(), - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("UpdateWorkspaceAgentConnectionByID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -2094,7 +2095,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { agt := dbgen.WorkspaceAgent(s.T(), db, database.WorkspaceAgent{ResourceID: res.ID}) check.Args(database.UpdateWorkspaceAgentConnectionByIDParams{ ID: agt.ID, - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate).Returns() + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() })) s.Run("AcquireProvisionerJob", s.Subtest(func(db database.Store, check *expects) { // TODO: we need to create a ProvisionerJob resource @@ -2102,14 +2103,14 @@ func (s *MethodTestSuite) TestSystemFunctions() { StartedAt: sql.NullTime{Valid: false}, }) check.Args(database.AcquireProvisionerJobParams{OrganizationID: j.OrganizationID, Types: []database.ProvisionerType{j.Provisioner}, Tags: must(json.Marshal(j.Tags))}). 
- Asserts( /*rbac.ResourceSystem, rbac.ActionUpdate*/ ) + Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) })) s.Run("UpdateProvisionerJobWithCompleteByID", s.Subtest(func(db database.Store, check *expects) { // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args(database.UpdateProvisionerJobWithCompleteByIDParams{ ID: j.ID, - }).Asserts( /*rbac.ResourceSystem, rbac.ActionUpdate*/ ) + }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) })) s.Run("UpdateProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) { // TODO: we need to create a ProvisionerJob resource @@ -2117,7 +2118,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.UpdateProvisionerJobByIDParams{ ID: j.ID, UpdatedAt: time.Now(), - }).Asserts( /*rbac.ResourceSystem, rbac.ActionUpdate*/ ) + }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) })) s.Run("InsertProvisionerJob", s.Subtest(func(db database.Store, check *expects) { // TODO: we need to create a ProvisionerJob resource @@ -2126,14 +2127,14 @@ func (s *MethodTestSuite) TestSystemFunctions() { Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeWorkspaceBuild, - }).Asserts( /*rbac.ResourceSystem, rbac.ActionCreate*/ ) + }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ ) })) s.Run("InsertProvisionerJobLogs", s.Subtest(func(db database.Store, check *expects) { // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args(database.InsertProvisionerJobLogsParams{ JobID: j.ID, - }).Asserts( /*rbac.ResourceSystem, rbac.ActionCreate*/ ) + }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ ) })) s.Run("UpsertProvisionerDaemon", s.Subtest(func(db database.Store, check *expects) { pd := rbac.ResourceProvisionerDaemon.All() @@ -2141,41 +2142,41 @@ func (s 
*MethodTestSuite) TestSystemFunctions() { Tags: database.StringMap(map[string]string{ provisionersdk.TagScope: provisionersdk.ScopeOrganization, }), - }).Asserts(pd, rbac.ActionCreate) + }).Asserts(pd, policy.ActionCreate) check.Args(database.UpsertProvisionerDaemonParams{ Tags: database.StringMap(map[string]string{ provisionersdk.TagScope: provisionersdk.ScopeUser, provisionersdk.TagOwner: "11111111-1111-1111-1111-111111111111", }), - }).Asserts(pd.WithOwner("11111111-1111-1111-1111-111111111111"), rbac.ActionCreate) + }).Asserts(pd.WithOwner("11111111-1111-1111-1111-111111111111"), policy.ActionCreate) })) s.Run("InsertTemplateVersionParameter", s.Subtest(func(db database.Store, check *expects) { v := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{}) check.Args(database.InsertTemplateVersionParameterParams{ TemplateVersionID: v.ID, - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceResource", s.Subtest(func(db database.Store, check *expects) { r := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{}) check.Args(database.InsertWorkspaceResourceParams{ ID: r.ID, Transition: database.WorkspaceTransitionStart, - }).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + }).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("DeleteOldWorkspaceAgentLogs", s.Subtest(func(db database.Store, check *expects) { - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionDelete) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete) })) s.Run("InsertWorkspaceAgentStats", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertWorkspaceAgentStatsParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate).Errors(errMatchAny) + check.Args(database.InsertWorkspaceAgentStatsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate).Errors(errMatchAny) })) s.Run("InsertWorkspaceAppStats", s.Subtest(func(db database.Store, check *expects) { - 
check.Args(database.InsertWorkspaceAppStatsParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + check.Args(database.InsertWorkspaceAppStatsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceAgentScripts", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertWorkspaceAgentScriptsParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + check.Args(database.InsertWorkspaceAgentScriptsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceAgentMetadata", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertWorkspaceAgentMetadataParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + check.Args(database.InsertWorkspaceAgentMetadataParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("InsertWorkspaceAgentLogs", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertWorkspaceAgentLogsParams{}).Asserts() @@ -2184,13 +2185,13 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.InsertWorkspaceAgentLogSourcesParams{}).Asserts() })) s.Run("GetTemplateDAUs", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateDAUsParams{}).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(database.GetTemplateDAUsParams{}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetActiveWorkspaceBuildsByTemplateID", s.Subtest(func(db database.Store, check *expects) { - check.Args(uuid.New()).Asserts(rbac.ResourceSystem, rbac.ActionRead).Errors(sql.ErrNoRows) + check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("GetDeploymentDAUs", s.Subtest(func(db database.Store, check *expects) { - check.Args(int32(0)).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(int32(0)).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetAppSecurityKey", s.Subtest(func(db database.Store, check *expects) { 
check.Args().Asserts() @@ -2203,13 +2204,13 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args().Asserts() })) s.Run("UpsertApplicationName", s.Subtest(func(db database.Store, check *expects) { - check.Args("").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) + check.Args("").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) })) s.Run("GetHealthSettings", s.Subtest(func(db database.Store, check *expects) { check.Args().Asserts() })) s.Run("UpsertHealthSettings", s.Subtest(func(db database.Store, check *expects) { - check.Args("foo").Asserts(rbac.ResourceDeploymentValues, rbac.ActionCreate) + check.Args("foo").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) })) s.Run("GetDeploymentWorkspaceAgentStats", s.Subtest(func(db database.Store, check *expects) { check.Args(time.Time{}).Asserts() @@ -2218,27 +2219,27 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args().Asserts() })) s.Run("GetFileTemplates", s.Subtest(func(db database.Store, check *expects) { - check.Args(uuid.New()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetHungProvisionerJobs", s.Subtest(func(db database.Store, check *expects) { check.Args(time.Time{}).Asserts() })) s.Run("UpsertOAuthSigningKey", s.Subtest(func(db database.Store, check *expects) { - check.Args("foo").Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + check.Args("foo").Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("GetOAuthSigningKey", s.Subtest(func(db database.Store, check *expects) { db.UpsertOAuthSigningKey(context.Background(), "foo") - check.Args().Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + check.Args().Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("InsertMissingGroups", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertMissingGroupsParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate).Errors(errMatchAny) + 
check.Args(database.InsertMissingGroupsParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate).Errors(errMatchAny) })) s.Run("UpdateUserLoginType", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.UpdateUserLoginTypeParams{ NewLoginType: database.LoginTypePassword, UserID: u.ID, - }).Asserts(rbac.ResourceSystem, rbac.ActionUpdate) + }).Asserts(rbac.ResourceSystem, policy.ActionUpdate) })) s.Run("GetWorkspaceAgentStatsAndLabels", s.Subtest(func(db database.Store, check *expects) { check.Args(time.Time{}).Asserts() @@ -2253,40 +2254,40 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.GetWorkspaceProxyByHostnameParams{ Hostname: "foo.example.com", AllowWildcardHostname: true, - }).Asserts(rbac.ResourceSystem, rbac.ActionRead).Returns(p) + }).Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(p) })) s.Run("GetTemplateAverageBuildTime", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAverageBuildTimeParams{}).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(database.GetTemplateAverageBuildTimeParams{}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspacesEligibleForTransition", s.Subtest(func(db database.Store, check *expects) { check.Args(time.Time{}).Asserts() })) s.Run("InsertTemplateVersionVariable", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertTemplateVersionVariableParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate) + check.Args(database.InsertTemplateVersionVariableParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) s.Run("UpdateInactiveUsersToDormant", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.UpdateInactiveUsersToDormantParams{}).Asserts(rbac.ResourceSystem, rbac.ActionCreate).Errors(sql.ErrNoRows) + check.Args(database.UpdateInactiveUsersToDormantParams{}).Asserts(rbac.ResourceSystem, 
policy.ActionCreate).Errors(sql.ErrNoRows) })) s.Run("GetWorkspaceUniqueOwnerCountByTemplateIDs", s.Subtest(func(db database.Store, check *expects) { - check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceAgentScriptsByAgentIDs", s.Subtest(func(db database.Store, check *expects) { - check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetWorkspaceAgentLogSourcesByAgentIDs", s.Subtest(func(db database.Store, check *expects) { - check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args([]uuid.UUID{uuid.New()}).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetProvisionerJobsByIDsWithQueuePosition", s.Subtest(func(db database.Store, check *expects) { check.Args([]uuid.UUID{}).Asserts() })) s.Run("GetReplicaByID", s.Subtest(func(db database.Store, check *expects) { - check.Args(uuid.New()).Asserts(rbac.ResourceSystem, rbac.ActionRead).Errors(sql.ErrNoRows) + check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("GetWorkspaceAgentAndLatestBuildByAuthToken", s.Subtest(func(db database.Store, check *expects) { - check.Args(uuid.New()).Asserts(rbac.ResourceSystem, rbac.ActionRead).Errors(sql.ErrNoRows) + check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead).Errors(sql.ErrNoRows) })) s.Run("GetUserLinksByUserID", s.Subtest(func(db database.Store, check *expects) { - check.Args(uuid.New()).Asserts(rbac.ResourceSystem, rbac.ActionRead) + check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) s.Run("GetJFrogXrayScanByWorkspaceAndAgentID", s.Subtest(func(db database.Store, check *expects) { ws := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -2314,7 +2315,7 @@ func (s 
*MethodTestSuite) TestSystemFunctions() { check.Args(database.GetJFrogXrayScanByWorkspaceAndAgentIDParams{ WorkspaceID: ws.ID, AgentID: agent.ID, - }).Asserts(ws, rbac.ActionRead).Returns(expect) + }).Asserts(ws, policy.ActionRead).Returns(expect) })) s.Run("UpsertJFrogXrayScanByWorkspaceAndAgentID", s.Subtest(func(db database.Store, check *expects) { tpl := dbgen.Template(s.T(), db, database.Template{}) @@ -2324,7 +2325,7 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args(database.UpsertJFrogXrayScanByWorkspaceAndAgentIDParams{ WorkspaceID: ws.ID, AgentID: uuid.New(), - }).Asserts(tpl, rbac.ActionCreate) + }).Asserts(tpl, policy.ActionCreate) })) } @@ -2334,11 +2335,11 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{Name: "first"}), dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{Name: "last"}), } - check.Args().Asserts(rbac.ResourceOAuth2ProviderApp, rbac.ActionRead).Returns(apps) + check.Args().Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionRead).Returns(apps) })) s.Run("GetOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) - check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, rbac.ActionRead).Returns(app) + check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionRead).Returns(app) })) s.Run("GetOAuth2ProviderAppsByUserID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2356,7 +2357,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { APIKeyID: key.ID, }) } - check.Args(user.ID).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionRead).Returns([]database.GetOAuth2ProviderAppsByUserIDRow{ + check.Args(user.ID).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), 
policy.ActionRead).Returns([]database.GetOAuth2ProviderAppsByUserIDRow{ { OAuth2ProviderApp: database.OAuth2ProviderApp{ ID: app.ID, @@ -2369,7 +2370,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { }) })) s.Run("InsertOAuth2ProviderApp", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertOAuth2ProviderAppParams{}).Asserts(rbac.ResourceOAuth2ProviderApp, rbac.ActionCreate) + check.Args(database.InsertOAuth2ProviderAppParams{}).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionCreate) })) s.Run("UpdateOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) @@ -2380,11 +2381,11 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { Name: app.Name, CallbackURL: app.CallbackURL, UpdatedAt: app.UpdatedAt, - }).Asserts(rbac.ResourceOAuth2ProviderApp, rbac.ActionUpdate).Returns(app) + }).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionUpdate).Returns(app) })) s.Run("DeleteOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) - check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, rbac.ActionDelete) + check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionDelete) })) } @@ -2404,27 +2405,27 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() { _ = dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app2.ID, }) - check.Args(app1.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionRead).Returns(secrets) + check.Args(app1.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secrets) })) s.Run("GetOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - 
check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionRead).Returns(secret) + check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secret) })) s.Run("GetOAuth2ProviderAppSecretByPrefix", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - check.Args(secret.SecretPrefix).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionRead).Returns(secret) + check.Args(secret.SecretPrefix).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secret) })) s.Run("InsertOAuth2ProviderAppSecret", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) check.Args(database.InsertOAuth2ProviderAppSecretParams{ AppID: app.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionCreate) + }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionCreate) })) s.Run("UpdateOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) @@ -2435,14 +2436,14 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() { check.Args(database.UpdateOAuth2ProviderAppSecretByIDParams{ ID: secret.ID, LastUsedAt: secret.LastUsedAt, - }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionUpdate).Returns(secret) + }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionUpdate).Returns(secret) })) s.Run("DeleteOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, rbac.ActionDelete) + 
check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionDelete) })) } @@ -2454,7 +2455,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { AppID: app.ID, UserID: user.ID, }) - check.Args(code.ID).Asserts(code, rbac.ActionRead).Returns(code) + check.Args(code.ID).Asserts(code, policy.ActionRead).Returns(code) })) s.Run("GetOAuth2ProviderAppCodeByPrefix", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2463,7 +2464,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { AppID: app.ID, UserID: user.ID, }) - check.Args(code.SecretPrefix).Asserts(code, rbac.ActionRead).Returns(code) + check.Args(code.SecretPrefix).Asserts(code, policy.ActionRead).Returns(code) })) s.Run("InsertOAuth2ProviderAppCode", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2471,7 +2472,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { check.Args(database.InsertOAuth2ProviderAppCodeParams{ AppID: app.ID, UserID: user.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionCreate) + }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) })) s.Run("DeleteOAuth2ProviderAppCodeByID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2480,7 +2481,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { AppID: app.ID, UserID: user.ID, }) - check.Args(code.ID).Asserts(code, rbac.ActionDelete) + check.Args(code.ID).Asserts(code, policy.ActionDelete) })) s.Run("DeleteOAuth2ProviderAppCodesByAppAndUserID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2494,7 +2495,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { check.Args(database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams{ AppID: app.ID, UserID: user.ID, - 
}).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionDelete) + }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) })) } @@ -2511,7 +2512,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { check.Args(database.InsertOAuth2ProviderAppTokenParams{ AppSecretID: secret.ID, APIKeyID: key.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionCreate) + }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) })) s.Run("GetOAuth2ProviderAppTokenByPrefix", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2526,7 +2527,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { AppSecretID: secret.ID, APIKeyID: key.ID, }) - check.Args(token.HashPrefix).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionRead) + check.Args(token.HashPrefix).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionRead) })) s.Run("DeleteOAuth2ProviderAppTokensByAppAndUserID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2546,6 +2547,6 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { check.Args(database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams{ AppID: app.ID, UserID: user.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), rbac.ActionDelete) + }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) })) } diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go index d3a8ae6b378eb..16829cdef669e 100644 --- a/coderd/database/dbauthz/setup_test.go +++ b/coderd/database/dbauthz/setup_test.go @@ -17,6 +17,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac/policy" 
"github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database" @@ -338,7 +339,7 @@ func (m *expects) Errors(err error) *expects { // AssertRBAC contains the object and actions to be asserted. type AssertRBAC struct { Object rbac.Object - Actions []rbac.Action + Actions []policy.Action } // values is a convenience method for creating []reflect.Value. @@ -368,15 +369,15 @@ func values(ins ...any) []reflect.Value { // // Even-numbered inputs are the objects, and odd-numbered inputs are the actions. // Objects must implement rbac.Objecter. -// Inputs can be a single rbac.Action, or a slice of rbac.Action. +// Inputs can be a single policy.Action, or a slice of policy.Action. // -// asserts(workspace, rbac.ActionRead, template, slice(rbac.ActionRead, rbac.ActionWrite), ...) +// asserts(workspace, policy.ActionRead, template, slice(policy.ActionRead, policy.ActionWrite), ...) // // is equivalent to // // []AssertRBAC{ -// {Object: workspace, Actions: []rbac.Action{rbac.ActionRead}}, -// {Object: template, Actions: []rbac.Action{rbac.ActionRead, rbac.ActionWrite)}}, +// {Object: workspace, Actions: []policy.Action{policy.ActionRead}}, +// {Object: template, Actions: []policy.Action{policy.ActionRead, policy.ActionWrite)}}, // ... // } func asserts(inputs ...any) []AssertRBAC { @@ -392,19 +393,19 @@ func asserts(inputs ...any) []AssertRBAC { } rbacObj := obj.RBACObject() - var actions []rbac.Action - actions, ok = inputs[i+1].([]rbac.Action) + var actions []policy.Action + actions, ok = inputs[i+1].([]policy.Action) if !ok { - action, ok := inputs[i+1].(rbac.Action) + action, ok := inputs[i+1].(policy.Action) if !ok { // Could be the string type. 
actionAsString, ok := inputs[i+1].(string) if !ok { panic(fmt.Sprintf("action '%q' not a supported action", actionAsString)) } - action = rbac.Action(actionAsString) + action = policy.Action(actionAsString) } - actions = []rbac.Action{action} + actions = []policy.Action{action} } out = append(out, AssertRBAC{ diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 596885c9d282d..fe660e7e8fa93 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -22,6 +22,7 @@ import ( "github.com/coder/coder/v2/coderd/database/provisionerjobs" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/cryptorand" ) @@ -69,7 +70,7 @@ func Template(t testing.TB, db database.Store, seed database.Template) database. if seed.GroupACL == nil { // By default, all users in the organization can read the template. seed.GroupACL = database.TemplateACL{ - seed.OrganizationID.String(): []rbac.Action{rbac.ActionRead}, + seed.OrganizationID.String(): []policy.Action{policy.ActionRead}, } } if seed.UserACL == nil { diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index d92c60e8db09a..77ebfd6718757 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -14,12 +14,14 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" ) var ( // Force these imports, for some reason the autogen does not include them. 
_ uuid.UUID - _ rbac.Action + _ policy.Action + _ rbac.Objecter ) const wrapname = "dbmetrics.metricsStore" diff --git a/coderd/database/types.go b/coderd/database/types.go index b9f195e541c20..497446b25abfa 100644 --- a/coderd/database/types.go +++ b/coderd/database/types.go @@ -8,7 +8,7 @@ import ( "github.com/google/uuid" "golang.org/x/xerrors" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk/healthsdk" ) @@ -29,7 +29,7 @@ type HealthSettings struct { DismissedHealthchecks []healthsdk.HealthSection `db:"dismissed_healthchecks" json:"dismissed_healthchecks"` } -type Actions []rbac.Action +type Actions []policy.Action func (a *Actions) Scan(src interface{}) error { switch v := src.(type) { @@ -46,7 +46,7 @@ func (a *Actions) Value() (driver.Value, error) { } // TemplateACL is a map of ids to permissions. -type TemplateACL map[string][]rbac.Action +type TemplateACL map[string][]policy.Action func (t *TemplateACL) Scan(src interface{}) error { switch v := src.(type) { diff --git a/coderd/debug.go b/coderd/debug.go index d97edfe0b73fe..0e98539a71f75 100644 --- a/coderd/debug.go +++ b/coderd/debug.go @@ -19,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/healthsdk" ) @@ -193,7 +194,7 @@ func (api *API) deploymentHealthSettings(rw http.ResponseWriter, r *http.Request func (api *API) putDeploymentHealthSettings(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - if !api.Authorize(r, rbac.ActionUpdate, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentValues) { httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ Message: "Insufficient permissions to update health settings.", }) diff --git 
a/coderd/deployment.go b/coderd/deployment.go index d96d6d4388c9b..572bf9076bb59 100644 --- a/coderd/deployment.go +++ b/coderd/deployment.go @@ -5,6 +5,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -16,7 +17,7 @@ import ( // @Success 200 {object} codersdk.DeploymentConfig // @Router /deployment/config [get] func (api *API) deploymentValues(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentValues) { httpapi.Forbidden(rw) return } @@ -44,7 +45,7 @@ func (api *API) deploymentValues(rw http.ResponseWriter, r *http.Request) { // @Success 200 {object} codersdk.DeploymentStats // @Router /deployment/stats [get] func (api *API) deploymentStats(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceDeploymentStats) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentStats) { httpapi.Forbidden(rw) return } diff --git a/coderd/insights.go b/coderd/insights.go index fbd313247302d..85b4ec8661d9c 100644 --- a/coderd/insights.go +++ b/coderd/insights.go @@ -17,6 +17,7 @@ import ( "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/codersdk" ) @@ -32,7 +33,7 @@ const insightsTimeLayout = time.RFC3339 // @Success 200 {object} codersdk.DAUsResponse // @Router /insights/daus [get] func (api *API) deploymentDAUs(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentValues) { httpapi.Forbidden(rw) return } diff --git 
a/coderd/insights_test.go b/coderd/insights_test.go index 83be460dbdd8e..b6a28f7b0c59b 100644 --- a/coderd/insights_test.go +++ b/coderd/insights_test.go @@ -29,6 +29,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbrollup" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -655,7 +656,7 @@ func TestTemplateInsights_Golden(t *testing.T) { OrganizationID: firstUser.OrganizationID, CreatedBy: firstUser.UserID, GroupACL: database.TemplateACL{ - firstUser.OrganizationID.String(): []rbac.Action{rbac.ActionRead}, + firstUser.OrganizationID.String(): []policy.Action{policy.ActionRead}, }, }) err := db.UpdateTemplateVersionByID(context.Background(), database.UpdateTemplateVersionByIDParams{ @@ -1551,7 +1552,7 @@ func TestUserActivityInsights_Golden(t *testing.T) { OrganizationID: firstUser.OrganizationID, CreatedBy: firstUser.UserID, GroupACL: database.TemplateACL{ - firstUser.OrganizationID.String(): []rbac.Action{rbac.ActionRead}, + firstUser.OrganizationID.String(): []policy.Action{policy.ActionRead}, }, }) err := db.UpdateTemplateVersionByID(context.Background(), database.UpdateTemplateVersionByIDParams{ diff --git a/coderd/rbac/astvalue.go b/coderd/rbac/astvalue.go index 954f20cfeea53..9549eb1ed7be8 100644 --- a/coderd/rbac/astvalue.go +++ b/coderd/rbac/astvalue.go @@ -3,12 +3,14 @@ package rbac import ( "github.com/open-policy-agent/opa/ast" "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac/policy" ) // regoInputValue returns a rego input value for the given subject, action, and // object. This rego input is already parsed and can be used directly in a // rego query. 
-func regoInputValue(subject Subject, action Action, object Object) (ast.Value, error) { +func regoInputValue(subject Subject, action policy.Action, object Object) (ast.Value, error) { regoSubj, err := subject.regoValue() if err != nil { return nil, xerrors.Errorf("subject: %w", err) @@ -34,7 +36,7 @@ func regoInputValue(subject Subject, action Action, object Object) (ast.Value, e // regoPartialInputValue is the same as regoInputValue but only includes the // object type. This is for partial evaluations. -func regoPartialInputValue(subject Subject, action Action, objectType string) (ast.Value, error) { +func regoPartialInputValue(subject Subject, action policy.Action, objectType string) (ast.Value, error) { regoSubj, err := subject.regoValue() if err != nil { return nil, xerrors.Errorf("subject: %w", err) @@ -103,11 +105,11 @@ func (s Subject) regoValue() (ast.Value, error) { func (z Object) regoValue() ast.Value { userACL := ast.NewObject() for k, v := range z.ACLUserList { - userACL.Insert(ast.StringTerm(k), ast.NewTerm(regoSlice(v))) + userACL.Insert(ast.StringTerm(k), ast.NewTerm(regoSliceString(v...))) } grpACL := ast.NewObject() for k, v := range z.ACLGroupList { - grpACL.Insert(ast.StringTerm(k), ast.NewTerm(regoSlice(v))) + grpACL.Insert(ast.StringTerm(k), ast.NewTerm(regoSliceString(v...))) } return ast.NewObject( [2]*ast.Term{ @@ -200,10 +202,6 @@ func (perm Permission) regoValue() ast.Value { ) } -func (act Action) regoValue() ast.Value { - return ast.StringTerm(string(act)).Value -} - type regoValue interface { regoValue() ast.Value } @@ -218,10 +216,10 @@ func regoSlice[T regoValue](slice []T) *ast.Array { return ast.NewArray(terms...) } -func regoSliceString(slice ...string) *ast.Array { +func regoSliceString[T ~string](slice ...T) *ast.Array { terms := make([]*ast.Term, len(slice)) for i, v := range slice { - terms[i] = ast.StringTerm(v) + terms[i] = ast.StringTerm(string(v)) } return ast.NewArray(terms...) 
} diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go index 08ff61da1372c..c647bb09f89a0 100644 --- a/coderd/rbac/authz.go +++ b/coderd/rbac/authz.go @@ -19,30 +19,21 @@ import ( "go.opentelemetry.io/otel/trace" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/rbac/regosql" "github.com/coder/coder/v2/coderd/rbac/regosql/sqltypes" "github.com/coder/coder/v2/coderd/tracing" "github.com/coder/coder/v2/coderd/util/slice" ) -// Action represents the allowed actions to be done on an object. -type Action string - -const ( - ActionCreate Action = "create" - ActionRead Action = "read" - ActionUpdate Action = "update" - ActionDelete Action = "delete" -) - // AllActions is a helper function to return all the possible actions types. -func AllActions() []Action { - return []Action{ActionCreate, ActionRead, ActionUpdate, ActionDelete} +func AllActions() []policy.Action { + return []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} } type AuthCall struct { Actor Subject - Action Action + Action policy.Action Object Object } @@ -52,7 +43,7 @@ type AuthCall struct { // // Note that this ignores some fields such as the permissions within a given // role, as this assumes all roles are static to a given role name. -func hashAuthorizeCall(actor Subject, action Action, object Object) [32]byte { +func hashAuthorizeCall(actor Subject, action policy.Action, object Object) [32]byte { var hashOut [32]byte hash := sha256.New() @@ -139,8 +130,8 @@ type Authorizer interface { // Authorize will authorize the given subject to perform the given action // on the given object. Authorize is pure and deterministic with respect to // its arguments and the surrounding object. 
- Authorize(ctx context.Context, subject Subject, action Action, object Object) error - Prepare(ctx context.Context, subject Subject, action Action, objectType string) (PreparedAuthorized, error) + Authorize(ctx context.Context, subject Subject, action policy.Action, object Object) error + Prepare(ctx context.Context, subject Subject, action policy.Action, objectType string) (PreparedAuthorized, error) } type PreparedAuthorized interface { @@ -154,7 +145,7 @@ type PreparedAuthorized interface { // // Ideally the 'CompileToSQL' is used instead for large sets. This cost scales // linearly with the number of objects passed in. -func Filter[O Objecter](ctx context.Context, auth Authorizer, subject Subject, action Action, objects []O) ([]O, error) { +func Filter[O Objecter](ctx context.Context, auth Authorizer, subject Subject, action policy.Action, objects []O) ([]O, error) { if len(objects) == 0 { // Nothing to filter return objects, nil @@ -236,7 +227,7 @@ var ( // Load the policy from policy.rego in this directory. 
// //go:embed policy.rego - policy string + regoPolicy string queryOnce sync.Once query rego.PreparedEvalQuery partialQuery rego.PreparedPartialQuery @@ -254,7 +245,7 @@ func NewAuthorizer(registry prometheus.Registerer) *RegoAuthorizer { var err error query, err = rego.New( rego.Query("data.authz.allow"), - rego.Module("policy.rego", policy), + rego.Module("policy.rego", regoPolicy), ).PrepareForEval(context.Background()) if err != nil { panic(xerrors.Errorf("compile rego: %w", err)) @@ -269,7 +260,7 @@ func NewAuthorizer(registry prometheus.Registerer) *RegoAuthorizer { "input.object.acl_group_list", }), rego.Query("data.authz.allow = true"), - rego.Module("policy.rego", policy), + rego.Module("policy.rego", regoPolicy), ).PrepareForPartial(context.Background()) if err != nil { panic(xerrors.Errorf("compile partial rego: %w", err)) @@ -334,7 +325,7 @@ type authSubject struct { // It returns `nil` if the subject is authorized to perform the action on // the object. // If an error is returned, the authorization is denied. -func (a RegoAuthorizer) Authorize(ctx context.Context, subject Subject, action Action, object Object) error { +func (a RegoAuthorizer) Authorize(ctx context.Context, subject Subject, action policy.Action, object Object) error { start := time.Now() ctx, span := tracing.StartSpan(ctx, trace.WithTimestamp(start), // Reuse the time.Now for metric and trace @@ -365,7 +356,7 @@ func (a RegoAuthorizer) Authorize(ctx context.Context, subject Subject, action A // It is a different function so the exported one can add tracing + metrics. // That code tends to clutter up the actual logic, so it's separated out. 
// nolint:revive -func (a RegoAuthorizer) authorize(ctx context.Context, subject Subject, action Action, object Object) error { +func (a RegoAuthorizer) authorize(ctx context.Context, subject Subject, action policy.Action, object Object) error { if subject.Roles == nil { return xerrors.Errorf("subject must have roles") } @@ -392,7 +383,7 @@ func (a RegoAuthorizer) authorize(ctx context.Context, subject Subject, action A // Prepare will partially execute the rego policy leaving the object fields unknown (except for the type). // This will vastly speed up performance if batch authorization on the same type of objects is needed. -func (a RegoAuthorizer) Prepare(ctx context.Context, subject Subject, action Action, objectType string) (PreparedAuthorized, error) { +func (a RegoAuthorizer) Prepare(ctx context.Context, subject Subject, action policy.Action, objectType string) (PreparedAuthorized, error) { start := time.Now() ctx, span := tracing.StartSpan(ctx, trace.WithTimestamp(start), @@ -428,7 +419,7 @@ type PartialAuthorizer struct { // input is used purely for debugging and logging. subjectInput Subject - subjectAction Action + subjectAction policy.Action subjectResourceType Object // preparedQueries are the compiled set of queries after partial evaluation. 
@@ -537,7 +528,7 @@ EachQueryLoop: pa.subjectInput, pa.subjectAction, pa.subjectResourceType, nil) } -func (a RegoAuthorizer) newPartialAuthorizer(ctx context.Context, subject Subject, action Action, objectType string) (*PartialAuthorizer, error) { +func (a RegoAuthorizer) newPartialAuthorizer(ctx context.Context, subject Subject, action policy.Action, objectType string) (*PartialAuthorizer, error) { if subject.Roles == nil { return nil, xerrors.Errorf("subject must have roles") } @@ -676,7 +667,7 @@ func Cacher(authz Authorizer) Authorizer { } } -func (c *authCache) Authorize(ctx context.Context, subject Subject, action Action, object Object) error { +func (c *authCache) Authorize(ctx context.Context, subject Subject, action policy.Action, object Object) error { authorizeCacheKey := hashAuthorizeCall(subject, action, object) var err error @@ -697,13 +688,13 @@ func (c *authCache) Authorize(ctx context.Context, subject Subject, action Actio // Prepare returns the underlying PreparedAuthorized. The cache does not apply // to prepared authorizations. These should be using a SQL filter, and // therefore the cache is not needed. -func (c *authCache) Prepare(ctx context.Context, subject Subject, action Action, objectType string) (PreparedAuthorized, error) { +func (c *authCache) Prepare(ctx context.Context, subject Subject, action policy.Action, objectType string) (PreparedAuthorized, error) { return c.authz.Prepare(ctx, subject, action, objectType) } // rbacTraceAttributes are the attributes that are added to all spans created by // the rbac package. These attributes should help to debug slow spans. 
-func rbacTraceAttributes(actor Subject, action Action, objectType string, extra ...attribute.KeyValue) trace.SpanStartOption { +func rbacTraceAttributes(actor Subject, action policy.Action, objectType string, extra ...attribute.KeyValue) trace.SpanStartOption { return trace.WithAttributes( append(extra, attribute.StringSlice("subject_roles", actor.SafeRoleNames()), diff --git a/coderd/rbac/authz_internal_test.go b/coderd/rbac/authz_internal_test.go index e264e31c73a8c..cba69952ea481 100644 --- a/coderd/rbac/authz_internal_test.go +++ b/coderd/rbac/authz_internal_test.go @@ -13,6 +13,7 @@ import ( "github.com/stretchr/testify/require" "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/rbac/regosql" "github.com/coder/coder/v2/testutil" ) @@ -59,7 +60,7 @@ func TestFilterError(t *testing.T) { Scope: ScopeAll, } - _, err := Filter(context.Background(), auth, subject, ActionRead, []Object{ResourceUser, ResourceWorkspace}) + _, err := Filter(context.Background(), auth, subject, policy.ActionRead, []Object{ResourceUser, ResourceWorkspace}) require.ErrorContains(t, err, "object types must be uniform") }) @@ -67,7 +68,7 @@ func TestFilterError(t *testing.T) { t.Parallel() auth := &MockAuthorizer{ - AuthorizeFunc: func(ctx context.Context, subject Subject, action Action, object Object) error { + AuthorizeFunc: func(ctx context.Context, subject Subject, action policy.Action, object Object) error { // Authorize func always returns nil, unless the context is canceled. 
return ctx.Err() }, @@ -97,7 +98,7 @@ func TestFilterError(t *testing.T) { ResourceUser, } - _, err := Filter(ctx, auth, subject, ActionRead, objects) + _, err := Filter(ctx, auth, subject, policy.ActionRead, objects) require.ErrorIs(t, err, context.Canceled) }) @@ -117,7 +118,7 @@ func TestFilterError(t *testing.T) { bomb: cancel, } - _, err := Filter(ctx, auth, subject, ActionRead, objects) + _, err := Filter(ctx, auth, subject, policy.ActionRead, objects) require.ErrorIs(t, err, context.Canceled) }) }) @@ -150,7 +151,7 @@ func TestFilter(t *testing.T) { testCases := []struct { Name string Actor Subject - Action Action + Action policy.Action ObjectType string }{ { @@ -160,7 +161,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{}, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "Admin", @@ -169,7 +170,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{RoleOrgMember(orgIDs[0]), "auditor", RoleOwner(), RoleMember()}, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "OrgAdmin", @@ -178,7 +179,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{RoleOrgMember(orgIDs[0]), RoleOrgAdmin(orgIDs[0]), RoleMember()}, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "OrgMember", @@ -187,7 +188,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{RoleOrgMember(orgIDs[0]), RoleOrgMember(orgIDs[1]), RoleMember()}, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "ManyRoles", @@ -203,7 +204,7 @@ func TestFilter(t *testing.T) { }, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "SiteMember", @@ -212,7 +213,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{RoleMember()}, }, ObjectType: ResourceUser.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "ReadOrgs", @@ -227,7 +228,7 @@ func 
TestFilter(t *testing.T) { }, }, ObjectType: ResourceOrganization.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { Name: "ScopeApplicationConnect", @@ -236,7 +237,7 @@ func TestFilter(t *testing.T) { Roles: RoleNames{RoleOrgMember(orgIDs[0]), "auditor", RoleOwner(), RoleMember()}, }, ObjectType: ResourceWorkspace.Type, - Action: ActionRead, + Action: policy.ActionRead, }, } @@ -263,7 +264,7 @@ func TestFilter(t *testing.T) { var allowedCount int for i, obj := range localObjects { obj.Type = tc.ObjectType - err := auth.Authorize(ctx, actor, ActionRead, obj.RBACObject()) + err := auth.Authorize(ctx, actor, policy.ActionRead, obj.RBACObject()) obj.Allowed = err == nil if err == nil { allowedCount++ @@ -301,64 +302,64 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "UserACLList", user, []authTestCase{ { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]Action{ + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ user.ID: AllActions(), }), actions: AllActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]Action{ + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ user.ID: {WildcardSymbol}, }), actions: AllActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]Action{ - user.ID: {ActionRead, ActionUpdate}, + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ + user.ID: {policy.ActionRead, policy.ActionUpdate}, }), - actions: []Action{ActionCreate, ActionDelete}, + actions: []policy.Action{policy.ActionCreate, policy.ActionDelete}, allow: false, }, { // By default users cannot update templates - resource: 
ResourceTemplate.InOrg(defOrg).WithACLUserList(map[string][]Action{ - user.ID: {ActionUpdate}, + resource: ResourceTemplate.InOrg(defOrg).WithACLUserList(map[string][]policy.Action{ + user.ID: {policy.ActionUpdate}, }), - actions: []Action{ActionUpdate}, + actions: []policy.Action{policy.ActionUpdate}, allow: true, }, }) testAuthorize(t, "GroupACLList", user, []authTestCase{ { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]Action{ + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ allUsersGroup: AllActions(), }), actions: AllActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]Action{ + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ allUsersGroup: {WildcardSymbol}, }), actions: AllActions(), allow: true, }, { - resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]Action{ - allUsersGroup: {ActionRead, ActionUpdate}, + resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ + allUsersGroup: {policy.ActionRead, policy.ActionUpdate}, }), - actions: []Action{ActionCreate, ActionDelete}, + actions: []policy.Action{policy.ActionCreate, policy.ActionDelete}, allow: false, }, { // By default users cannot update templates - resource: ResourceTemplate.InOrg(defOrg).WithGroupACL(map[string][]Action{ - allUsersGroup: {ActionUpdate}, + resource: ResourceTemplate.InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ + allUsersGroup: {policy.ActionUpdate}, }), - actions: []Action{ActionUpdate}, + actions: []policy.Action{policy.ActionUpdate}, allow: true, }, }) @@ -509,7 +510,7 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "ApplicationToken", user, // Create (connect) Actions cases(func(c authTestCase) authTestCase { - 
c.actions = []Action{ActionCreate} + c.actions = []policy.Action{policy.ActionCreate} return c }, []authTestCase{ // Org + me @@ -537,7 +538,7 @@ func TestAuthorizeDomain(t *testing.T) { }), // Not create actions cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionRead, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ @@ -566,7 +567,7 @@ func TestAuthorizeDomain(t *testing.T) { }), // Other Objects cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionCreate, ActionRead, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ @@ -607,14 +608,14 @@ func TestAuthorizeDomain(t *testing.T) { defOrg.String(): {{ Negate: false, ResourceType: "*", - Action: ActionRead, + Action: policy.ActionRead, }}, }, User: []Permission{ { Negate: false, ResourceType: "*", - Action: ActionRead, + Action: policy.ActionRead, }, }, }, @@ -623,7 +624,7 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "ReadOnly", user, cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionRead} + c.actions = []policy.Action{policy.ActionRead} return c }, []authTestCase{ // Read @@ -653,7 +654,7 @@ func TestAuthorizeDomain(t *testing.T) { // Pass non-read actions cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionCreate, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ @@ -822,7 +823,7 @@ func TestAuthorizeScope(t *testing.T) { testAuthorize(t, "Admin_ScopeApplicationConnect", user, cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionRead, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionRead, policy.ActionUpdate, 
policy.ActionDelete} return c }, []authTestCase{ {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), allow: false}, @@ -839,9 +840,9 @@ func TestAuthorizeScope(t *testing.T) { }), // Allowed by scope: []authTestCase{ - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), actions: []Action{ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: true}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: true}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionCreate}, allow: true}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: true}, }, ) @@ -856,7 +857,7 @@ func TestAuthorizeScope(t *testing.T) { testAuthorize(t, "User_ScopeApplicationConnect", user, cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionRead, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ @@ -874,9 +875,9 @@ func TestAuthorizeScope(t *testing.T) { }), // Allowed by scope: []authTestCase{ - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), actions: []Action{ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: false}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: false}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), 
actions: []policy.Action{policy.ActionCreate}, allow: true}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, + {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, }, ) @@ -891,9 +892,9 @@ func TestAuthorizeScope(t *testing.T) { Role: Role{ Name: "workspace_agent", DisplayName: "Workspace Agent", - Site: Permissions(map[string][]Action{ + Site: Permissions(map[string][]policy.Action{ // Only read access for workspaces. - ResourceWorkspace.Type: {ActionRead}, + ResourceWorkspace.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -905,7 +906,7 @@ func TestAuthorizeScope(t *testing.T) { testAuthorize(t, "User_WorkspaceAgent", user, // Test cases without ID cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionCreate, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ @@ -924,7 +925,7 @@ func TestAuthorizeScope(t *testing.T) { // Test all cases with the workspace id cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionCreate, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete} c.allow = false c.resource.WithID(workspaceID) return c @@ -943,7 +944,7 @@ func TestAuthorizeScope(t *testing.T) { }), // Test cases with random ids. These should always fail from the scope. 
cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionRead, ActionCreate, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionRead, policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete} c.allow = false c.resource.WithID(uuid.New()) return c @@ -962,10 +963,10 @@ func TestAuthorizeScope(t *testing.T) { }), // Allowed by scope: []authTestCase{ - {resource: ResourceWorkspace.WithID(workspaceID).InOrg(defOrg).WithOwner(user.ID), actions: []Action{ActionRead}, allow: true}, + {resource: ResourceWorkspace.WithID(workspaceID).InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionRead}, allow: true}, // The scope will return true, but the user perms return false for resources not owned by the user. - {resource: ResourceWorkspace.WithID(workspaceID).InOrg(defOrg).WithOwner("not-me"), actions: []Action{ActionRead}, allow: false}, - {resource: ResourceWorkspace.WithID(workspaceID).InOrg(unusedID).WithOwner("not-me"), actions: []Action{ActionRead}, allow: false}, + {resource: ResourceWorkspace.WithID(workspaceID).InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionRead}, allow: false}, + {resource: ResourceWorkspace.WithID(workspaceID).InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionRead}, allow: false}, }, ) @@ -980,9 +981,9 @@ func TestAuthorizeScope(t *testing.T) { Role: Role{ Name: "create_workspace", DisplayName: "Create Workspace", - Site: Permissions(map[string][]Action{ + Site: Permissions(map[string][]policy.Action{ // Only read access for workspaces. - ResourceWorkspace.Type: {ActionCreate}, + ResourceWorkspace.Type: {policy.ActionCreate}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -995,7 +996,7 @@ func TestAuthorizeScope(t *testing.T) { testAuthorize(t, "CreatWorkspaceScope", user, // All these cases will fail because a resource ID is set. 
cases(func(c authTestCase) authTestCase { - c.actions = []Action{ActionCreate, ActionRead, ActionUpdate, ActionDelete} + c.actions = []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} c.allow = false c.resource.ID = uuid.NewString() return c @@ -1015,10 +1016,10 @@ func TestAuthorizeScope(t *testing.T) { // Test create allowed by scope: []authTestCase{ - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: []Action{ActionCreate}, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionCreate}, allow: true}, // The scope will return true, but the user perms return false for resources not owned by the user. - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: false}, - {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: []Action{ActionCreate}, allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, }, ) } @@ -1036,7 +1037,7 @@ func cases(opt func(c authTestCase) authTestCase, cases []authTestCase) []authTe type authTestCase struct { resource Object - actions []Action + actions []policy.Action allow bool } @@ -1127,16 +1128,16 @@ func must[T any](value T, err error) T { } type MockAuthorizer struct { - AuthorizeFunc func(context.Context, Subject, Action, Object) error + AuthorizeFunc func(context.Context, Subject, policy.Action, Object) error } var _ Authorizer = (*MockAuthorizer)(nil) -func (d *MockAuthorizer) Authorize(ctx context.Context, s Subject, a Action, o Object) error { +func (d *MockAuthorizer) Authorize(ctx context.Context, s Subject, a policy.Action, o Object) error { return d.AuthorizeFunc(ctx, s, a, o) } -func (d *MockAuthorizer) Prepare(_ 
context.Context, subject Subject, action Action, _ string) (PreparedAuthorized, error) { +func (d *MockAuthorizer) Prepare(_ context.Context, subject Subject, action policy.Action, _ string) (PreparedAuthorized, error) { return &mockPreparedAuthorizer{ Original: d, Subject: subject, @@ -1152,7 +1153,7 @@ type mockPreparedAuthorizer struct { sync.RWMutex Original *MockAuthorizer Subject Subject - Action Action + Action policy.Action } func (f *mockPreparedAuthorizer) Authorize(ctx context.Context, object Object) error { diff --git a/coderd/rbac/authz_test.go b/coderd/rbac/authz_test.go index 9a38c94e33033..4ac8f20d94506 100644 --- a/coderd/rbac/authz_test.go +++ b/coderd/rbac/authz_test.go @@ -12,6 +12,7 @@ import ( "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/testutil" ) @@ -167,7 +168,7 @@ func BenchmarkRBACAuthorize(b *testing.B) { objects := benchmarkSetup(orgs, users, b.N) b.ResetTimer() for i := 0; i < b.N; i++ { - allowed := authorizer.Authorize(context.Background(), c.Actor, rbac.ActionRead, objects[b.N%len(objects)]) + allowed := authorizer.Authorize(context.Background(), c.Actor, policy.ActionRead, objects[b.N%len(objects)]) _ = allowed } }) @@ -191,30 +192,30 @@ func BenchmarkRBACAuthorizeGroups(b *testing.B) { // Same benchmark cases, but this time groups will be used to match. // Some '*' permissions will still match, but using a fake action reduces // the chance. 
- neverMatchAction := rbac.Action("never-match-action") + neverMatchAction := policy.Action("never-match-action") for _, c := range benchCases { b.Run(c.Name+"GroupACL", func(b *testing.B) { userGroupAllow := uuid.NewString() c.Actor.Groups = append(c.Actor.Groups, userGroupAllow) c.Actor.Scope = rbac.ScopeAll objects := benchmarkSetup(orgs, users, b.N, func(object rbac.Object) rbac.Object { - m := map[string][]rbac.Action{ + m := map[string][]policy.Action{ // Add the user's group // Noise - uuid.NewString(): {rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, - uuid.NewString(): {rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate}, - uuid.NewString(): {rbac.ActionCreate, rbac.ActionRead}, - uuid.NewString(): {rbac.ActionCreate}, - uuid.NewString(): {rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, - uuid.NewString(): {rbac.ActionRead, rbac.ActionUpdate}, + uuid.NewString(): {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + uuid.NewString(): {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, + uuid.NewString(): {policy.ActionCreate, policy.ActionRead}, + uuid.NewString(): {policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + uuid.NewString(): {policy.ActionRead, policy.ActionUpdate}, } for _, g := range c.Actor.Groups { // Every group the user is in will be added, but it will not match the perms. This makes the // authorizer look at many groups before finding the one that matches. - m[g] = []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete} + m[g] = []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} } // This is the only group that will give permission. 
- m[userGroupAllow] = []rbac.Action{neverMatchAction} + m[userGroupAllow] = []policy.Action{neverMatchAction} return object.WithGroupACL(m) }) b.ResetTimer() @@ -244,7 +245,7 @@ func BenchmarkRBACFilter(b *testing.B) { b.Run("PrepareOnly-"+c.Name, func(b *testing.B) { obType := rbac.ResourceWorkspace.Type for i := 0; i < b.N; i++ { - _, err := authorizer.Prepare(context.Background(), c.Actor, rbac.ActionRead, obType) + _, err := authorizer.Prepare(context.Background(), c.Actor, policy.ActionRead, obType) require.NoError(b, err) } }) @@ -254,7 +255,7 @@ func BenchmarkRBACFilter(b *testing.B) { b.Run(c.Name, func(b *testing.B) { objects := benchmarkSetup(orgs, users, b.N) b.ResetTimer() - allowed, err := rbac.Filter(context.Background(), authorizer, c.Actor, rbac.ActionRead, objects) + allowed, err := rbac.Filter(context.Background(), authorizer, c.Actor, policy.ActionRead, objects) require.NoError(b, err) _ = allowed }) @@ -263,9 +264,9 @@ func BenchmarkRBACFilter(b *testing.B) { func benchmarkSetup(orgs []uuid.UUID, users []uuid.UUID, size int, opts ...func(object rbac.Object) rbac.Object) []rbac.Object { // Create a "random" but deterministic set of objects. 
- aclList := map[string][]rbac.Action{ - uuid.NewString(): {rbac.ActionRead, rbac.ActionUpdate}, - uuid.NewString(): {rbac.ActionCreate}, + aclList := map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionUpdate}, + uuid.NewString(): {policy.ActionCreate}, } objectList := make([]rbac.Object, size) for i := range objectList { @@ -297,7 +298,7 @@ func BenchmarkCacher(b *testing.B) { var ( subj rbac.Subject obj rbac.Object - action rbac.Action + action policy.Action ) for i := 0; i < b.N; i++ { if i%rat == 0 { @@ -359,7 +360,7 @@ func TestCacher(t *testing.T) { var ( ctx = testutil.Context(t, testutil.WaitShort) authOut = make(chan error, 1) // buffered to not block - authorizeFunc = func(ctx context.Context, subject rbac.Subject, action rbac.Action, object rbac.Object) error { + authorizeFunc = func(ctx context.Context, subject rbac.Subject, action policy.Action, object rbac.Object) error { // Just return what you're told. return testutil.RequireRecvCtx(ctx, t, authOut) } diff --git a/coderd/rbac/error.go b/coderd/rbac/error.go index 37c83f759efa3..98735ade322c4 100644 --- a/coderd/rbac/error.go +++ b/coderd/rbac/error.go @@ -11,6 +11,7 @@ import ( "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/httpapi/httpapiconstraints" + "github.com/coder/coder/v2/coderd/rbac/policy" ) const ( @@ -28,7 +29,7 @@ type UnauthorizedError struct { // These fields are for debugging purposes. subject Subject - action Action + action policy.Action // Note only the object type is set for partial execution. object Object @@ -52,7 +53,7 @@ func IsUnauthorizedError(err error) bool { // ForbiddenWithInternal creates a new error that will return a simple // "forbidden" to the client, logging internally the more detailed message // provided. 
-func ForbiddenWithInternal(internal error, subject Subject, action Action, object Object, output rego.ResultSet) *UnauthorizedError { +func ForbiddenWithInternal(internal error, subject Subject, action policy.Action, object Object, output rego.ResultSet) *UnauthorizedError { return &UnauthorizedError{ internal: internal, subject: subject, diff --git a/coderd/rbac/object.go b/coderd/rbac/object.go index 51b6da339c6ee..bac8b90fe90c4 100644 --- a/coderd/rbac/object.go +++ b/coderd/rbac/object.go @@ -2,6 +2,8 @@ package rbac import ( "github.com/google/uuid" + + "github.com/coder/coder/v2/coderd/rbac/policy" ) const WildcardSymbol = "*" @@ -250,8 +252,8 @@ type Object struct { // Type is "workspace", "project", "app", etc Type string `json:"type"` - ACLUserList map[string][]Action ` json:"acl_user_list"` - ACLGroupList map[string][]Action ` json:"acl_group_list"` + ACLUserList map[string][]policy.Action ` json:"acl_user_list"` + ACLGroupList map[string][]policy.Action ` json:"acl_group_list"` } func (z Object) Equal(b Object) bool { @@ -279,7 +281,7 @@ func (z Object) Equal(b Object) bool { return true } -func equalACLLists(a, b map[string][]Action) bool { +func equalACLLists(a, b map[string][]policy.Action) bool { if len(a) != len(b) { return false } @@ -307,8 +309,8 @@ func (z Object) All() Object { Owner: "", OrgID: "", Type: z.Type, - ACLUserList: map[string][]Action{}, - ACLGroupList: map[string][]Action{}, + ACLUserList: map[string][]policy.Action{}, + ACLGroupList: map[string][]policy.Action{}, } } @@ -359,7 +361,7 @@ func (z Object) WithOwner(ownerID string) Object { } // WithACLUserList adds an ACL list to a given object -func (z Object) WithACLUserList(acl map[string][]Action) Object { +func (z Object) WithACLUserList(acl map[string][]policy.Action) Object { return Object{ ID: z.ID, Owner: z.Owner, @@ -370,7 +372,7 @@ func (z Object) WithACLUserList(acl map[string][]Action) Object { } } -func (z Object) WithGroupACL(groups map[string][]Action) Object { +func 
(z Object) WithGroupACL(groups map[string][]policy.Action) Object { return Object{ ID: z.ID, Owner: z.Owner, diff --git a/coderd/rbac/object_test.go b/coderd/rbac/object_test.go index 505f12b8cc7b0..373119f7f0e57 100644 --- a/coderd/rbac/object_test.go +++ b/coderd/rbac/object_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/slice" ) @@ -24,8 +25,8 @@ func TestObjectEqual(t *testing.T) { { Name: "NilVs0", A: rbac.Object{ - ACLGroupList: map[string][]rbac.Action{}, - ACLUserList: map[string][]rbac.Action{}, + ACLGroupList: map[string][]policy.Action{}, + ACLUserList: map[string][]policy.Action{}, }, B: rbac.Object{}, Expected: true, @@ -37,16 +38,16 @@ func TestObjectEqual(t *testing.T) { Owner: "owner", OrgID: "orgID", Type: "type", - ACLUserList: map[string][]rbac.Action{}, - ACLGroupList: map[string][]rbac.Action{}, + ACLUserList: map[string][]policy.Action{}, + ACLGroupList: map[string][]policy.Action{}, }, B: rbac.Object{ ID: "id", Owner: "owner", OrgID: "orgID", Type: "type", - ACLUserList: map[string][]rbac.Action{}, - ACLGroupList: map[string][]rbac.Action{}, + ACLUserList: map[string][]policy.Action{}, + ACLGroupList: map[string][]policy.Action{}, }, Expected: true, }, @@ -93,13 +94,13 @@ func TestObjectEqual(t *testing.T) { { Name: "DifferentACLUserList", A: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionRead}, }, }, B: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user2": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user2": {policy.ActionRead}, }, }, Expected: false, @@ -107,13 +108,13 @@ func TestObjectEqual(t *testing.T) { { Name: "ACLUserDiff#Actions", A: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user1": 
{policy.ActionRead}, }, }, B: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead, rbac.ActionUpdate}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionRead, policy.ActionUpdate}, }, }, Expected: false, @@ -121,13 +122,13 @@ func TestObjectEqual(t *testing.T) { { Name: "ACLUserDiffAction", A: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionRead}, }, }, B: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionUpdate}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionUpdate}, }, }, Expected: false, @@ -135,14 +136,14 @@ func TestObjectEqual(t *testing.T) { { Name: "ACLUserDiff#Users", A: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionRead}, }, }, B: rbac.Object{ - ACLUserList: map[string][]rbac.Action{ - "user1": {rbac.ActionRead}, - "user2": {rbac.ActionRead}, + ACLUserList: map[string][]policy.Action{ + "user1": {policy.ActionRead}, + "user2": {policy.ActionRead}, }, }, Expected: false, @@ -150,13 +151,13 @@ func TestObjectEqual(t *testing.T) { { Name: "DifferentACLGroupList", A: rbac.Object{ - ACLGroupList: map[string][]rbac.Action{ - "group1": {rbac.ActionRead}, + ACLGroupList: map[string][]policy.Action{ + "group1": {policy.ActionRead}, }, }, B: rbac.Object{ - ACLGroupList: map[string][]rbac.Action{ - "group2": {rbac.ActionRead}, + ACLGroupList: map[string][]policy.Action{ + "group2": {policy.ActionRead}, }, }, Expected: false, diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go new file mode 100644 index 0000000000000..a3c0dc9f3436b --- /dev/null +++ b/coderd/rbac/policy/policy.go @@ -0,0 +1,11 @@ +package policy + +// Action represents the allowed actions to be done on an object. 
+type Action string + +const ( + ActionCreate Action = "create" + ActionRead Action = "read" + ActionUpdate Action = "update" + ActionDelete Action = "delete" +) diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index ebe122386fb99..f69cf49174f60 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -8,6 +8,8 @@ import ( "github.com/open-policy-agent/opa/ast" "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac/policy" ) const ( @@ -144,22 +146,22 @@ func ReloadBuiltinRoles(opts *RoleOptions) { memberRole := Role{ Name: member, DisplayName: "Member", - Site: Permissions(map[string][]Action{ - ResourceRoleAssignment.Type: {ActionRead}, + Site: Permissions(map[string][]policy.Action{ + ResourceRoleAssignment.Type: {policy.ActionRead}, // All users can see the provisioner daemons. - ResourceProvisionerDaemon.Type: {ActionRead}, + ResourceProvisionerDaemon.Type: {policy.ActionRead}, // All users can see OAuth2 provider applications. - ResourceOAuth2ProviderApp.Type: {ActionRead}, + ResourceOAuth2ProviderApp.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: append(allPermsExcept(ResourceWorkspaceDormant, ResourceUser, ResourceOrganizationMember), - Permissions(map[string][]Action{ + Permissions(map[string][]policy.Action{ // Users cannot do create/update/delete on themselves, but they // can read their own details. - ResourceUser.Type: {ActionRead}, - ResourceUserWorkspaceBuildParameters.Type: {ActionRead}, + ResourceUser.Type: {policy.ActionRead}, + ResourceUserWorkspaceBuildParameters.Type: {policy.ActionRead}, // Users can create provisioner daemons scoped to themselves. 
- ResourceProvisionerDaemon.Type: {ActionCreate, ActionRead, ActionUpdate}, + ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, })..., ), }.withCachedRegoValue() @@ -167,19 +169,19 @@ func ReloadBuiltinRoles(opts *RoleOptions) { auditorRole := Role{ Name: auditor, DisplayName: "Auditor", - Site: Permissions(map[string][]Action{ + Site: Permissions(map[string][]policy.Action{ // Should be able to read all template details, even in orgs they // are not in. - ResourceTemplate.Type: {ActionRead}, - ResourceTemplateInsights.Type: {ActionRead}, - ResourceAuditLog.Type: {ActionRead}, - ResourceUser.Type: {ActionRead}, - ResourceGroup.Type: {ActionRead}, + ResourceTemplate.Type: {policy.ActionRead}, + ResourceTemplateInsights.Type: {policy.ActionRead}, + ResourceAuditLog.Type: {policy.ActionRead}, + ResourceUser.Type: {policy.ActionRead}, + ResourceGroup.Type: {policy.ActionRead}, // Allow auditors to query deployment stats and insights. - ResourceDeploymentStats.Type: {ActionRead}, - ResourceDeploymentValues.Type: {ActionRead}, + ResourceDeploymentStats.Type: {policy.ActionRead}, + ResourceDeploymentValues.Type: {policy.ActionRead}, // Org roles are not really used yet, so grant the perm at the site level. - ResourceOrganizationMember.Type: {ActionRead}, + ResourceOrganizationMember.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -188,21 +190,21 @@ func ReloadBuiltinRoles(opts *RoleOptions) { templateAdminRole := Role{ Name: templateAdmin, DisplayName: "Template Admin", - Site: Permissions(map[string][]Action{ - ResourceTemplate.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, + Site: Permissions(map[string][]policy.Action{ + ResourceTemplate.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, // CRUD all files, even those they did not upload. 
- ResourceFile.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, - ResourceWorkspace.Type: {ActionRead}, + ResourceFile.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceWorkspace.Type: {policy.ActionRead}, // CRUD to provisioner daemons for now. - ResourceProvisionerDaemon.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, + ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, // Needs to read all organizations since - ResourceOrganization.Type: {ActionRead}, - ResourceUser.Type: {ActionRead}, - ResourceGroup.Type: {ActionRead}, + ResourceOrganization.Type: {policy.ActionRead}, + ResourceUser.Type: {policy.ActionRead}, + ResourceGroup.Type: {policy.ActionRead}, // Org roles are not really used yet, so grant the perm at the site level. - ResourceOrganizationMember.Type: {ActionRead}, + ResourceOrganizationMember.Type: {policy.ActionRead}, // Template admins can read all template insights data - ResourceTemplateInsights.Type: {ActionRead}, + ResourceTemplateInsights.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -211,14 +213,14 @@ func ReloadBuiltinRoles(opts *RoleOptions) { userAdminRole := Role{ Name: userAdmin, DisplayName: "User Admin", - Site: Permissions(map[string][]Action{ - ResourceRoleAssignment.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, - ResourceUser.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, - ResourceUserData.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, - ResourceUserWorkspaceBuildParameters.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, + Site: Permissions(map[string][]policy.Action{ + ResourceRoleAssignment.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceUser.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + 
ResourceUserData.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceUserWorkspaceBuildParameters.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, // Full perms to manage org members - ResourceOrganizationMember.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, - ResourceGroup.Type: {ActionCreate, ActionRead, ActionUpdate, ActionDelete}, + ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceGroup.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -277,19 +279,19 @@ func ReloadBuiltinRoles(opts *RoleOptions) { { // All org members can read the organization ResourceType: ResourceOrganization.Type, - Action: ActionRead, + Action: policy.ActionRead, }, { // Can read available roles. ResourceType: ResourceOrgRoleAssignment.Type, - Action: ActionRead, + Action: policy.ActionRead, }, }, }, User: []Permission{ { ResourceType: ResourceOrganizationMember.Type, - Action: ActionRead, + Action: policy.ActionRead, }, }, } @@ -349,9 +351,9 @@ type ExpandableRoles interface { // Permission is the format passed into the rego. type Permission struct { // Negate makes this a negative permission - Negate bool `json:"negate"` - ResourceType string `json:"resource_type"` - Action Action `json:"action"` + Negate bool `json:"negate"` + ResourceType string `json:"resource_type"` + Action policy.Action `json:"action"` } // Role is a set of permissions at multiple levels: @@ -521,7 +523,7 @@ func SiteRoles() []Role { // ChangeRoleSet is a helper function that finds the difference of 2 sets of // roles. When setting a user's new roles, it is equivalent to adding and // removing roles. 
This set determines the changes, so that the appropriate -// RBAC checks can be applied using "ActionCreate" and "ActionDelete" for +// RBAC checks can be applied using "policy.ActionCreate" and "policy.ActionDelete" for // "added" and "removed" roles respectively. func ChangeRoleSet(from []string, to []string) (added []string, removed []string) { has := make(map[string]struct{}) @@ -579,7 +581,7 @@ func roleSplit(role string) (name string, orgID string, err error) { // Permissions is just a helper function to make building roles that list out resources // and actions a bit easier. -func Permissions(perms map[string][]Action) []Permission { +func Permissions(perms map[string][]policy.Action) []Permission { list := make([]Permission, 0, len(perms)) for k, actions := range perms { for _, act := range actions { diff --git a/coderd/rbac/roles_internal_test.go b/coderd/rbac/roles_internal_test.go index 2055cfaafe42c..07126981081d8 100644 --- a/coderd/rbac/roles_internal_test.go +++ b/coderd/rbac/roles_internal_test.go @@ -6,6 +6,8 @@ import ( "github.com/google/uuid" "github.com/open-policy-agent/opa/ast" "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/rbac/policy" ) // BenchmarkRBACValueAllocation benchmarks the cost of allocating a rego input @@ -27,13 +29,13 @@ func BenchmarkRBACValueAllocation(b *testing.B) { WithID(uuid.New()). InOrg(uuid.New()). WithOwner(uuid.NewString()). 
- WithGroupACL(map[string][]Action{ - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, - }).WithACLUserList(map[string][]Action{ - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, + WithGroupACL(map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + }).WithACLUserList(map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, }) jsonSubject := authSubject{ @@ -45,7 +47,7 @@ func BenchmarkRBACValueAllocation(b *testing.B) { b.Run("ManualRegoValue", func(b *testing.B) { for i := 0; i < b.N; i++ { - _, err := regoInputValue(actor, ActionRead, obj) + _, err := regoInputValue(actor, policy.ActionRead, obj) require.NoError(b, err) } }) @@ -53,7 +55,7 @@ func BenchmarkRBACValueAllocation(b *testing.B) { for i := 0; i < b.N; i++ { _, err := ast.InterfaceToValue(map[string]interface{}{ "subject": jsonSubject, - "action": ActionRead, + "action": policy.ActionRead, "object": obj, }) require.NoError(b, err) @@ -90,16 +92,16 @@ func TestRegoInputValue(t *testing.T) { WithID(uuid.New()). InOrg(uuid.New()). WithOwner(uuid.NewString()). 
- WithGroupACL(map[string][]Action{ - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, - }).WithACLUserList(map[string][]Action{ - uuid.NewString(): {ActionRead, ActionCreate}, - uuid.NewString(): {ActionRead, ActionCreate}, + WithGroupACL(map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + }).WithACLUserList(map[string][]policy.Action{ + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, + uuid.NewString(): {policy.ActionRead, policy.ActionCreate}, }) - action := ActionRead + action := policy.ActionRead t.Run("InputValue", func(t *testing.T) { t.Parallel() diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index 7ef98a7f3d46e..b5e78e606b8d4 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -11,6 +11,7 @@ import ( "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" ) type authSubject struct { @@ -35,7 +36,7 @@ func TestOwnerExec(t *testing.T) { auth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) // Exec a random workspace - err := auth.Authorize(context.Background(), owner, rbac.ActionCreate, + err := auth.Authorize(context.Background(), owner, policy.ActionCreate, rbac.ResourceWorkspaceExecution.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) require.ErrorAsf(t, err, &rbac.UnauthorizedError{}, "expected unauthorized error") }) @@ -49,7 +50,7 @@ func TestOwnerExec(t *testing.T) { auth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) // Exec a random workspace - err := auth.Authorize(context.Background(), owner, rbac.ActionCreate, + err := auth.Authorize(context.Background(), owner, policy.ActionCreate, 
rbac.ResourceWorkspaceExecution.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) require.NoError(t, err, "expected owner can") }) @@ -94,7 +95,7 @@ func TestRolePermissions(t *testing.T) { // Name the test case to better locate the failing test case. Name string Resource rbac.Object - Actions []rbac.Action + Actions []policy.Action // AuthorizeMap must cover all subjects in 'requiredSubjects'. // This map will run an Authorize() check with the resource, action, // and subjects. The subjects are split into 2 categories, "true" and @@ -105,7 +106,7 @@ func TestRolePermissions(t *testing.T) { }{ { Name: "MyUser", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceUserObject(currentUser), AuthorizeMap: map[bool][]authSubject{ true: {orgMemberMe, owner, memberMe, templateAdmin, userAdmin}, @@ -114,7 +115,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "AUser", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceUser, AuthorizeMap: map[bool][]authSubject{ true: {owner, userAdmin}, @@ -124,7 +125,7 @@ func TestRolePermissions(t *testing.T) { { Name: "ReadMyWorkspaceInOrg", // When creating the WithID won't be set, but it does not change the result. - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, orgAdmin, templateAdmin}, @@ -134,7 +135,7 @@ func TestRolePermissions(t *testing.T) { { Name: "C_RDMyWorkspaceInOrg", // When creating the WithID won't be set, but it does not change the result. 
- Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, orgAdmin}, @@ -144,7 +145,7 @@ func TestRolePermissions(t *testing.T) { { Name: "MyWorkspaceInOrgExecution", // When creating the WithID won't be set, but it does not change the result. - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceWorkspaceExecution.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe}, @@ -154,7 +155,7 @@ func TestRolePermissions(t *testing.T) { { Name: "MyWorkspaceInOrgAppConnect", // When creating the WithID won't be set, but it does not change the result. 
- Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceWorkspaceApplicationConnect.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe}, @@ -163,7 +164,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "Templates", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceTemplate.WithID(templateID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, templateAdmin}, @@ -172,7 +173,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ReadTemplates", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceTemplate.InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, templateAdmin}, @@ -181,7 +182,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "Files", - Actions: []rbac.Action{rbac.ActionCreate}, + Actions: []policy.Action{policy.ActionCreate}, Resource: rbac.ResourceFile.WithID(fileID), AuthorizeMap: map[bool][]authSubject{ true: {owner, templateAdmin}, @@ -190,7 +191,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "MyFile", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceFile.WithID(fileID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, memberMe, orgMemberMe, templateAdmin}, @@ -199,7 +200,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "CreateOrganizations", - Actions: []rbac.Action{rbac.ActionCreate}, + 
Actions: []policy.Action{policy.ActionCreate}, Resource: rbac.ResourceOrganization, AuthorizeMap: map[bool][]authSubject{ true: {owner}, @@ -208,7 +209,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "Organizations", - Actions: []rbac.Action{rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceOrganization.WithID(orgID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin}, @@ -217,7 +218,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ReadOrganizations", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceOrganization.WithID(orgID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe, templateAdmin}, @@ -226,7 +227,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "RoleAssignment", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceRoleAssignment, AuthorizeMap: map[bool][]authSubject{ true: {owner, userAdmin}, @@ -235,7 +236,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ReadRoleAssignment", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceRoleAssignment, AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin, userAdmin}, @@ -244,7 +245,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "OrgRoleAssignment", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceOrgRoleAssignment.InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin}, @@ -253,7 +254,7 @@ func TestRolePermissions(t 
*testing.T) { }, { Name: "ReadOrgRoleAssignment", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceOrgRoleAssignment.InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe}, @@ -262,7 +263,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "APIKey", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceAPIKey.WithID(apiKeyID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, memberMe}, @@ -271,7 +272,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "UserData", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionRead, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceUserData.WithID(currentUser).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, memberMe, userAdmin}, @@ -280,7 +281,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ManageOrgMember", - Actions: []rbac.Action{rbac.ActionCreate, rbac.ActionUpdate, rbac.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, Resource: rbac.ResourceOrganizationMember.WithID(currentUser).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, userAdmin}, @@ -289,7 +290,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ReadOrgMember", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceOrganizationMember.WithID(currentUser).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, userAdmin, 
orgMemberMe, templateAdmin}, @@ -298,10 +299,10 @@ func TestRolePermissions(t *testing.T) { }, { Name: "AllUsersGroupACL", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceTemplate.WithID(templateID).InOrg(orgID).WithGroupACL( - map[string][]rbac.Action{ - orgID.String(): {rbac.ActionRead}, + map[string][]policy.Action{ + orgID.String(): {policy.ActionRead}, }), AuthorizeMap: map[bool][]authSubject{ @@ -311,7 +312,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "Groups", - Actions: []rbac.Action{rbac.ActionRead}, + Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceGroup.WithID(groupID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, userAdmin, templateAdmin}, diff --git a/coderd/rbac/scopes.go b/coderd/rbac/scopes.go index fe85be67632c5..6353ca3c67919 100644 --- a/coderd/rbac/scopes.go +++ b/coderd/rbac/scopes.go @@ -6,6 +6,8 @@ import ( "github.com/google/uuid" "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/rbac/policy" ) type WorkspaceAgentScopeParams struct { @@ -58,7 +60,7 @@ var builtinScopes = map[ScopeName]Scope{ Role: Role{ Name: fmt.Sprintf("Scope_%s", ScopeAll), DisplayName: "All operations", - Site: Permissions(map[string][]Action{ + Site: Permissions(map[string][]policy.Action{ ResourceWildcard.Type: {WildcardSymbol}, }), Org: map[string][]Permission{}, @@ -71,8 +73,8 @@ var builtinScopes = map[ScopeName]Scope{ Role: Role{ Name: fmt.Sprintf("Scope_%s", ScopeApplicationConnect), DisplayName: "Ability to connect to applications", - Site: Permissions(map[string][]Action{ - ResourceWorkspaceApplicationConnect.Type: {ActionCreate}, + Site: Permissions(map[string][]policy.Action{ + ResourceWorkspaceApplicationConnect.Type: {policy.ActionCreate}, }), Org: map[string][]Permission{}, User: []Permission{}, diff --git a/coderd/roles.go b/coderd/roles.go index 7d49eadba7222..1cc74535119e3 100644 --- a/coderd/roles.go +++ 
b/coderd/roles.go @@ -4,6 +4,7 @@ import ( "net/http" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/coderd/httpapi" @@ -22,7 +23,7 @@ import ( func (api *API) assignableSiteRoles(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() actorRoles := httpmw.UserAuthorization(r) - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceRoleAssignment) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceRoleAssignment) { httpapi.Forbidden(rw) return } @@ -46,7 +47,7 @@ func (api *API) assignableOrgRoles(rw http.ResponseWriter, r *http.Request) { organization := httpmw.OrganizationParam(r) actorRoles := httpmw.UserAuthorization(r) - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceOrgRoleAssignment.InOrg(organization.ID)) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceOrgRoleAssignment.InOrg(organization.ID)) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/templates.go b/coderd/templates.go index 59537b962c21e..94601ba2cc35b 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -19,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/util/ptr" @@ -322,7 +323,7 @@ func (api *API) postTemplateByOrganization(rw http.ResponseWriter, r *http.Reque if !createTemplate.DisableEveryoneGroupAccess { // The organization ID is used as the group ID for the everyone group // in this organization. 
- defaultsGroups[organization.ID.String()] = []rbac.Action{rbac.ActionRead} + defaultsGroups[organization.ID.String()] = []policy.Action{policy.ActionRead} } err = api.Database.InTx(func(tx database.Store) error { now := dbtime.Now() @@ -455,7 +456,7 @@ func (api *API) templatesByOrganization(rw http.ResponseWriter, r *http.Request) return } - prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, rbac.ActionRead, rbac.ResourceTemplate.Type) + prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, policy.ActionRead, rbac.ResourceTemplate.Type) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error preparing sql filter.", @@ -807,7 +808,7 @@ func (api *API) templateExamples(rw http.ResponseWriter, r *http.Request) { organization = httpmw.OrganizationParam(r) ) - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceTemplate.InOrg(organization.ID)) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceTemplate.InOrg(organization.ID)) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/templateversions.go b/coderd/templateversions.go index ca8f660454ea8..788a01ba353b1 100644 --- a/coderd/templateversions.go +++ b/coderd/templateversions.go @@ -17,6 +17,7 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/audit" "github.com/coder/coder/v2/coderd/database" @@ -430,7 +431,7 @@ func (api *API) postTemplateVersionDryRun(rw http.ResponseWriter, r *http.Reques // We use the workspace RBAC check since we don't want to allow dry runs if // the user can't create workspaces. 
- if !api.Authorize(r, rbac.ActionCreate, + if !api.Authorize(r, policy.ActionCreate, rbac.ResourceWorkspace.InOrg(templateVersion.OrganizationID).WithOwner(apiKey.UserID.String())) { httpapi.ResourceNotFound(rw) return @@ -603,7 +604,7 @@ func (api *API) patchTemplateVersionDryRunCancel(rw http.ResponseWriter, r *http if !ok { return } - if !api.Authorize(r, rbac.ActionUpdate, + if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceWorkspace.InOrg(templateVersion.OrganizationID).WithOwner(job.ProvisionerJob.InitiatorID.String())) { httpapi.ResourceNotFound(rw) return @@ -684,7 +685,7 @@ func (api *API) fetchTemplateVersionDryRunJob(rw http.ResponseWriter, r *http.Re } // Do a workspace resource check since it's basically a workspace dry-run. - if !api.Authorize(r, rbac.ActionRead, + if !api.Authorize(r, policy.ActionRead, rbac.ResourceWorkspace.InOrg(templateVersion.OrganizationID).WithOwner(job.ProvisionerJob.InitiatorID.String())) { httpapi.Forbidden(rw) return database.GetProvisionerJobsByIDsWithQueuePositionRow{}, false @@ -1359,12 +1360,12 @@ func (api *API) postTemplateVersionsByOrganization(rw http.ResponseWriter, r *ht var err error // if example id is specified we need to copy the embedded tar into a new file in the database if req.ExampleID != "" { - if !api.Authorize(r, rbac.ActionCreate, rbac.ResourceFile.WithOwner(apiKey.UserID.String())) { + if !api.Authorize(r, policy.ActionCreate, rbac.ResourceFile.WithOwner(apiKey.UserID.String())) { httpapi.Forbidden(rw) return } // ensure we can read the file that either already exists or will be created - if !api.Authorize(r, rbac.ActionRead, rbac.ResourceFile.WithOwner(apiKey.UserID.String())) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceFile.WithOwner(apiKey.UserID.String())) { httpapi.Forbidden(rw) return } diff --git a/coderd/templateversions_test.go b/coderd/templateversions_test.go index 3089b4eecf3e1..1267213932649 100644 --- a/coderd/templateversions_test.go +++ 
b/coderd/templateversions_test.go @@ -18,6 +18,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/examples" "github.com/coder/coder/v2/provisioner/echo" @@ -38,14 +39,14 @@ func TestTemplateVersion(t *testing.T) { req.Name = "bananas" req.Message = "first try" }) - authz.AssertChecked(t, rbac.ActionCreate, rbac.ResourceTemplate.InOrg(user.OrganizationID)) + authz.AssertChecked(t, policy.ActionCreate, rbac.ResourceTemplate.InOrg(user.OrganizationID)) ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() authz.Reset() tv, err := client.TemplateVersion(ctx, version.ID) - authz.AssertChecked(t, rbac.ActionRead, tv) + authz.AssertChecked(t, policy.ActionRead, tv) require.NoError(t, err) assert.Equal(t, "bananas", tv.Name) diff --git a/coderd/users.go b/coderd/users.go index 47cb6e1cd3be3..c698661d71429 100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -21,6 +21,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/searchquery" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/userpassword" @@ -1021,7 +1022,7 @@ func (api *API) userRoles(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() user := httpmw.UserParam(r) - if !api.Authorize(r, rbac.ActionRead, user.UserDataRBACObject()) { + if !api.Authorize(r, policy.ActionRead, user.UserDataRBACObject()) { httpapi.ResourceNotFound(rw) return } @@ -1171,7 +1172,7 @@ func (api *API) organizationsByUser(rw http.ResponseWriter, r *http.Request) { } // Only return orgs the user can read. 
- organizations, err = AuthorizeFilter(api.HTTPAuth, r, rbac.ActionRead, organizations) + organizations, err = AuthorizeFilter(api.HTTPAuth, r, policy.ActionRead, organizations) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching organizations.", diff --git a/coderd/users_test.go b/coderd/users_test.go index bc48632975eb2..588a2e107566b 100644 --- a/coderd/users_test.go +++ b/coderd/users_test.go @@ -10,6 +10,7 @@ import ( "github.com/coder/coder/v2/coderd" "github.com/coder/coder/v2/coderd/coderdtest/oidctest" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/serpent" "github.com/golang-jwt/jwt/v4" @@ -325,8 +326,8 @@ func TestDeleteUser(t *testing.T) { require.Equal(t, http.StatusUnauthorized, apiErr.StatusCode()) // RBAC checks - authz.AssertChecked(t, rbac.ActionCreate, rbac.ResourceUser) - authz.AssertChecked(t, rbac.ActionDelete, another) + authz.AssertChecked(t, policy.ActionCreate, rbac.ResourceUser) + authz.AssertChecked(t, policy.ActionDelete, another) }) t.Run("NoPermission", func(t *testing.T) { t.Parallel() diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 03ca23dcc1162..d79d191af9ce5 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -35,7 +35,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/prometheusmetrics" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" @@ -1030,7 +1030,7 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R // This route accepts user API key auth and workspace proxy auth. The moon actor has // full permissions so should be able to pass this authz check. 
workspace := httpmw.WorkspaceParam(r) - if !api.Authorize(r, rbac.ActionCreate, workspace.ExecutionRBAC()) { + if !api.Authorize(r, policy.ActionCreate, workspace.ExecutionRBAC()) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/workspaceapps.go b/coderd/workspaceapps.go index 8c6ffdb62e34a..d2fa11b9ea2ea 100644 --- a/coderd/workspaceapps.go +++ b/coderd/workspaceapps.go @@ -16,7 +16,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" @@ -53,7 +53,7 @@ func (api *API) appHost(rw http.ResponseWriter, r *http.Request) { func (api *API) workspaceApplicationAuth(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) - if !api.Authorize(r, rbac.ActionCreate, apiKey) { + if !api.Authorize(r, policy.ActionCreate, apiKey) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 619bdd95ba165..144de2f2573f9 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -19,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -281,8 +282,8 @@ func (p *DBTokenProvider) authorizeRequest(ctx context.Context, roles *rbac.Subj // Figure out which RBAC resource to check. For terminals we use execution // instead of application connect. 
var ( - rbacAction rbac.Action = rbac.ActionCreate - rbacResource rbac.Object = dbReq.Workspace.ApplicationConnectRBAC() + rbacAction policy.Action = policy.ActionCreate + rbacResource rbac.Object = dbReq.Workspace.ApplicationConnectRBAC() // rbacResourceOwned is for the level "authenticated". We still need to // make sure the API key has permissions to connect to the actor's own // workspace. Scopes would prevent this. diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go index 40514cb913d86..ef5b63a1e5b19 100644 --- a/coderd/workspacebuilds.go +++ b/coderd/workspacebuilds.go @@ -28,6 +28,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/codersdk" ) @@ -374,7 +375,7 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) { workspaceBuild, provisionerJob, err := builder.Build( ctx, api.Database, - func(action rbac.Action, object rbac.Objecter) bool { + func(action policy.Action, object rbac.Objecter) bool { return api.Authorize(r, action, object) }, audit.WorkspaceBuildBaggageFromRequest(r), @@ -636,7 +637,7 @@ func (api *API) workspaceBuildState(rw http.ResponseWriter, r *http.Request) { // You must have update permissions on the template to get the state. // This matches a push! 
- if !api.Authorize(r, rbac.ActionUpdate, template.RBACObject()) { + if !api.Authorize(r, policy.ActionUpdate, template.RBACObject()) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 6f398b34488d6..7d0344be4e321 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -24,6 +24,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule/cron" "github.com/coder/coder/v2/coderd/searchquery" "github.com/coder/coder/v2/coderd/telemetry" @@ -160,7 +161,7 @@ func (api *API) workspaces(rw http.ResponseWriter, r *http.Request) { } // Workspaces do not have ACL columns. - prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, rbac.ActionRead, rbac.ResourceWorkspace.Type) + prepared, err := api.HTTPAuth.AuthorizeSQLFilter(r, policy.ActionRead, rbac.ResourceWorkspace.Type) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error preparing sql filter.", @@ -375,7 +376,7 @@ func (api *API) postWorkspacesByOrganization(rw http.ResponseWriter, r *http.Req defer commitAudit() // Do this upfront to save work. 
- if !api.Authorize(r, rbac.ActionCreate, + if !api.Authorize(r, policy.ActionCreate, rbac.ResourceWorkspace.InOrg(organization.ID).WithOwner(member.UserID.String())) { httpapi.ResourceNotFound(rw) return @@ -570,7 +571,7 @@ func (api *API) postWorkspacesByOrganization(rw http.ResponseWriter, r *http.Req workspaceBuild, provisionerJob, err = builder.Build( ctx, db, - func(action rbac.Action, object rbac.Objecter) bool { + func(action policy.Action, object rbac.Objecter) bool { return api.Authorize(r, action, object) }, audit.WorkspaceBuildBaggageFromRequest(r), @@ -1109,7 +1110,7 @@ func (api *API) putExtendWorkspace(rw http.ResponseWriter, r *http.Request) { // @Router /workspaces/{workspace}/usage [post] func (api *API) postWorkspaceUsage(rw http.ResponseWriter, r *http.Request) { workspace := httpmw.WorkspaceParam(r) - if !api.Authorize(r, rbac.ActionUpdate, workspace) { + if !api.Authorize(r, policy.ActionUpdate, workspace) { httpapi.Forbidden(rw) return } diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index c01f9689d6ace..d91de4a5e26a1 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -31,6 +31,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/parameter" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/schedule/cron" "github.com/coder/coder/v2/coderd/util/ptr" @@ -59,7 +60,7 @@ func TestWorkspace(t *testing.T) { authz.Reset() // Reset all previous checks done in setup. 
ws, err := client.Workspace(ctx, workspace.ID) - authz.AssertChecked(t, rbac.ActionRead, ws) + authz.AssertChecked(t, policy.ActionRead, ws) require.NoError(t, err) require.Equal(t, user.UserID, ws.LatestBuild.InitiatorID) require.Equal(t, codersdk.BuildReasonInitiator, ws.LatestBuild.Reason) diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 810f52502c6f8..3959c0e55a428 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -10,6 +10,7 @@ import ( "net/http" "time" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/provisionersdk" "github.com/google/uuid" @@ -202,7 +203,7 @@ func (e BuildError) Unwrap() error { func (b *Builder) Build( ctx context.Context, store database.Store, - authFunc func(action rbac.Action, object rbac.Objecter) bool, + authFunc func(action policy.Action, object rbac.Objecter) bool, auditBaggage audit.WorkspaceBuildBaggage, ) ( *database.WorkspaceBuild, *database.ProvisionerJob, error, @@ -237,7 +238,7 @@ func (b *Builder) Build( // the calculation of multiple attributes. // // In order to utilize this cache, the functions that compute build attributes use a pointer receiver type. -func (b *Builder) buildTx(authFunc func(action rbac.Action, object rbac.Objecter) bool) ( +func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Objecter) bool) ( *database.WorkspaceBuild, *database.ProvisionerJob, error, ) { if authFunc != nil { @@ -632,16 +633,16 @@ func (b *Builder) getLastBuildJob() (*database.ProvisionerJob, error) { } // authorize performs build authorization pre-checks using the provided authFunc -func (b *Builder) authorize(authFunc func(action rbac.Action, object rbac.Objecter) bool) error { +func (b *Builder) authorize(authFunc func(action policy.Action, object rbac.Objecter) bool) error { // Doing this up front saves a lot of work if the user doesn't have permission. 
// This is checked again in the dbauthz layer, but the check is cached // and will be a noop later. - var action rbac.Action + var action policy.Action switch b.trans { case database.WorkspaceTransitionDelete: - action = rbac.ActionDelete + action = policy.ActionDelete case database.WorkspaceTransitionStart, database.WorkspaceTransitionStop: - action = rbac.ActionUpdate + action = policy.ActionUpdate default: msg := fmt.Sprintf("Transition %q not supported.", b.trans) return BuildError{http.StatusBadRequest, msg, xerrors.New(msg)} @@ -659,12 +660,12 @@ func (b *Builder) authorize(authFunc func(action rbac.Action, object rbac.Object // If custom state, deny request since user could be corrupting or leaking // cloud state. if b.state.explicit != nil || b.state.orphan { - if !authFunc(rbac.ActionUpdate, template.RBACObject()) { + if !authFunc(policy.ActionUpdate, template.RBACObject()) { return BuildError{http.StatusForbidden, "Only template managers may provide custom state", xerrors.New("Only template managers may provide custom state")} } } - if b.logLevel != "" && !authFunc(rbac.ActionRead, rbac.ResourceDeploymentValues) { + if b.logLevel != "" && !authFunc(policy.ActionRead, rbac.ResourceDeploymentValues) { return BuildError{ http.StatusBadRequest, "Workspace builds with a custom log level are restricted to administrators only.", diff --git a/enterprise/coderd/appearance.go b/enterprise/coderd/appearance.go index 7029340672b6e..5104936ac62a4 100644 --- a/enterprise/coderd/appearance.go +++ b/enterprise/coderd/appearance.go @@ -16,6 +16,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -136,7 +137,7 @@ func validateHexColor(color string) error { func (api *API) putAppearance(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - if !api.Authorize(r, rbac.ActionUpdate, 
rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentValues) { httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ Message: "Insufficient permissions to update appearance", }) diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 722cc3631ed05..56c774911018b 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -15,6 +15,7 @@ import ( "github.com/coder/coder/v2/coderd/appearance" "github.com/coder/coder/v2/coderd/database" agplportsharing "github.com/coder/coder/v2/coderd/portsharing" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/enterprise/coderd/portsharing" "golang.org/x/xerrors" @@ -132,7 +133,7 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { // If the user can read the workspace proxy resource, return that. // If not, always default to the regions. actor, ok := agpldbauthz.ActorFromContext(ctx) - if ok && api.Authorizer.Authorize(ctx, actor, rbac.ActionRead, rbac.ResourceWorkspaceProxy) == nil { + if ok && api.Authorizer.Authorize(ctx, actor, policy.ActionRead, rbac.ResourceWorkspaceProxy) == nil { return api.fetchWorkspaceProxies(ctx) } return api.fetchRegions(ctx) @@ -1016,6 +1017,6 @@ func (api *API) runEntitlementsLoop(ctx context.Context) { } } -func (api *API) Authorize(r *http.Request, action rbac.Action, object rbac.Objecter) bool { +func (api *API) Authorize(r *http.Request, action policy.Action, object rbac.Objecter) bool { return api.AGPL.HTTPAuth.Authorize(r, action, object) } diff --git a/enterprise/coderd/coderd_test.go b/enterprise/coderd/coderd_test.go index 1b156a4355c89..e53b714b3fe22 100644 --- a/enterprise/coderd/coderd_test.go +++ b/enterprise/coderd/coderd_test.go @@ -15,6 +15,7 @@ import ( "go.uber.org/goleak" "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/v2/coderd/rbac/policy" agplaudit "github.com/coder/coder/v2/coderd/audit" 
"github.com/coder/coder/v2/coderd/coderdtest" @@ -498,7 +499,7 @@ func testDBAuthzRole(ctx context.Context) context.Context { { Name: "testing", DisplayName: "Unit Tests", - Site: rbac.Permissions(map[string][]rbac.Action{ + Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceWildcard.Type: {rbac.WildcardSymbol}, }), Org: map[string][]rbac.Permission{}, diff --git a/enterprise/coderd/groups.go b/enterprise/coderd/groups.go index fa5da3f7827c3..dea135f683fb8 100644 --- a/enterprise/coderd/groups.go +++ b/enterprise/coderd/groups.go @@ -14,7 +14,7 @@ import ( "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -397,7 +397,7 @@ func (api *API) groups(rw http.ResponseWriter, r *http.Request) { } // Filter groups based on rbac permissions - groups, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, rbac.ActionRead, groups) + groups, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, policy.ActionRead, groups) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching groups.", diff --git a/enterprise/coderd/licenses.go b/enterprise/coderd/licenses.go index 2808bc4920e93..54bc57b649f62 100644 --- a/enterprise/coderd/licenses.go +++ b/enterprise/coderd/licenses.go @@ -27,6 +27,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/enterprise/coderd/license" ) @@ -75,7 +76,7 @@ func (api *API) postLicense(rw http.ResponseWriter, r *http.Request) { ) defer commitAudit() - if !api.AGPL.Authorize(r, rbac.ActionCreate, rbac.ResourceLicense) { + if !api.AGPL.Authorize(r, 
policy.ActionCreate, rbac.ResourceLicense) { httpapi.Forbidden(rw) return } @@ -181,7 +182,7 @@ func (api *API) postRefreshEntitlements(rw http.ResponseWriter, r *http.Request) // If the user cannot create a new license, then they cannot refresh entitlements. // Refreshing entitlements is a way to force a refresh of the license, so it is // equivalent to creating a new license. - if !api.AGPL.Authorize(r, rbac.ActionCreate, rbac.ResourceLicense) { + if !api.AGPL.Authorize(r, policy.ActionCreate, rbac.ResourceLicense) { httpapi.Forbidden(rw) return } @@ -258,7 +259,7 @@ func (api *API) licenses(rw http.ResponseWriter, r *http.Request) { return } - licenses, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, rbac.ActionRead, licenses) + licenses, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, policy.ActionRead, licenses) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching licenses.", @@ -315,7 +316,7 @@ func (api *API) deleteLicense(rw http.ResponseWriter, r *http.Request) { defer commitAudit() aReq.Old = dl - if !api.AGPL.Authorize(r, rbac.ActionDelete, rbac.ResourceLicense) { + if !api.AGPL.Authorize(r, policy.ActionDelete, rbac.ResourceLicense) { httpapi.Forbidden(rw) return } diff --git a/enterprise/coderd/provisionerdaemons.go b/enterprise/coderd/provisionerdaemons.go index 0161e7a265d60..827ecfffe46a6 100644 --- a/enterprise/coderd/provisionerdaemons.go +++ b/enterprise/coderd/provisionerdaemons.go @@ -29,6 +29,7 @@ import ( "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/codersdk" @@ -77,7 +78,7 @@ func (api *API) provisionerDaemons(rw http.ResponseWriter, r *http.Request) { if daemons == nil { daemons = []database.ProvisionerDaemon{} } 
- daemons, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, rbac.ActionRead, daemons) + daemons, err = coderd.AuthorizeFilter(api.AGPL.HTTPAuth, r, policy.ActionRead, daemons) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ Message: "Internal error fetching provisioner daemons.", @@ -107,7 +108,7 @@ func (p *provisionerDaemonAuth) authorize(r *http.Request, tags map[string]strin return tags, true } ua := httpmw.UserAuthorization(r) - if err := p.authorizer.Authorize(ctx, ua, rbac.ActionCreate, rbac.ResourceProvisionerDaemon); err == nil { + if err := p.authorizer.Authorize(ctx, ua, policy.ActionCreate, rbac.ResourceProvisionerDaemon); err == nil { // User is allowed to create provisioner daemons return tags, true } diff --git a/enterprise/coderd/replicas.go b/enterprise/coderd/replicas.go index 536048aaac84a..75b6c36fdde17 100644 --- a/enterprise/coderd/replicas.go +++ b/enterprise/coderd/replicas.go @@ -6,6 +6,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -19,7 +20,7 @@ import ( // @Success 200 {array} codersdk.Replica // @Router /replicas [get] func (api *API) replicas(rw http.ResponseWriter, r *http.Request) { - if !api.AGPL.Authorize(r, rbac.ActionRead, rbac.ResourceReplicas) { + if !api.AGPL.Authorize(r, policy.ActionRead, rbac.ResourceReplicas) { httpapi.ResourceNotFound(rw) return } diff --git a/enterprise/coderd/templates.go b/enterprise/coderd/templates.go index f7ad7b12ed155..feddcce4d8372 100644 --- a/enterprise/coderd/templates.go +++ b/enterprise/coderd/templates.go @@ -16,6 +16,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -35,7 +36,7 @@ func 
(api *API) templateAvailablePermissions(rw http.ResponseWriter, r *http.Req // Requires update permission on the template to list all avail users/groups // for assignment. - if !api.Authorize(r, rbac.ActionUpdate, template) { + if !api.Authorize(r, policy.ActionUpdate, template) { httpapi.ResourceNotFound(rw) return } @@ -305,9 +306,9 @@ func validateTemplateRole(role codersdk.TemplateRole) error { return nil } -func convertToTemplateRole(actions []rbac.Action) codersdk.TemplateRole { +func convertToTemplateRole(actions []policy.Action) codersdk.TemplateRole { switch { - case len(actions) == 1 && actions[0] == rbac.ActionRead: + case len(actions) == 1 && actions[0] == policy.ActionRead: return codersdk.TemplateRoleUse case len(actions) == 1 && actions[0] == rbac.WildcardSymbol: return codersdk.TemplateRoleAdmin @@ -316,12 +317,12 @@ func convertToTemplateRole(actions []rbac.Action) codersdk.TemplateRole { return "" } -func convertSDKTemplateRole(role codersdk.TemplateRole) []rbac.Action { +func convertSDKTemplateRole(role codersdk.TemplateRole) []policy.Action { switch role { case codersdk.TemplateRoleAdmin: - return []rbac.Action{rbac.WildcardSymbol} + return []policy.Action{rbac.WildcardSymbol} case codersdk.TemplateRoleUse: - return []rbac.Action{rbac.ActionRead} + return []policy.Action{policy.ActionRead} } return nil diff --git a/enterprise/coderd/workspaceproxy.go b/enterprise/coderd/workspaceproxy.go index 234212f479cfd..22fe1bc747cbe 100644 --- a/enterprise/coderd/workspaceproxy.go +++ b/enterprise/coderd/workspaceproxy.go @@ -21,7 +21,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/workspaceapps" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" @@ -799,7 +799,7 @@ func (api *API) 
workspaceProxyDeregister(rw http.ResponseWriter, r *http.Request func (api *API) reconnectingPTYSignedToken(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() apiKey := httpmw.APIKey(r) - if !api.Authorize(r, rbac.ActionCreate, apiKey) { + if !api.Authorize(r, policy.ActionCreate, apiKey) { httpapi.ResourceNotFound(rw) return } diff --git a/enterprise/coderd/workspacequota.go b/enterprise/coderd/workspacequota.go index 44ea3f302ff37..d11111edac388 100644 --- a/enterprise/coderd/workspacequota.go +++ b/enterprise/coderd/workspacequota.go @@ -13,7 +13,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisionerd/proto" ) @@ -123,7 +123,7 @@ func (c *committer) CommitQuota( func (api *API) workspaceQuota(rw http.ResponseWriter, r *http.Request) { user := httpmw.UserParam(r) - if !api.AGPL.Authorize(r, rbac.ActionRead, user) { + if !api.AGPL.Authorize(r, policy.ActionRead, user) { httpapi.ResourceNotFound(rw) return } diff --git a/enterprise/tailnet/pgcoord.go b/enterprise/tailnet/pgcoord.go index 1a9dc88378b03..8cb9595492feb 100644 --- a/enterprise/tailnet/pgcoord.go +++ b/enterprise/tailnet/pgcoord.go @@ -19,6 +19,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" agpl "github.com/coder/coder/v2/tailnet" "github.com/coder/coder/v2/tailnet/proto" ) @@ -101,7 +102,7 @@ var pgCoordSubject = rbac.Subject{ { Name: "tailnetcoordinator", DisplayName: "Tailnet Coordinator", - Site: rbac.Permissions(map[string][]rbac.Action{ + Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceTailnetCoordinator.Type: {rbac.WildcardSymbol}, }), Org: 
map[string][]rbac.Permission{}, diff --git a/support/support.go b/support/support.go index 341e01e1862bb..e49f95e38d045 100644 --- a/support/support.go +++ b/support/support.go @@ -16,12 +16,12 @@ import ( "tailscale.com/net/netcheck" "github.com/coder/coder/v2/coderd/healthcheck/derphealth" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/google/uuid" "cdr.dev/slog" "cdr.dev/slog/sloggers/sloghuman" - "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/healthsdk" @@ -462,7 +462,7 @@ func Run(ctx context.Context, d *Deps) (*Bundle, error) { Object: codersdk.AuthorizationObject{ ResourceType: codersdk.ResourceDeploymentValues, }, - Action: string(rbac.ActionRead), + Action: string(policy.ActionRead), }, } From 1f5788feffcce3f7d5b391860d3244d23786ccf2 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Wed, 15 May 2024 11:09:42 -0500 Subject: [PATCH 061/149] chore: remove rbac psuedo resources, add custom verbs (#13276) Removes our pseudo rbac resources like `WorkspaceApplicationConnect` in favor of additional verbs like `ssh`. This is to make more intuitive permissions for building custom roles. 
The source of truth is now `policy.go` --- Makefile | 6 +- coderd/apidoc/docs.go | 113 ++++-- coderd/apidoc/swagger.json | 115 ++++-- coderd/authorize.go | 10 +- coderd/coderdtest/authorize.go | 7 - coderd/coderdtest/coderdtest.go | 2 +- coderd/database/dbauthz/dbauthz.go | 337 ++++++++---------- coderd/database/dbauthz/dbauthz_test.go | 129 ++++--- coderd/database/modelmethods.go | 71 +--- coderd/debug.go | 2 +- coderd/deployment.go | 2 +- coderd/insights.go | 2 +- coderd/rbac/README.md | 2 +- coderd/rbac/authz.go | 22 +- coderd/rbac/authz_internal_test.go | 188 +++++----- coderd/rbac/authz_test.go | 6 +- coderd/rbac/object.go | 257 ++----------- coderd/rbac/object_gen.go | 291 ++++++++++++++- coderd/rbac/object_test.go | 10 +- coderd/rbac/policy/policy.go | 234 ++++++++++++ coderd/rbac/roles.go | 85 +++-- coderd/rbac/roles_test.go | 271 ++++++++++++-- coderd/rbac/scopes.go | 8 +- coderd/roles.go | 4 +- coderd/users.go | 2 +- coderd/util/slice/slice.go | 12 + coderd/util/slice/slice_test.go | 8 + coderd/workspaceagents.go | 2 +- coderd/workspaceapps/apptest/apptest.go | 17 +- coderd/workspaceapps/db.go | 10 +- coderd/wsbuilder/wsbuilder.go | 2 +- codersdk/authorization.go | 2 +- codersdk/rbacresources.go | 77 ---- codersdk/rbacresources_gen.go | 50 +++ docs/api/authorization.md | 4 +- docs/api/schemas.md | 91 +++-- enterprise/coderd/appearance.go | 2 +- enterprise/coderd/authorize_test.go | 2 +- enterprise/coderd/coderd_test.go | 2 +- enterprise/coderd/templates.go | 5 +- enterprise/tailnet/pgcoord.go | 2 +- scripts/rbacgen/codersdk.gotmpl | 18 + scripts/rbacgen/main.go | 214 ++++++++--- scripts/rbacgen/object.gotmpl | 12 - scripts/rbacgen/rbacobject.gotmpl | 39 ++ site/src/api/typesGenerated.ts | 57 ++- .../src/pages/TemplatePage/TemplateLayout.tsx | 5 +- support/support.go | 11 +- 48 files changed, 1788 insertions(+), 1032 deletions(-) delete mode 100644 codersdk/rbacresources.go create mode 100644 codersdk/rbacresources_gen.go create mode 100644 
scripts/rbacgen/codersdk.gotmpl delete mode 100644 scripts/rbacgen/object.gotmpl create mode 100644 scripts/rbacgen/rbacobject.gotmpl diff --git a/Makefile b/Makefile index 9a457c619ad49..a12f90db05214 100644 --- a/Makefile +++ b/Makefile @@ -486,6 +486,7 @@ gen: \ $(DB_GEN_FILES) \ site/src/api/typesGenerated.ts \ coderd/rbac/object_gen.go \ + codersdk/rbacresources_gen.go \ docs/admin/prometheus.md \ docs/cli.md \ docs/admin/audit-logs.md \ @@ -611,7 +612,10 @@ examples/examples.gen.json: scripts/examplegen/main.go examples/examples.go $(sh go run ./scripts/examplegen/main.go > examples/examples.gen.json coderd/rbac/object_gen.go: scripts/rbacgen/main.go coderd/rbac/object.go - go run scripts/rbacgen/main.go ./coderd/rbac > coderd/rbac/object_gen.go + go run scripts/rbacgen/main.go rbac > coderd/rbac/object_gen.go + +codersdk/rbacresources_gen.go: scripts/rbacgen/main.go coderd/rbac/object.go + go run scripts/rbacgen/main.go codersdk > codersdk/rbacresources_gen.go docs/admin/prometheus.md: scripts/metricsdocgen/main.go scripts/metricsdocgen/metrics go run scripts/metricsdocgen/main.go diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 22961a36df98a..0a22d84d13642 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8468,12 +8468,16 @@ const docTemplate = `{ "type": "object", "properties": { "action": { - "type": "string", "enum": [ "create", "read", "update", "delete" + ], + "allOf": [ + { + "$ref": "#/definitions/codersdk.RBACAction" + } ] }, "object": { @@ -10776,59 +10780,94 @@ const docTemplate = `{ } } }, - "codersdk.RBACResource": { + "codersdk.RBACAction": { "type": "string", "enum": [ - "workspace", - "workspace_proxy", - "workspace_execution", "application_connect", + "assign", + "create", + "delete", + "read", + "read_personal", + "ssh", + "update", + "update_personal", + "use", + "view_insights", + "start", + "stop" + ], + "x-enum-varnames": [ + "ActionApplicationConnect", + "ActionAssign", + "ActionCreate", + 
"ActionDelete", + "ActionRead", + "ActionReadPersonal", + "ActionSSH", + "ActionUpdate", + "ActionUpdatePersonal", + "ActionUse", + "ActionViewInsights", + "ActionWorkspaceStart", + "ActionWorkspaceStop" + ] + }, + "codersdk.RBACResource": { + "type": "string", + "enum": [ + "*", + "api_key", + "assign_org_role", + "assign_role", "audit_log", - "template", - "group", + "debug_info", + "deployment_config", + "deployment_stats", "file", - "provisioner_daemon", + "group", + "license", + "oauth2_app", + "oauth2_app_code_token", + "oauth2_app_secret", "organization", - "assign_role", - "assign_org_role", - "api_key", - "user", - "user_data", - "user_workspace_build_parameters", "organization_member", - "license", - "deployment_config", - "deployment_stats", + "provisioner_daemon", "replicas", - "debug_info", "system", - "template_insights" + "tailnet_coordinator", + "template", + "user", + "workspace", + "workspace_dormant", + "workspace_proxy" ], "x-enum-varnames": [ - "ResourceWorkspace", - "ResourceWorkspaceProxy", - "ResourceWorkspaceExecution", - "ResourceWorkspaceApplicationConnect", + "ResourceWildcard", + "ResourceApiKey", + "ResourceAssignOrgRole", + "ResourceAssignRole", "ResourceAuditLog", - "ResourceTemplate", - "ResourceGroup", + "ResourceDebugInfo", + "ResourceDeploymentConfig", + "ResourceDeploymentStats", "ResourceFile", - "ResourceProvisionerDaemon", + "ResourceGroup", + "ResourceLicense", + "ResourceOauth2App", + "ResourceOauth2AppCodeToken", + "ResourceOauth2AppSecret", "ResourceOrganization", - "ResourceRoleAssignment", - "ResourceOrgRoleAssignment", - "ResourceAPIKey", - "ResourceUser", - "ResourceUserData", - "ResourceUserWorkspaceBuildParameters", "ResourceOrganizationMember", - "ResourceLicense", - "ResourceDeploymentValues", - "ResourceDeploymentStats", + "ResourceProvisionerDaemon", "ResourceReplicas", - "ResourceDebugInfo", "ResourceSystem", - "ResourceTemplateInsights" + "ResourceTailnetCoordinator", + "ResourceTemplate", + "ResourceUser", + 
"ResourceWorkspace", + "ResourceWorkspaceDormant", + "ResourceWorkspaceProxy" ] }, "codersdk.RateLimitConfig": { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 76b606e46bb8f..331b1512393f7 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7537,8 +7537,12 @@ "type": "object", "properties": { "action": { - "type": "string", - "enum": ["create", "read", "update", "delete"] + "enum": ["create", "read", "update", "delete"], + "allOf": [ + { + "$ref": "#/definitions/codersdk.RBACAction" + } + ] }, "object": { "description": "Object can represent a \"set\" of objects, such as: all workspaces in an organization, all workspaces owned by me, and all workspaces across the entire product.\nWhen defining an object, use the most specific language when possible to\nproduce the smallest set. Meaning to set as many fields on 'Object' as\nyou can. Example, if you want to check if you can update all workspaces\nowned by 'me', try to also add an 'OrganizationID' to the settings.\nOmitting the 'OrganizationID' could produce the incorrect value, as\nworkspaces have both `user` and `organization` owners.", @@ -9686,59 +9690,94 @@ } } }, - "codersdk.RBACResource": { + "codersdk.RBACAction": { "type": "string", "enum": [ - "workspace", - "workspace_proxy", - "workspace_execution", "application_connect", + "assign", + "create", + "delete", + "read", + "read_personal", + "ssh", + "update", + "update_personal", + "use", + "view_insights", + "start", + "stop" + ], + "x-enum-varnames": [ + "ActionApplicationConnect", + "ActionAssign", + "ActionCreate", + "ActionDelete", + "ActionRead", + "ActionReadPersonal", + "ActionSSH", + "ActionUpdate", + "ActionUpdatePersonal", + "ActionUse", + "ActionViewInsights", + "ActionWorkspaceStart", + "ActionWorkspaceStop" + ] + }, + "codersdk.RBACResource": { + "type": "string", + "enum": [ + "*", + "api_key", + "assign_org_role", + "assign_role", "audit_log", - "template", - "group", + "debug_info", + 
"deployment_config", + "deployment_stats", "file", - "provisioner_daemon", + "group", + "license", + "oauth2_app", + "oauth2_app_code_token", + "oauth2_app_secret", "organization", - "assign_role", - "assign_org_role", - "api_key", - "user", - "user_data", - "user_workspace_build_parameters", "organization_member", - "license", - "deployment_config", - "deployment_stats", + "provisioner_daemon", "replicas", - "debug_info", "system", - "template_insights" + "tailnet_coordinator", + "template", + "user", + "workspace", + "workspace_dormant", + "workspace_proxy" ], "x-enum-varnames": [ - "ResourceWorkspace", - "ResourceWorkspaceProxy", - "ResourceWorkspaceExecution", - "ResourceWorkspaceApplicationConnect", + "ResourceWildcard", + "ResourceApiKey", + "ResourceAssignOrgRole", + "ResourceAssignRole", "ResourceAuditLog", - "ResourceTemplate", - "ResourceGroup", + "ResourceDebugInfo", + "ResourceDeploymentConfig", + "ResourceDeploymentStats", "ResourceFile", - "ResourceProvisionerDaemon", + "ResourceGroup", + "ResourceLicense", + "ResourceOauth2App", + "ResourceOauth2AppCodeToken", + "ResourceOauth2AppSecret", "ResourceOrganization", - "ResourceRoleAssignment", - "ResourceOrgRoleAssignment", - "ResourceAPIKey", - "ResourceUser", - "ResourceUserData", - "ResourceUserWorkspaceBuildParameters", "ResourceOrganizationMember", - "ResourceLicense", - "ResourceDeploymentValues", - "ResourceDeploymentStats", + "ResourceProvisionerDaemon", "ResourceReplicas", - "ResourceDebugInfo", "ResourceSystem", - "ResourceTemplateInsights" + "ResourceTailnetCoordinator", + "ResourceTemplate", + "ResourceUser", + "ResourceWorkspace", + "ResourceWorkspaceDormant", + "ResourceWorkspaceProxy" ] }, "codersdk.RateLimitConfig": { diff --git a/coderd/authorize.go b/coderd/authorize.go index 9adff89769805..2f16fb8ceb720 100644 --- a/coderd/authorize.go +++ b/coderd/authorize.go @@ -169,7 +169,7 @@ func (api *API) checkAuthorization(rw http.ResponseWriter, r *http.Request) { obj := rbac.Object{ Owner: 
v.Object.OwnerID, OrgID: v.Object.OrganizationID, - Type: v.Object.ResourceType.String(), + Type: string(v.Object.ResourceType), } if obj.Owner == "me" { obj.Owner = auth.ID @@ -189,13 +189,7 @@ func (api *API) checkAuthorization(rw http.ResponseWriter, r *http.Request) { var dbObj rbac.Objecter var dbErr error // Only support referencing some resources by ID. - switch v.Object.ResourceType.String() { - case rbac.ResourceWorkspaceExecution.Type: - workSpace, err := api.Database.GetWorkspaceByID(ctx, id) - if err == nil { - dbObj = workSpace.ExecutionRBAC() - } - dbErr = err + switch string(v.Object.ResourceType) { case rbac.ResourceWorkspace.Type: dbObj, dbErr = api.Database.GetWorkspaceByID(ctx, id) case rbac.ResourceTemplate.Type: diff --git a/coderd/coderdtest/authorize.go b/coderd/coderdtest/authorize.go index 6c38063a0dbbe..e753e66f2d2f6 100644 --- a/coderd/coderdtest/authorize.go +++ b/coderd/coderdtest/authorize.go @@ -416,23 +416,16 @@ func RandomRBACObject() rbac.Object { func randomRBACType() string { all := []string{ rbac.ResourceWorkspace.Type, - rbac.ResourceWorkspaceExecution.Type, - rbac.ResourceWorkspaceApplicationConnect.Type, rbac.ResourceAuditLog.Type, rbac.ResourceTemplate.Type, rbac.ResourceGroup.Type, rbac.ResourceFile.Type, rbac.ResourceProvisionerDaemon.Type, rbac.ResourceOrganization.Type, - rbac.ResourceRoleAssignment.Type, - rbac.ResourceOrgRoleAssignment.Type, - rbac.ResourceAPIKey.Type, rbac.ResourceUser.Type, - rbac.ResourceUserData.Type, rbac.ResourceOrganizationMember.Type, rbac.ResourceWildcard.Type, rbac.ResourceLicense.Type, - rbac.ResourceDeploymentValues.Type, rbac.ResourceReplicas.Type, rbac.ResourceDebugInfo.Type, } diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 14a2fb9231561..6153f1a68abcb 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -221,7 +221,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can } if 
options.Authorizer == nil { - defAuth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + defAuth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) if _, ok := t.(*testing.T); ok { options.Authorizer = &RecordingAuthorizer{ Wrapped: defAuth, diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 3d9129928c811..a096346f57064 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -16,12 +16,12 @@ import ( "github.com/open-policy-agent/opa/topdown" "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/httpapi/httpapiconstraints" "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/provisionersdk" ) @@ -164,14 +164,14 @@ var ( DisplayName: "Provisioner Daemon", Site: rbac.Permissions(map[string][]policy.Action{ // TODO: Add ProvisionerJob resource type. 
- rbac.ResourceFile.Type: {policy.ActionRead}, - rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, - rbac.ResourceUser.Type: {policy.ActionRead}, - rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - rbac.ResourceWorkspaceBuild.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - rbac.ResourceUserData.Type: {policy.ActionRead, policy.ActionUpdate}, - rbac.ResourceAPIKey.Type: {rbac.WildcardSymbol}, + rbac.ResourceFile.Type: {policy.ActionRead}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + // Unsure why provisionerd needs update and read personal + rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, + rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, + rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, + rbac.ResourceApiKey.Type: {policy.WildcardSymbol}, // When org scoped provisioner credentials are implemented, // this can be reduced to read a specific org. 
rbac.ResourceOrganization.Type: {policy.ActionRead}, @@ -192,11 +192,11 @@ var ( Name: "autostart", DisplayName: "Autostart Daemon", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, - rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, - rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, - rbac.ResourceWorkspaceBuild.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - rbac.ResourceUser.Type: {policy.ActionRead}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, + rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, + rbac.ResourceUser.Type: {policy.ActionRead}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -214,7 +214,7 @@ var ( Name: "hangdetector", DisplayName: "Hang Detector Daemon", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, rbac.ResourceTemplate.Type: {policy.ActionRead}, rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, }), @@ -234,19 +234,17 @@ var ( DisplayName: "Coder", Site: rbac.Permissions(map[string][]policy.Action{ rbac.ResourceWildcard.Type: {policy.ActionRead}, - rbac.ResourceAPIKey.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + rbac.ResourceApiKey.Type: rbac.ResourceApiKey.AvailableActions(), rbac.ResourceGroup.Type: {policy.ActionCreate, policy.ActionUpdate}, - rbac.ResourceRoleAssignment.Type: {policy.ActionCreate, policy.ActionDelete}, - rbac.ResourceSystem.Type: {rbac.WildcardSymbol}, + rbac.ResourceAssignRole.Type: rbac.ResourceAssignRole.AvailableActions(), + 
rbac.ResourceSystem.Type: {policy.WildcardSymbol}, rbac.ResourceOrganization.Type: {policy.ActionCreate, policy.ActionRead}, rbac.ResourceOrganizationMember.Type: {policy.ActionCreate}, - rbac.ResourceOrgRoleAssignment.Type: {policy.ActionCreate}, + rbac.ResourceAssignOrgRole.Type: {policy.ActionRead, policy.ActionCreate, policy.ActionDelete}, rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionUpdate}, - rbac.ResourceUser.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, - rbac.ResourceUserData.Type: {policy.ActionCreate, policy.ActionUpdate}, - rbac.ResourceWorkspace.Type: {policy.ActionUpdate}, - rbac.ResourceWorkspaceBuild.Type: {policy.ActionUpdate}, - rbac.ResourceWorkspaceExecution.Type: {policy.ActionCreate}, + rbac.ResourceUser.Type: rbac.ResourceUser.AvailableActions(), + rbac.ResourceWorkspaceDormant.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStop}, + rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH}, rbac.ResourceWorkspaceProxy.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, }), Org: map[string][]rbac.Permission{}, @@ -315,6 +313,20 @@ func insert[ authorizer rbac.Authorizer, object rbac.Objecter, insertFunc Insert, +) Insert { + return insertWithAction(logger, authorizer, object, policy.ActionCreate, insertFunc) +} + +func insertWithAction[ + ObjectType any, + ArgumentType any, + Insert func(ctx context.Context, arg ArgumentType) (ObjectType, error), +]( + logger slog.Logger, + authorizer rbac.Authorizer, + object rbac.Objecter, + action policy.Action, + insertFunc Insert, ) Insert { return func(ctx context.Context, arg ArgumentType) (empty ObjectType, err error) { // Fetch the rbac subject @@ -324,7 +336,7 @@ func insert[ } // Authorize the action - err = authorizer.Authorize(ctx, act, policy.ActionCreate, object.RBACObject()) + err = authorizer.Authorize(ctx, act, 
action, object.RBACObject()) if err != nil { return empty, logNotAuthorizedError(ctx, logger, err) } @@ -384,13 +396,14 @@ func update[ // The database query function will **ALWAYS** hit the database, even if the // user cannot read the resource. This is because the resource details are // required to run a proper authorization check. -func fetch[ +func fetchWithAction[ ArgumentType any, ObjectType rbac.Objecter, DatabaseFunc func(ctx context.Context, arg ArgumentType) (ObjectType, error), ]( logger slog.Logger, authorizer rbac.Authorizer, + action policy.Action, f DatabaseFunc, ) DatabaseFunc { return func(ctx context.Context, arg ArgumentType) (empty ObjectType, err error) { @@ -407,7 +420,7 @@ func fetch[ } // Authorize the action - err = authorizer.Authorize(ctx, act, policy.ActionRead, object.RBACObject()) + err = authorizer.Authorize(ctx, act, action, object.RBACObject()) if err != nil { return empty, logNotAuthorizedError(ctx, logger, err) } @@ -416,6 +429,18 @@ func fetch[ } } +func fetch[ + ArgumentType any, + ObjectType rbac.Objecter, + DatabaseFunc func(ctx context.Context, arg ArgumentType) (ObjectType, error), +]( + logger slog.Logger, + authorizer rbac.Authorizer, + f DatabaseFunc, +) DatabaseFunc { + return fetchWithAction(logger, authorizer, policy.ActionRead, f) +} + // fetchAndExec uses fetchAndQuery but only returns the error. The naming comes // from SQL 'exec' functions which only return an error. // See fetchAndQuery for more information. 
@@ -488,6 +513,7 @@ func fetchWithPostFilter[ DatabaseFunc func(ctx context.Context, arg ArgumentType) ([]ObjectType, error), ]( authorizer rbac.Authorizer, + action policy.Action, f DatabaseFunc, ) DatabaseFunc { return func(ctx context.Context, arg ArgumentType) (empty []ObjectType, err error) { @@ -504,7 +530,7 @@ func fetchWithPostFilter[ } // Authorize the action - return rbac.Filter(ctx, authorizer, act, policy.ActionRead, objects) + return rbac.Filter(ctx, authorizer, act, action, objects) } } @@ -560,7 +586,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r return NoActorError } - roleAssign := rbac.ResourceRoleAssignment + roleAssign := rbac.ResourceAssignRole shouldBeOrgRoles := false if orgID != nil { roleAssign = roleAssign.InOrg(*orgID) @@ -585,7 +611,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r } if len(added) > 0 { - if err := q.authorizeContext(ctx, policy.ActionCreate, roleAssign); err != nil { + if err := q.authorizeContext(ctx, policy.ActionAssign, roleAssign); err != nil { return err } } @@ -655,6 +681,29 @@ func authorizedTemplateVersionFromJob(ctx context.Context, q *querier, job datab } } +func (q *querier) authorizeTemplateInsights(ctx context.Context, templateIDs []uuid.UUID) error { + // Abort early if can read all template insights, aka admins. + // TODO: If we know the org, that would allow org admins to abort early too. 
+ if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate); err != nil { + for _, templateID := range templateIDs { + template, err := q.db.GetTemplateByID(ctx, templateID) + if err != nil { + return err + } + + if err := q.authorizeContext(ctx, policy.ActionViewInsights, template); err != nil { + return err + } + } + if len(templateIDs) == 0 { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate.All()); err != nil { + return err + } + } + } + return nil +} + func (q *querier) AcquireLock(ctx context.Context, id int64) error { return q.db.AcquireLock(ctx, id) } @@ -731,7 +780,7 @@ func (q *querier) DeleteAPIKeyByID(ctx context.Context, id string) error { func (q *querier) DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { // TODO: This is not 100% correct because it omits apikey IDs. err := q.authorizeContext(ctx, policy.ActionDelete, - rbac.ResourceAPIKey.WithOwner(userID.String())) + rbac.ResourceApiKey.WithOwner(userID.String())) if err != nil { return err } @@ -755,7 +804,7 @@ func (q *querier) DeleteAllTailnetTunnels(ctx context.Context, arg database.Dele func (q *querier) DeleteApplicationConnectAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error { // TODO: This is not 100% correct because it omits apikey IDs. 
err := q.authorizeContext(ctx, policy.ActionDelete, - rbac.ResourceAPIKey.WithOwner(userID.String())) + rbac.ResourceApiKey.WithOwner(userID.String())) if err != nil { return err } @@ -770,14 +819,14 @@ func (q *querier) DeleteCoordinator(ctx context.Context, id uuid.UUID) error { } func (q *querier) DeleteExternalAuthLink(ctx context.Context, arg database.DeleteExternalAuthLinkParams) error { - return deleteQ(q.log, q.auth, func(ctx context.Context, arg database.DeleteExternalAuthLinkParams) (database.ExternalAuthLink, error) { + return fetchAndExec(q.log, q.auth, policy.ActionUpdatePersonal, func(ctx context.Context, arg database.DeleteExternalAuthLinkParams) (database.ExternalAuthLink, error) { //nolint:gosimple return q.db.GetExternalAuthLink(ctx, database.GetExternalAuthLinkParams{UserID: arg.UserID, ProviderID: arg.ProviderID}) }, q.db.DeleteExternalAuthLink)(ctx, arg) } func (q *querier) DeleteGitSSHKey(ctx context.Context, userID uuid.UUID) error { - return deleteQ(q.log, q.auth, q.db.GetGitSSHKey, q.db.DeleteGitSSHKey)(ctx, userID) + return fetchAndExec(q.log, q.auth, policy.ActionUpdatePersonal, q.db.GetGitSSHKey, q.db.DeleteGitSSHKey)(ctx, userID) } func (q *querier) DeleteGroupByID(ctx context.Context, id uuid.UUID) error { @@ -804,7 +853,7 @@ func (q *querier) DeleteLicense(ctx context.Context, id int32) (int32, error) { } func (q *querier) DeleteOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) error { - if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOauth2App); err != nil { return err } return q.db.DeleteOAuth2ProviderAppByID(ctx, id) @@ -823,14 +872,14 @@ func (q *querier) DeleteOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.U func (q *querier) DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams) error { if err := q.authorizeContext(ctx, 
policy.ActionDelete, - rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { + rbac.ResourceOauth2AppCodeToken.WithOwner(arg.UserID.String())); err != nil { return err } return q.db.DeleteOAuth2ProviderAppCodesByAppAndUserID(ctx, arg) } func (q *querier) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) error { - if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceOauth2AppSecret); err != nil { return err } return q.db.DeleteOAuth2ProviderAppSecretByID(ctx, id) @@ -838,7 +887,7 @@ func (q *querier) DeleteOAuth2ProviderAppSecretByID(ctx context.Context, id uuid func (q *querier) DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx context.Context, arg database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams) error { if err := q.authorizeContext(ctx, policy.ActionDelete, - rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { + rbac.ResourceOauth2AppCodeToken.WithOwner(arg.UserID.String())); err != nil { return err } return q.db.DeleteOAuth2ProviderAppTokensByAppAndUserID(ctx, arg) @@ -950,15 +999,15 @@ func (q *querier) GetAPIKeyByName(ctx context.Context, arg database.GetAPIKeyByN } func (q *querier) GetAPIKeysByLoginType(ctx context.Context, loginType database.LoginType) ([]database.APIKey, error) { - return fetchWithPostFilter(q.auth, q.db.GetAPIKeysByLoginType)(ctx, loginType) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetAPIKeysByLoginType)(ctx, loginType) } func (q *querier) GetAPIKeysByUserID(ctx context.Context, params database.GetAPIKeysByUserIDParams) ([]database.APIKey, error) { - return fetchWithPostFilter(q.auth, q.db.GetAPIKeysByUserID)(ctx, database.GetAPIKeysByUserIDParams{LoginType: params.LoginType, UserID: params.UserID}) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetAPIKeysByUserID)(ctx, 
database.GetAPIKeysByUserIDParams{LoginType: params.LoginType, UserID: params.UserID}) } func (q *querier) GetAPIKeysLastUsedAfter(ctx context.Context, lastUsed time.Time) ([]database.APIKey, error) { - return fetchWithPostFilter(q.auth, q.db.GetAPIKeysLastUsedAfter)(ctx, lastUsed) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetAPIKeysLastUsedAfter)(ctx, lastUsed) } func (q *querier) GetActiveUserCount(ctx context.Context) (int64, error) { @@ -1078,11 +1127,11 @@ func (q *querier) GetDeploymentWorkspaceStats(ctx context.Context) (database.Get } func (q *querier) GetExternalAuthLink(ctx context.Context, arg database.GetExternalAuthLinkParams) (database.ExternalAuthLink, error) { - return fetch(q.log, q.auth, q.db.GetExternalAuthLink)(ctx, arg) + return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetExternalAuthLink)(ctx, arg) } func (q *querier) GetExternalAuthLinksByUserID(ctx context.Context, userID uuid.UUID) ([]database.ExternalAuthLink, error) { - return fetchWithPostFilter(q.auth, q.db.GetExternalAuthLinksByUserID)(ctx, userID) + return fetchWithPostFilter(q.auth, policy.ActionReadPersonal, q.db.GetExternalAuthLinksByUserID)(ctx, userID) } func (q *querier) GetFileByHashAndCreator(ctx context.Context, arg database.GetFileByHashAndCreatorParams) (database.File, error) { @@ -1125,7 +1174,7 @@ func (q *querier) GetFileTemplates(ctx context.Context, fileID uuid.UUID) ([]dat } func (q *querier) GetGitSSHKey(ctx context.Context, userID uuid.UUID) (database.GitSSHKey, error) { - return fetch(q.log, q.auth, q.db.GetGitSSHKey)(ctx, userID) + return fetchWithAction(q.log, q.auth, policy.ActionReadPersonal, q.db.GetGitSSHKey)(ctx, userID) } func (q *querier) GetGroupByID(ctx context.Context, id uuid.UUID) (database.Group, error) { @@ -1144,11 +1193,11 @@ func (q *querier) GetGroupMembers(ctx context.Context, id uuid.UUID) ([]database } func (q *querier) GetGroupsByOrganizationAndUserID(ctx context.Context, arg 
database.GetGroupsByOrganizationAndUserIDParams) ([]database.Group, error) { - return fetchWithPostFilter(q.auth, q.db.GetGroupsByOrganizationAndUserID)(ctx, arg) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetGroupsByOrganizationAndUserID)(ctx, arg) } func (q *querier) GetGroupsByOrganizationID(ctx context.Context, organizationID uuid.UUID) ([]database.Group, error) { - return fetchWithPostFilter(q.auth, q.db.GetGroupsByOrganizationID)(ctx, organizationID) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetGroupsByOrganizationID)(ctx, organizationID) } func (q *querier) GetHealthSettings(ctx context.Context) (string, error) { @@ -1213,7 +1262,7 @@ func (q *querier) GetLicenses(ctx context.Context) ([]database.License, error) { fetch := func(ctx context.Context, _ interface{}) ([]database.License, error) { return q.db.GetLicenses(ctx) } - return fetchWithPostFilter(q.auth, fetch)(ctx, nil) + return fetchWithPostFilter(q.auth, policy.ActionRead, fetch)(ctx, nil) } func (q *querier) GetLogoURL(ctx context.Context) (string, error) { @@ -1227,7 +1276,7 @@ func (q *querier) GetNotificationBanners(ctx context.Context) (string, error) { } func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2App); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.GetOAuth2ProviderAppByID(ctx, id) @@ -1242,7 +1291,7 @@ func (q *querier) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPr } func (q *querier) GetOAuth2ProviderAppSecretByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, 
rbac.ResourceOauth2AppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.GetOAuth2ProviderAppSecretByID(ctx, id) @@ -1253,7 +1302,7 @@ func (q *querier) GetOAuth2ProviderAppSecretByPrefix(ctx context.Context, secret } func (q *querier) GetOAuth2ProviderAppSecretsByAppID(ctx context.Context, appID uuid.UUID) ([]database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2AppSecret); err != nil { return []database.OAuth2ProviderAppSecret{}, err } return q.db.GetOAuth2ProviderAppSecretsByAppID(ctx, appID) @@ -1269,14 +1318,14 @@ func (q *querier) GetOAuth2ProviderAppTokenByPrefix(ctx context.Context, hashPre if err != nil { return database.OAuth2ProviderAppToken{}, err } - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2AppCodeToken.WithOwner(key.UserID.String())); err != nil { return database.OAuth2ProviderAppToken{}, err } return token, nil } func (q *querier) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2App); err != nil { return []database.OAuth2ProviderApp{}, err } return q.db.GetOAuth2ProviderApps(ctx) @@ -1285,7 +1334,7 @@ func (q *querier) GetOAuth2ProviderApps(ctx context.Context) ([]database.OAuth2P func (q *querier) GetOAuth2ProviderAppsByUserID(ctx context.Context, userID uuid.UUID) ([]database.GetOAuth2ProviderAppsByUserIDRow, error) { // This authz check is to make sure the caller can read all their own tokens. 
if err := q.authorizeContext(ctx, policy.ActionRead, - rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(userID.String())); err != nil { + rbac.ResourceOauth2AppCodeToken.WithOwner(userID.String())); err != nil { return []database.GetOAuth2ProviderAppsByUserIDRow{}, err } return q.db.GetOAuth2ProviderAppsByUserID(ctx, userID) @@ -1309,7 +1358,7 @@ func (q *querier) GetOrganizationByName(ctx context.Context, name string) (datab func (q *querier) GetOrganizationIDsByMemberIDs(ctx context.Context, ids []uuid.UUID) ([]database.GetOrganizationIDsByMemberIDsRow, error) { // TODO: This should be rewritten to return a list of database.OrganizationMember for consistent RBAC objects. // Currently this row returns a list of org ids per user, which is challenging to check against the RBAC system. - return fetchWithPostFilter(q.auth, q.db.GetOrganizationIDsByMemberIDs)(ctx, ids) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationIDsByMemberIDs)(ctx, ids) } func (q *querier) GetOrganizationMemberByUserID(ctx context.Context, arg database.GetOrganizationMemberByUserIDParams) (database.OrganizationMember, error) { @@ -1317,18 +1366,18 @@ func (q *querier) GetOrganizationMemberByUserID(ctx context.Context, arg databas } func (q *querier) GetOrganizationMembershipsByUserID(ctx context.Context, userID uuid.UUID) ([]database.OrganizationMember, error) { - return fetchWithPostFilter(q.auth, q.db.GetOrganizationMembershipsByUserID)(ctx, userID) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationMembershipsByUserID)(ctx, userID) } func (q *querier) GetOrganizations(ctx context.Context) ([]database.Organization, error) { fetch := func(ctx context.Context, _ interface{}) ([]database.Organization, error) { return q.db.GetOrganizations(ctx) } - return fetchWithPostFilter(q.auth, fetch)(ctx, nil) + return fetchWithPostFilter(q.auth, policy.ActionRead, fetch)(ctx, nil) } func (q *querier) GetOrganizationsByUserID(ctx context.Context, userID 
uuid.UUID) ([]database.Organization, error) { - return fetchWithPostFilter(q.auth, q.db.GetOrganizationsByUserID)(ctx, userID) + return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetOrganizationsByUserID)(ctx, userID) } func (q *querier) GetParameterSchemasByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ParameterSchema, error) { @@ -1370,7 +1419,7 @@ func (q *querier) GetProvisionerDaemons(ctx context.Context) ([]database.Provisi fetch := func(ctx context.Context, _ interface{}) ([]database.ProvisionerDaemon, error) { return q.db.GetProvisionerDaemons(ctx) } - return fetchWithPostFilter(q.auth, fetch)(ctx, nil) + return fetchWithPostFilter(q.auth, policy.ActionRead, fetch)(ctx, nil) } func (q *querier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { @@ -1496,31 +1545,15 @@ func (q *querier) GetTailnetTunnelPeerIDs(ctx context.Context, srcID uuid.UUID) } func (q *querier) GetTemplateAppInsights(ctx context.Context, arg database.GetTemplateAppInsightsParams) ([]database.GetTemplateAppInsightsRow, error) { - // Used by TemplateAppInsights endpoint - // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { - for _, templateID := range arg.TemplateIDs { - template, err := q.db.GetTemplateByID(ctx, templateID) - if err != nil { - return nil, err - } - - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { - return nil, err - } - } - if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { - return nil, err - } - } + if err := q.authorizeTemplateInsights(ctx, arg.TemplateIDs); err != nil { + return nil, err } return q.db.GetTemplateAppInsights(ctx, arg) } func (q *querier) GetTemplateAppInsightsByTemplate(ctx context.Context, arg database.GetTemplateAppInsightsByTemplateParams) ([]database.GetTemplateAppInsightsByTemplateRow, error) { // Only used by prometheus metrics, so we don't strictly need to check update template perms. - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate); err != nil { return nil, err } return q.db.GetTemplateAppInsightsByTemplate(ctx, arg) @@ -1551,101 +1584,37 @@ func (q *querier) GetTemplateDAUs(ctx context.Context, arg database.GetTemplateD } func (q *querier) GetTemplateInsights(ctx context.Context, arg database.GetTemplateInsightsParams) (database.GetTemplateInsightsRow, error) { - // Used by TemplateInsights endpoint - // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { - for _, templateID := range arg.TemplateIDs { - template, err := q.db.GetTemplateByID(ctx, templateID) - if err != nil { - return database.GetTemplateInsightsRow{}, err - } - - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { - return database.GetTemplateInsightsRow{}, err - } - } - if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { - return database.GetTemplateInsightsRow{}, err - } - } + if err := q.authorizeTemplateInsights(ctx, arg.TemplateIDs); err != nil { + return database.GetTemplateInsightsRow{}, err } return q.db.GetTemplateInsights(ctx, arg) } func (q *querier) GetTemplateInsightsByInterval(ctx context.Context, arg database.GetTemplateInsightsByIntervalParams) ([]database.GetTemplateInsightsByIntervalRow, error) { - // Used by TemplateInsights endpoint - // For auditors, check read template_insights, and fall back to update template. - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { - for _, templateID := range arg.TemplateIDs { - template, err := q.db.GetTemplateByID(ctx, templateID) - if err != nil { - return nil, err - } - - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { - return nil, err - } - } - if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { - return nil, err - } - } + if err := q.authorizeTemplateInsights(ctx, arg.TemplateIDs); err != nil { + return nil, err } return q.db.GetTemplateInsightsByInterval(ctx, arg) } func (q *querier) GetTemplateInsightsByTemplate(ctx context.Context, arg database.GetTemplateInsightsByTemplateParams) ([]database.GetTemplateInsightsByTemplateRow, error) { // Only used by prometheus metrics collector. No need to check update template perms. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate); err != nil { return nil, err } return q.db.GetTemplateInsightsByTemplate(ctx, arg) } func (q *querier) GetTemplateParameterInsights(ctx context.Context, arg database.GetTemplateParameterInsightsParams) ([]database.GetTemplateParameterInsightsRow, error) { - // Used by both insights endpoint and prometheus collector. - // For auditors, check read template_insights, and fall back to update template. - if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { - for _, templateID := range arg.TemplateIDs { - template, err := q.db.GetTemplateByID(ctx, templateID) - if err != nil { - return nil, err - } - - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { - return nil, err - } - } - if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { - return nil, err - } - } + if err := q.authorizeTemplateInsights(ctx, arg.TemplateIDs); err != nil { + return nil, err } return q.db.GetTemplateParameterInsights(ctx, arg) } func (q *querier) GetTemplateUsageStats(ctx context.Context, arg database.GetTemplateUsageStatsParams) ([]database.TemplateUsageStat, error) { - // Used by dbrollup tests, use same safe-guard as other insights endpoints. - // For auditors, check read template_insights, and fall back to update template. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { - for _, templateID := range arg.TemplateIDs { - template, err := q.db.GetTemplateByID(ctx, templateID) - if err != nil { - return nil, err - } - - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { - return nil, err - } - } - if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { - return nil, err - } - } + if err := q.authorizeTemplateInsights(ctx, arg.TemplateIDs); err != nil { + return nil, err } return q.db.GetTemplateUsageStats(ctx, arg) } @@ -1803,19 +1772,19 @@ func (q *querier) GetUnexpiredLicenses(ctx context.Context) ([]database.License, func (q *querier) GetUserActivityInsights(ctx context.Context, arg database.GetUserActivityInsightsParams) ([]database.GetUserActivityInsightsRow, error) { // Used by insights endpoints. Need to check both for auditors and for regular users with template acl perms. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1840,19 +1809,19 @@ func (q *querier) GetUserCount(ctx context.Context) (int64, error) { func (q *querier) GetUserLatencyInsights(ctx context.Context, arg database.GetUserLatencyInsightsParams) ([]database.GetUserLatencyInsightsRow, error) { // Used by insights endpoints. Need to check both for auditors and for regular users with template acl perms. 
- if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceTemplateInsights); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate); err != nil { for _, templateID := range arg.TemplateIDs { template, err := q.db.GetTemplateByID(ctx, templateID) if err != nil { return nil, err } - if err := q.authorizeContext(ctx, policy.ActionUpdate, template); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, template); err != nil { return nil, err } } if len(arg.TemplateIDs) == 0 { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceTemplate.All()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionViewInsights, rbac.ResourceTemplate.All()); err != nil { return nil, err } } @@ -1886,7 +1855,11 @@ func (q *querier) GetUserWorkspaceBuildParameters(ctx context.Context, params da if err != nil { return nil, err } - if err := q.authorizeContext(ctx, policy.ActionRead, u.UserWorkspaceBuildParametersObject()); err != nil { + // This permission is a bit strange. Reading workspace build params should be a permission + // on the workspace. However, this use case is to autofill a user's last input + // to some parameter. So this is kind of a "user setting". For now, this will + // be lumped in with user personal data. Subject to change. 
+ if err := q.authorizeContext(ctx, policy.ActionReadPersonal, u); err != nil { return nil, err } return q.db.GetUserWorkspaceBuildParameters(ctx, params) @@ -2143,7 +2116,7 @@ func (q *querier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceApp } func (q *querier) GetWorkspaceProxies(ctx context.Context) ([]database.WorkspaceProxy, error) { - return fetchWithPostFilter(q.auth, func(ctx context.Context, _ interface{}) ([]database.WorkspaceProxy, error) { + return fetchWithPostFilter(q.auth, policy.ActionRead, func(ctx context.Context, _ interface{}) ([]database.WorkspaceProxy, error) { return q.db.GetWorkspaceProxies(ctx) })(ctx, nil) } @@ -2277,7 +2250,7 @@ func (q *querier) GetWorkspacesEligibleForTransition(ctx context.Context, now ti func (q *querier) InsertAPIKey(ctx context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { return insert(q.log, q.auth, - rbac.ResourceAPIKey.WithOwner(arg.UserID.String()), + rbac.ResourceApiKey.WithOwner(arg.UserID.String()), q.db.InsertAPIKey)(ctx, arg) } @@ -2312,7 +2285,7 @@ func (q *querier) InsertDeploymentID(ctx context.Context, value string) error { } func (q *querier) InsertExternalAuthLink(ctx context.Context, arg database.InsertExternalAuthLinkParams) (database.ExternalAuthLink, error) { - return insert(q.log, q.auth, rbac.ResourceUserData.WithOwner(arg.UserID.String()).WithID(arg.UserID), q.db.InsertExternalAuthLink)(ctx, arg) + return insertWithAction(q.log, q.auth, rbac.ResourceUser.WithID(arg.UserID).WithOwner(arg.UserID.String()), policy.ActionUpdatePersonal, q.db.InsertExternalAuthLink)(ctx, arg) } func (q *querier) InsertFile(ctx context.Context, arg database.InsertFileParams) (database.File, error) { @@ -2320,7 +2293,7 @@ func (q *querier) InsertFile(ctx context.Context, arg database.InsertFileParams) } func (q *querier) InsertGitSSHKey(ctx context.Context, arg database.InsertGitSSHKeyParams) (database.GitSSHKey, error) { - return insert(q.log, q.auth, 
rbac.ResourceUserData.WithOwner(arg.UserID.String()).WithID(arg.UserID), q.db.InsertGitSSHKey)(ctx, arg) + return insertWithAction(q.log, q.auth, rbac.ResourceUser.WithOwner(arg.UserID.String()).WithID(arg.UserID), policy.ActionUpdatePersonal, q.db.InsertGitSSHKey)(ctx, arg) } func (q *querier) InsertGroup(ctx context.Context, arg database.InsertGroupParams) (database.Group, error) { @@ -2349,7 +2322,7 @@ func (q *querier) InsertMissingGroups(ctx context.Context, arg database.InsertMi } func (q *querier) InsertOAuth2ProviderApp(ctx context.Context, arg database.InsertOAuth2ProviderAppParams) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOauth2App); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.InsertOAuth2ProviderApp(ctx, arg) @@ -2357,14 +2330,14 @@ func (q *querier) InsertOAuth2ProviderApp(ctx context.Context, arg database.Inse func (q *querier) InsertOAuth2ProviderAppCode(ctx context.Context, arg database.InsertOAuth2ProviderAppCodeParams) (database.OAuth2ProviderAppCode, error) { if err := q.authorizeContext(ctx, policy.ActionCreate, - rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(arg.UserID.String())); err != nil { + rbac.ResourceOauth2AppCodeToken.WithOwner(arg.UserID.String())); err != nil { return database.OAuth2ProviderAppCode{}, err } return q.db.InsertOAuth2ProviderAppCode(ctx, arg) } func (q *querier) InsertOAuth2ProviderAppSecret(ctx context.Context, arg database.InsertOAuth2ProviderAppSecretParams) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOauth2AppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.InsertOAuth2ProviderAppSecret(ctx, arg) @@ -2375,7 
+2348,7 @@ func (q *querier) InsertOAuth2ProviderAppToken(ctx context.Context, arg database if err != nil { return database.OAuth2ProviderAppToken{}, err } - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(key.UserID.String())); err != nil { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceOauth2AppCodeToken.WithOwner(key.UserID.String())); err != nil { return database.OAuth2ProviderAppToken{}, err } return q.db.InsertOAuth2ProviderAppToken(ctx, arg) @@ -2561,12 +2534,14 @@ func (q *querier) InsertWorkspaceBuild(ctx context.Context, arg database.InsertW return xerrors.Errorf("get workspace by id: %w", err) } - var action policy.Action = policy.ActionUpdate + var action policy.Action = policy.ActionWorkspaceStart if arg.Transition == database.WorkspaceTransitionDelete { action = policy.ActionDelete + } else if arg.Transition == database.WorkspaceTransitionStop { + action = policy.ActionWorkspaceStop } - if err = q.authorizeContext(ctx, action, w.WorkspaceBuildRBAC(arg.Transition)); err != nil { + if err = q.authorizeContext(ctx, action, w); err != nil { return xerrors.Errorf("authorize context: %w", err) } @@ -2719,14 +2694,14 @@ func (q *querier) UpdateExternalAuthLink(ctx context.Context, arg database.Updat fetch := func(ctx context.Context, arg database.UpdateExternalAuthLinkParams) (database.ExternalAuthLink, error) { return q.db.GetExternalAuthLink(ctx, database.GetExternalAuthLinkParams{UserID: arg.UserID, ProviderID: arg.ProviderID}) } - return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateExternalAuthLink)(ctx, arg) + return fetchAndQuery(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.UpdateExternalAuthLink)(ctx, arg) } func (q *querier) UpdateGitSSHKey(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) { fetch := func(ctx context.Context, arg database.UpdateGitSSHKeyParams) (database.GitSSHKey, error) { return q.db.GetGitSSHKey(ctx, 
arg.UserID) } - return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateGitSSHKey)(ctx, arg) + return fetchAndQuery(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.UpdateGitSSHKey)(ctx, arg) } func (q *querier) UpdateGroupByID(ctx context.Context, arg database.UpdateGroupByIDParams) (database.Group, error) { @@ -2765,14 +2740,14 @@ func (q *querier) UpdateMemberRoles(ctx context.Context, arg database.UpdateMemb } func (q *querier) UpdateOAuth2ProviderAppByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppByIDParams) (database.OAuth2ProviderApp, error) { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOAuth2ProviderApp); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOauth2App); err != nil { return database.OAuth2ProviderApp{}, err } return q.db.UpdateOAuth2ProviderAppByID(ctx, arg) } func (q *querier) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg database.UpdateOAuth2ProviderAppSecretByIDParams) (database.OAuth2ProviderAppSecret, error) { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOAuth2ProviderAppSecret); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceOauth2AppSecret); err != nil { return database.OAuth2ProviderAppSecret{}, err } return q.db.UpdateOAuth2ProviderAppSecretByID(ctx, arg) @@ -2996,7 +2971,7 @@ func (q *querier) UpdateUserAppearanceSettings(ctx context.Context, arg database if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdatePersonal, u); err != nil { return database.User{}, err } return q.db.UpdateUserAppearanceSettings(ctx, arg) @@ -3012,10 +2987,10 @@ func (q *querier) UpdateUserHashedPassword(ctx context.Context, arg database.Upd return err } - err = q.authorizeContext(ctx, policy.ActionUpdate, user.UserDataRBACObject()) + err = q.authorizeContext(ctx, 
policy.ActionUpdatePersonal, user) if err != nil { // Admins can update passwords for other users. - err = q.authorizeContext(ctx, policy.ActionUpdate, user.RBACObject()) + err = q.authorizeContext(ctx, policy.ActionUpdate, user) if err != nil { return err } @@ -3038,7 +3013,7 @@ func (q *querier) UpdateUserLink(ctx context.Context, arg database.UpdateUserLin LoginType: arg.LoginType, }) } - return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateUserLink)(ctx, arg) + return fetchAndQuery(q.log, q.auth, policy.ActionUpdatePersonal, fetch, q.db.UpdateUserLink)(ctx, arg) } func (q *querier) UpdateUserLinkedID(ctx context.Context, arg database.UpdateUserLinkedIDParams) (database.UserLink, error) { @@ -3060,7 +3035,7 @@ func (q *querier) UpdateUserProfile(ctx context.Context, arg database.UpdateUser if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdatePersonal, u); err != nil { return database.User{}, err } return q.db.UpdateUserProfile(ctx, arg) @@ -3071,7 +3046,7 @@ func (q *querier) UpdateUserQuietHoursSchedule(ctx context.Context, arg database if err != nil { return database.User{}, err } - if err := q.authorizeContext(ctx, policy.ActionUpdate, u.UserDataRBACObject()); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdatePersonal, u); err != nil { return database.User{}, err } return q.db.UpdateUserQuietHoursSchedule(ctx, arg) @@ -3310,7 +3285,7 @@ func (q *querier) UpsertAppSecurityKey(ctx context.Context, data string) error { } func (q *querier) UpsertApplicationName(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { return err } return q.db.UpsertApplicationName(ctx, value) @@ -3324,7 +3299,7 @@ func (q *querier) 
UpsertDefaultProxy(ctx context.Context, arg database.UpsertDef } func (q *querier) UpsertHealthSettings(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { return err } return q.db.UpsertHealthSettings(ctx, value) @@ -3359,14 +3334,14 @@ func (q *querier) UpsertLastUpdateCheck(ctx context.Context, value string) error } func (q *querier) UpsertLogoURL(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { return err } return q.db.UpsertLogoURL(ctx, value) } func (q *querier) UpsertNotificationBanners(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceDeploymentValues); err != nil { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { return err } return q.db.UpsertNotificationBanners(ctx, value) diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 92dbbb8e7bce1..e8dcb2f8ee5bc 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -218,7 +218,7 @@ func (s *MethodTestSuite) TestAPIKey() { UserID: u.ID, LoginType: database.LoginTypePassword, Scope: database.APIKeyScopeAll, - }).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), policy.ActionCreate) + }).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionCreate) })) s.Run("UpdateAPIKeyByID", s.Subtest(func(db database.Store, check *expects) { a, _ := dbgen.APIKey(s.T(), db, database.APIKey{}) @@ -230,21 +230,23 @@ func (s *MethodTestSuite) TestAPIKey() { a, _ := dbgen.APIKey(s.T(), db, database.APIKey{ Scope: 
database.APIKeyScopeApplicationConnect, }) - check.Args(a.UserID).Asserts(rbac.ResourceAPIKey.WithOwner(a.UserID.String()), policy.ActionDelete).Returns() + check.Args(a.UserID).Asserts(rbac.ResourceApiKey.WithOwner(a.UserID.String()), policy.ActionDelete).Returns() })) s.Run("DeleteExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) check.Args(database.DeleteExternalAuthLinkParams{ ProviderID: a.ProviderID, UserID: a.UserID, - }).Asserts(a, policy.ActionDelete).Returns() + }).Asserts(rbac.ResourceUserObject(a.UserID), policy.ActionUpdatePersonal).Returns() })) s.Run("GetExternalAuthLinksByUserID", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) b := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{ UserID: a.UserID, }) - check.Args(a.UserID).Asserts(a, policy.ActionRead, b, policy.ActionRead) + check.Args(a.UserID).Asserts( + rbac.ResourceUserObject(a.UserID), policy.ActionReadPersonal, + rbac.ResourceUserObject(b.UserID), policy.ActionReadPersonal) })) } @@ -524,10 +526,10 @@ func (s *MethodTestSuite) TestLicense() { Asserts(rbac.ResourceLicense, policy.ActionCreate) })) s.Run("UpsertLogoURL", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) + check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) s.Run("UpsertNotificationBanners", s.Subtest(func(db database.Store, check *expects) { - check.Args("value").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) + check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) s.Run("GetLicenseByID", s.Subtest(func(db database.Store, check *expects) { l, err := db.InsertLicense(context.Background(), database.InsertLicenseParams{ @@ -634,7 +636,7 @@ func (s *MethodTestSuite) TestOrganization() { UserID: u.ID, 
Roles: []string{rbac.RoleOrgAdmin(o.ID)}, }).Asserts( - rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionCreate, + rbac.ResourceAssignRole.InOrg(o.ID), policy.ActionAssign, rbac.ResourceOrganizationMember.InOrg(o.ID).WithID(u.ID), policy.ActionCreate) })) s.Run("UpdateMemberRoles", s.Subtest(func(db database.Store, check *expects) { @@ -654,8 +656,8 @@ func (s *MethodTestSuite) TestOrganization() { OrgID: o.ID, }).Asserts( mem, policy.ActionRead, - rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionCreate, // org-mem - rbac.ResourceRoleAssignment.InOrg(o.ID), policy.ActionDelete, // org-admin + rbac.ResourceAssignRole.InOrg(o.ID), policy.ActionAssign, // org-mem + rbac.ResourceAssignRole.InOrg(o.ID), policy.ActionDelete, // org-admin ).Returns(out) })) } @@ -942,31 +944,31 @@ func (s *MethodTestSuite) TestTemplate() { }).Asserts(t1, policy.ActionUpdate).Returns() })) s.Run("GetTemplateInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetUserLatencyInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetUserLatencyInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetUserActivityInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead).Errors(sql.ErrNoRows) + check.Args(database.GetUserActivityInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Errors(sql.ErrNoRows) })) s.Run("GetTemplateParameterInsights", s.Subtest(func(db database.Store, check *expects) { - 
check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateParameterInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetTemplateInsightsByInterval", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByIntervalParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateInsightsByIntervalParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetTemplateInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetTemplateAppInsights", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateAppInsightsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetTemplateAppInsightsByTemplate", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead) + check.Args(database.GetTemplateAppInsightsByTemplateParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights) })) s.Run("GetTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplateInsights, policy.ActionRead).Errors(sql.ErrNoRows) + check.Args(database.GetTemplateUsageStatsParams{}).Asserts(rbac.ResourceTemplate, policy.ActionViewInsights).Errors(sql.ErrNoRows) })) s.Run("UpsertTemplateUsageStats", s.Subtest(func(db database.Store, check *expects) { 
check.Asserts(rbac.ResourceSystem, policy.ActionUpdate) @@ -982,7 +984,7 @@ func (s *MethodTestSuite) TestUser() { })) s.Run("DeleteAPIKeysByUserID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) - check.Args(u.ID).Asserts(rbac.ResourceAPIKey.WithOwner(u.ID.String()), policy.ActionDelete).Returns() + check.Args(u.ID).Asserts(rbac.ResourceApiKey.WithOwner(u.ID.String()), policy.ActionDelete).Returns() })) s.Run("GetQuotaAllowanceForUser", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1021,7 +1023,7 @@ func (s *MethodTestSuite) TestUser() { check.Args(database.InsertUserParams{ ID: uuid.New(), LoginType: database.LoginTypePassword, - }).Asserts(rbac.ResourceRoleAssignment, policy.ActionCreate, rbac.ResourceUser, policy.ActionCreate) + }).Asserts(rbac.ResourceAssignRole, policy.ActionAssign, rbac.ResourceUser, policy.ActionCreate) })) s.Run("InsertUserLink", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1038,13 +1040,13 @@ func (s *MethodTestSuite) TestUser() { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.UpdateUserHashedPasswordParams{ ID: u.ID, - }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns() + }).Asserts(u, policy.ActionUpdatePersonal).Returns() })) s.Run("UpdateUserQuietHoursSchedule", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.UpdateUserQuietHoursScheduleParams{ ID: u.ID, - }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate) + }).Asserts(u, policy.ActionUpdatePersonal) })) s.Run("UpdateUserLastSeenAt", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1061,7 +1063,7 @@ func (s *MethodTestSuite) TestUser() { Email: u.Email, Username: u.Username, UpdatedAt: u.UpdatedAt, - }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns(u) + 
}).Asserts(u, policy.ActionUpdatePersonal).Returns(u) })) s.Run("GetUserWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1070,7 +1072,7 @@ func (s *MethodTestSuite) TestUser() { OwnerID: u.ID, TemplateID: uuid.UUID{}, }, - ).Asserts(u.UserWorkspaceBuildParametersObject(), policy.ActionRead).Returns( + ).Asserts(u, policy.ActionReadPersonal).Returns( []database.GetUserWorkspaceBuildParametersRow{}, ) })) @@ -1080,7 +1082,7 @@ func (s *MethodTestSuite) TestUser() { ID: u.ID, ThemePreference: u.ThemePreference, UpdatedAt: u.UpdatedAt, - }).Asserts(u.UserDataRBACObject(), policy.ActionUpdate).Returns(u) + }).Asserts(u, policy.ActionUpdatePersonal).Returns(u) })) s.Run("UpdateUserStatus", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -1092,38 +1094,38 @@ func (s *MethodTestSuite) TestUser() { })) s.Run("DeleteGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) - check.Args(key.UserID).Asserts(key, policy.ActionDelete).Returns() + check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionUpdatePersonal).Returns() })) s.Run("GetGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, database.GitSSHKey{}) - check.Args(key.UserID).Asserts(key, policy.ActionRead).Returns(key) + check.Args(key.UserID).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionReadPersonal).Returns(key) })) s.Run("InsertGitSSHKey", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertGitSSHKeyParams{ UserID: u.ID, - }).Asserts(rbac.ResourceUserData.WithID(u.ID).WithOwner(u.ID.String()), policy.ActionCreate) + }).Asserts(u, policy.ActionUpdatePersonal) })) s.Run("UpdateGitSSHKey", s.Subtest(func(db database.Store, check *expects) { key := dbgen.GitSSHKey(s.T(), db, 
database.GitSSHKey{}) check.Args(database.UpdateGitSSHKeyParams{ UserID: key.UserID, UpdatedAt: key.UpdatedAt, - }).Asserts(key, policy.ActionUpdate).Returns(key) + }).Asserts(rbac.ResourceUserObject(key.UserID), policy.ActionUpdatePersonal).Returns(key) })) s.Run("GetExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) check.Args(database.GetExternalAuthLinkParams{ ProviderID: link.ProviderID, UserID: link.UserID, - }).Asserts(link, policy.ActionRead).Returns(link) + }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionReadPersonal).Returns(link) })) s.Run("InsertExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) check.Args(database.InsertExternalAuthLinkParams{ ProviderID: uuid.NewString(), UserID: u.ID, - }).Asserts(rbac.ResourceUserData.WithOwner(u.ID.String()).WithID(u.ID), policy.ActionCreate) + }).Asserts(u, policy.ActionUpdatePersonal) })) s.Run("UpdateExternalAuthLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.ExternalAuthLink(s.T(), db, database.ExternalAuthLink{}) @@ -1134,7 +1136,7 @@ func (s *MethodTestSuite) TestUser() { OAuthRefreshToken: link.OAuthRefreshToken, OAuthExpiry: link.OAuthExpiry, UpdatedAt: link.UpdatedAt, - }).Asserts(link, policy.ActionUpdate).Returns(link) + }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal).Returns(link) })) s.Run("UpdateUserLink", s.Subtest(func(db database.Store, check *expects) { link := dbgen.UserLink(s.T(), db, database.UserLink{}) @@ -1145,7 +1147,7 @@ func (s *MethodTestSuite) TestUser() { UserID: link.UserID, LoginType: link.LoginType, DebugContext: json.RawMessage("{}"), - }).Asserts(link, policy.ActionUpdate).Returns(link) + }).Asserts(rbac.ResourceUserObject(link.UserID), policy.ActionUpdatePersonal).Returns(link) })) s.Run("UpdateUserRoles", s.Subtest(func(db database.Store, check *expects) { u 
:= dbgen.User(s.T(), db, database.User{RBACRoles: []string{rbac.RoleTemplateAdmin()}}) @@ -1156,8 +1158,8 @@ func (s *MethodTestSuite) TestUser() { ID: u.ID, }).Asserts( u, policy.ActionRead, - rbac.ResourceRoleAssignment, policy.ActionCreate, - rbac.ResourceRoleAssignment, policy.ActionDelete, + rbac.ResourceAssignRole, policy.ActionAssign, + rbac.ResourceAssignRole, policy.ActionDelete, ).Returns(o) })) s.Run("AllUserIDs", s.Subtest(func(db database.Store, check *expects) { @@ -1430,7 +1432,18 @@ func (s *MethodTestSuite) TestWorkspace() { WorkspaceID: w.ID, Transition: database.WorkspaceTransitionStart, Reason: database.BuildReasonInitiator, - }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate) + }).Asserts(w, policy.ActionWorkspaceStart) + })) + s.Run("Stop/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { + t := dbgen.Template(s.T(), db, database.Template{}) + w := dbgen.Workspace(s.T(), db, database.Workspace{ + TemplateID: t.ID, + }) + check.Args(database.InsertWorkspaceBuildParams{ + WorkspaceID: w.ID, + Transition: database.WorkspaceTransitionStop, + Reason: database.BuildReasonInitiator, + }).Asserts(w, policy.ActionWorkspaceStop) })) s.Run("Start/RequireActiveVersion/VersionMismatch/InsertWorkspaceBuild", s.Subtest(func(db database.Store, check *expects) { t := dbgen.Template(s.T(), db, database.Template{}) @@ -1452,7 +1465,7 @@ func (s *MethodTestSuite) TestWorkspace() { Reason: database.BuildReasonInitiator, TemplateVersionID: v.ID, }).Asserts( - w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate, + w, policy.ActionWorkspaceStart, t, policy.ActionUpdate, ) })) @@ -1480,7 +1493,7 @@ func (s *MethodTestSuite) TestWorkspace() { Reason: database.BuildReasonInitiator, TemplateVersionID: v.ID, }).Asserts( - w.WorkspaceBuildRBAC(database.WorkspaceTransitionStart), policy.ActionUpdate, + w, policy.ActionWorkspaceStart, ) })) s.Run("Delete/InsertWorkspaceBuild", 
s.Subtest(func(db database.Store, check *expects) { @@ -1489,7 +1502,7 @@ func (s *MethodTestSuite) TestWorkspace() { WorkspaceID: w.ID, Transition: database.WorkspaceTransitionDelete, Reason: database.BuildReasonInitiator, - }).Asserts(w.WorkspaceBuildRBAC(database.WorkspaceTransitionDelete), policy.ActionDelete) + }).Asserts(w, policy.ActionDelete) })) s.Run("InsertWorkspaceBuildParameters", s.Subtest(func(db database.Store, check *expects) { w := dbgen.Workspace(s.T(), db, database.Workspace{}) @@ -2204,13 +2217,13 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args().Asserts() })) s.Run("UpsertApplicationName", s.Subtest(func(db database.Store, check *expects) { - check.Args("").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) + check.Args("").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) s.Run("GetHealthSettings", s.Subtest(func(db database.Store, check *expects) { check.Args().Asserts() })) s.Run("UpsertHealthSettings", s.Subtest(func(db database.Store, check *expects) { - check.Args("foo").Asserts(rbac.ResourceDeploymentValues, policy.ActionCreate) + check.Args("foo").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) s.Run("GetDeploymentWorkspaceAgentStats", s.Subtest(func(db database.Store, check *expects) { check.Args(time.Time{}).Asserts() @@ -2335,11 +2348,11 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{Name: "first"}), dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{Name: "last"}), } - check.Args().Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionRead).Returns(apps) + check.Args().Asserts(rbac.ResourceOauth2App, policy.ActionRead).Returns(apps) })) s.Run("GetOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) - check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionRead).Returns(app) + 
check.Args(app.ID).Asserts(rbac.ResourceOauth2App, policy.ActionRead).Returns(app) })) s.Run("GetOAuth2ProviderAppsByUserID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2357,7 +2370,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { APIKeyID: key.ID, }) } - check.Args(user.ID).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionRead).Returns([]database.GetOAuth2ProviderAppsByUserIDRow{ + check.Args(user.ID).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionRead).Returns([]database.GetOAuth2ProviderAppsByUserIDRow{ { OAuth2ProviderApp: database.OAuth2ProviderApp{ ID: app.ID, @@ -2370,7 +2383,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { }) })) s.Run("InsertOAuth2ProviderApp", s.Subtest(func(db database.Store, check *expects) { - check.Args(database.InsertOAuth2ProviderAppParams{}).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionCreate) + check.Args(database.InsertOAuth2ProviderAppParams{}).Asserts(rbac.ResourceOauth2App, policy.ActionCreate) })) s.Run("UpdateOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) @@ -2381,11 +2394,11 @@ func (s *MethodTestSuite) TestOAuth2ProviderApps() { Name: app.Name, CallbackURL: app.CallbackURL, UpdatedAt: app.UpdatedAt, - }).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionUpdate).Returns(app) + }).Asserts(rbac.ResourceOauth2App, policy.ActionUpdate).Returns(app) })) s.Run("DeleteOAuth2ProviderAppByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) - check.Args(app.ID).Asserts(rbac.ResourceOAuth2ProviderApp, policy.ActionDelete) + check.Args(app.ID).Asserts(rbac.ResourceOauth2App, policy.ActionDelete) })) } @@ -2405,27 +2418,27 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() { _ = 
dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app2.ID, }) - check.Args(app1.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secrets) + check.Args(app1.ID).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionRead).Returns(secrets) })) s.Run("GetOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secret) + check.Args(secret.ID).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionRead).Returns(secret) })) s.Run("GetOAuth2ProviderAppSecretByPrefix", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - check.Args(secret.SecretPrefix).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionRead).Returns(secret) + check.Args(secret.SecretPrefix).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionRead).Returns(secret) })) s.Run("InsertOAuth2ProviderAppSecret", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) check.Args(database.InsertOAuth2ProviderAppSecretParams{ AppID: app.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionCreate) + }).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionCreate) })) s.Run("UpdateOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) @@ -2436,14 +2449,14 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppSecrets() { check.Args(database.UpdateOAuth2ProviderAppSecretByIDParams{ ID: secret.ID, LastUsedAt: secret.LastUsedAt, 
- }).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionUpdate).Returns(secret) + }).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionUpdate).Returns(secret) })) s.Run("DeleteOAuth2ProviderAppSecretByID", s.Subtest(func(db database.Store, check *expects) { app := dbgen.OAuth2ProviderApp(s.T(), db, database.OAuth2ProviderApp{}) secret := dbgen.OAuth2ProviderAppSecret(s.T(), db, database.OAuth2ProviderAppSecret{ AppID: app.ID, }) - check.Args(secret.ID).Asserts(rbac.ResourceOAuth2ProviderAppSecret, policy.ActionDelete) + check.Args(secret.ID).Asserts(rbac.ResourceOauth2AppSecret, policy.ActionDelete) })) } @@ -2472,7 +2485,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { check.Args(database.InsertOAuth2ProviderAppCodeParams{ AppID: app.ID, UserID: user.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) + }).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) })) s.Run("DeleteOAuth2ProviderAppCodeByID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2495,7 +2508,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppCodes() { check.Args(database.DeleteOAuth2ProviderAppCodesByAppAndUserIDParams{ AppID: app.ID, UserID: user.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) + }).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) })) } @@ -2512,7 +2525,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { check.Args(database.InsertOAuth2ProviderAppTokenParams{ AppSecretID: secret.ID, APIKeyID: key.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) + }).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionCreate) })) s.Run("GetOAuth2ProviderAppTokenByPrefix", s.Subtest(func(db database.Store, check *expects) { user := 
dbgen.User(s.T(), db, database.User{}) @@ -2527,7 +2540,7 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { AppSecretID: secret.ID, APIKeyID: key.ID, }) - check.Args(token.HashPrefix).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionRead) + check.Args(token.HashPrefix).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionRead) })) s.Run("DeleteOAuth2ProviderAppTokensByAppAndUserID", s.Subtest(func(db database.Store, check *expects) { user := dbgen.User(s.T(), db, database.User{}) @@ -2547,6 +2560,6 @@ func (s *MethodTestSuite) TestOAuth2ProviderAppTokens() { check.Args(database.DeleteOAuth2ProviderAppTokensByAppAndUserIDParams{ AppID: app.ID, UserID: user.ID, - }).Asserts(rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) + }).Asserts(rbac.ResourceOauth2AppCodeToken.WithOwner(user.ID.String()), policy.ActionDelete) })) } diff --git a/coderd/database/modelmethods.go b/coderd/database/modelmethods.go index 9e7777283967d..d71c63b089556 100644 --- a/coderd/database/modelmethods.go +++ b/coderd/database/modelmethods.go @@ -100,7 +100,7 @@ func (s APIKeyScope) ToRBAC() rbac.ScopeName { } func (k APIKey) RBACObject() rbac.Object { - return rbac.ResourceAPIKey.WithIDString(k.ID). + return rbac.ResourceApiKey.WithIDString(k.ID). WithOwner(k.UserID.String()) } @@ -154,47 +154,12 @@ func (w GetWorkspaceByAgentIDRow) RBACObject() rbac.Object { } func (w Workspace) RBACObject() rbac.Object { - return rbac.ResourceWorkspace.WithID(w.ID). - InOrg(w.OrganizationID). - WithOwner(w.OwnerID.String()) -} - -func (w Workspace) ExecutionRBAC() rbac.Object { // If a workspace is locked it cannot be accessed. if w.DormantAt.Valid { return w.DormantRBAC() } - return rbac.ResourceWorkspaceExecution. - WithID(w.ID). - InOrg(w.OrganizationID). 
- WithOwner(w.OwnerID.String()) -} - -func (w Workspace) ApplicationConnectRBAC() rbac.Object { - // If a workspace is locked it cannot be accessed. - if w.DormantAt.Valid { - return w.DormantRBAC() - } - - return rbac.ResourceWorkspaceApplicationConnect. - WithID(w.ID). - InOrg(w.OrganizationID). - WithOwner(w.OwnerID.String()) -} - -func (w Workspace) WorkspaceBuildRBAC(transition WorkspaceTransition) rbac.Object { - // If a workspace is dormant it cannot be built. - // However we need to allow stopping a workspace by a caller once a workspace - // is locked (e.g. for autobuild). Additionally, if a user wants to delete - // a locked workspace, they shouldn't have to have it unlocked first. - if w.DormantAt.Valid && transition != WorkspaceTransitionStop && - transition != WorkspaceTransitionDelete { - return w.DormantRBAC() - } - - return rbac.ResourceWorkspaceBuild. - WithID(w.ID). + return rbac.ResourceWorkspace.WithID(w.ID). InOrg(w.OrganizationID). WithOwner(w.OwnerID.String()) } @@ -246,32 +211,17 @@ func (f File) RBACObject() rbac.Object { } // RBACObject returns the RBAC object for the site wide user resource. -// If you are trying to get the RBAC object for the UserData, use -// u.UserDataRBACObject() instead. func (u User) RBACObject() rbac.Object { return rbac.ResourceUserObject(u.ID) } -func (u User) UserDataRBACObject() rbac.Object { - return rbac.ResourceUserData.WithID(u.ID).WithOwner(u.ID.String()) -} - -func (u User) UserWorkspaceBuildParametersObject() rbac.Object { - return rbac.ResourceUserWorkspaceBuildParameters.WithID(u.ID).WithOwner(u.ID.String()) -} - func (u GetUsersRow) RBACObject() rbac.Object { return rbac.ResourceUserObject(u.ID) } -func (u GitSSHKey) RBACObject() rbac.Object { - return rbac.ResourceUserData.WithID(u.UserID).WithOwner(u.UserID.String()) -} - -func (u ExternalAuthLink) RBACObject() rbac.Object { - // I assume UserData is ok? 
- return rbac.ResourceUserData.WithID(u.UserID).WithOwner(u.UserID.String()) -} +func (u GitSSHKey) RBACObject() rbac.Object { return rbac.ResourceUserObject(u.UserID) } +func (u ExternalAuthLink) RBACObject() rbac.Object { return rbac.ResourceUserObject(u.UserID) } +func (u UserLink) RBACObject() rbac.Object { return rbac.ResourceUserObject(u.UserID) } func (u ExternalAuthLink) OAuthToken() *oauth2.Token { return &oauth2.Token{ @@ -281,25 +231,20 @@ func (u ExternalAuthLink) OAuthToken() *oauth2.Token { } } -func (u UserLink) RBACObject() rbac.Object { - // I assume UserData is ok? - return rbac.ResourceUserData.WithOwner(u.UserID.String()).WithID(u.UserID) -} - func (l License) RBACObject() rbac.Object { return rbac.ResourceLicense.WithIDString(strconv.FormatInt(int64(l.ID), 10)) } func (c OAuth2ProviderAppCode) RBACObject() rbac.Object { - return rbac.ResourceOAuth2ProviderAppCodeToken.WithOwner(c.UserID.String()) + return rbac.ResourceOauth2AppCodeToken.WithOwner(c.UserID.String()) } func (OAuth2ProviderAppSecret) RBACObject() rbac.Object { - return rbac.ResourceOAuth2ProviderAppSecret + return rbac.ResourceOauth2AppSecret } func (OAuth2ProviderApp) RBACObject() rbac.Object { - return rbac.ResourceOAuth2ProviderApp + return rbac.ResourceOauth2App } func (a GetOAuth2ProviderAppsByUserIDRow) RBACObject() rbac.Object { diff --git a/coderd/debug.go b/coderd/debug.go index 0e98539a71f75..b1f17f29e0102 100644 --- a/coderd/debug.go +++ b/coderd/debug.go @@ -194,7 +194,7 @@ func (api *API) deploymentHealthSettings(rw http.ResponseWriter, r *http.Request func (api *API) putDeploymentHealthSettings(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentConfig) { httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ Message: "Insufficient permissions to update health settings.", }) diff --git 
a/coderd/deployment.go b/coderd/deployment.go index 572bf9076bb59..4c78563a80456 100644 --- a/coderd/deployment.go +++ b/coderd/deployment.go @@ -17,7 +17,7 @@ import ( // @Success 200 {object} codersdk.DeploymentConfig // @Router /deployment/config [get] func (api *API) deploymentValues(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentConfig) { httpapi.Forbidden(rw) return } diff --git a/coderd/insights.go b/coderd/insights.go index 85b4ec8661d9c..2da27e2561762 100644 --- a/coderd/insights.go +++ b/coderd/insights.go @@ -33,7 +33,7 @@ const insightsTimeLayout = time.RFC3339 // @Success 200 {object} codersdk.DAUsResponse // @Router /insights/daus [get] func (api *API) deploymentDAUs(rw http.ResponseWriter, r *http.Request) { - if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceDeploymentConfig) { httpapi.Forbidden(rw) return } diff --git a/coderd/rbac/README.md b/coderd/rbac/README.md index 2a73a59d7febc..e867fa9cce50a 100644 --- a/coderd/rbac/README.md +++ b/coderd/rbac/README.md @@ -106,7 +106,7 @@ You can test outside of golang by using the `opa` cli. **Evaluation** -opa eval --format=pretty 'false' -d policy.rego -i input.json +opa eval --format=pretty "data.authz.allow" -d policy.rego -i input.json **Partial Evaluation** diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go index c647bb09f89a0..859782d0286b1 100644 --- a/coderd/rbac/authz.go +++ b/coderd/rbac/authz.go @@ -26,11 +26,6 @@ import ( "github.com/coder/coder/v2/coderd/util/slice" ) -// AllActions is a helper function to return all the possible actions types. 
-func AllActions() []policy.Action { - return []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} -} - type AuthCall struct { Actor Subject Action policy.Action @@ -219,6 +214,10 @@ type RegoAuthorizer struct { authorizeHist *prometheus.HistogramVec prepareHist prometheus.Histogram + + // strict checking also verifies the inputs to the authorizer. Making sure + // the action make sense for the input object. + strict bool } var _ Authorizer = (*RegoAuthorizer)(nil) @@ -240,6 +239,13 @@ func NewCachingAuthorizer(registry prometheus.Registerer) Authorizer { return Cacher(NewAuthorizer(registry)) } +// NewStrictCachingAuthorizer is mainly just for testing. +func NewStrictCachingAuthorizer(registry prometheus.Registerer) Authorizer { + auth := NewAuthorizer(registry) + auth.strict = true + return Cacher(auth) +} + func NewAuthorizer(registry prometheus.Registerer) *RegoAuthorizer { queryOnce.Do(func() { var err error @@ -326,6 +332,12 @@ type authSubject struct { // the object. // If an error is returned, the authorization is denied. 
func (a RegoAuthorizer) Authorize(ctx context.Context, subject Subject, action policy.Action, object Object) error { + if a.strict { + if err := object.ValidAction(action); err != nil { + return xerrors.Errorf("strict authz check: %w", err) + } + } + start := time.Now() ctx, span := tracing.StartSpan(ctx, trace.WithTimestamp(start), // Reuse the time.Now for metric and trace diff --git a/coderd/rbac/authz_internal_test.go b/coderd/rbac/authz_internal_test.go index cba69952ea481..7b53939a3651b 100644 --- a/coderd/rbac/authz_internal_test.go +++ b/coderd/rbac/authz_internal_test.go @@ -15,6 +15,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/rbac/regosql" + "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/testutil" ) @@ -303,16 +304,16 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "UserACLList", user, []authTestCase{ { resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ - user.ID: AllActions(), + user.ID: ResourceWorkspace.AvailableActions(), }), - actions: AllActions(), + actions: ResourceWorkspace.AvailableActions(), allow: true, }, { resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(unuseID).WithACLUserList(map[string][]policy.Action{ - user.ID: {WildcardSymbol}, + user.ID: {policy.WildcardSymbol}, }), - actions: AllActions(), + actions: ResourceWorkspace.AvailableActions(), allow: true, }, { @@ -335,16 +336,16 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "GroupACLList", user, []authTestCase{ { resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ - allUsersGroup: AllActions(), + allUsersGroup: ResourceWorkspace.AvailableActions(), }), - actions: AllActions(), + actions: ResourceWorkspace.AvailableActions(), allow: true, }, { resource: ResourceWorkspace.WithOwner(unuseID.String()).InOrg(defOrg).WithGroupACL(map[string][]policy.Action{ 
- allUsersGroup: {WildcardSymbol}, + allUsersGroup: {policy.WildcardSymbol}, }), - actions: AllActions(), + actions: ResourceWorkspace.AvailableActions(), allow: true, }, { @@ -366,27 +367,27 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "Member", user, []authTestCase{ // Org + me - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: AllActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(defOrg), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.All(), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, 
// Other org + other us - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) user = Subject{ @@ -398,8 +399,8 @@ func TestAuthorizeDomain(t *testing.T) { Site: []Permission{ { Negate: true, - ResourceType: WildcardSymbol, - Action: WildcardSymbol, + ResourceType: policy.WildcardSymbol, + Action: policy.WildcardSymbol, }, }, }}, @@ -407,27 +408,27 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "DeletedMember", user, []authTestCase{ // Org + me - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(defOrg), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.All(), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: AllActions(), allow: false}, - {resource: 
ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) user = Subject{ @@ -439,29 +440,33 @@ func TestAuthorizeDomain(t *testing.T) { }, } + workspaceExceptConnect := slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH) + workspaceConnect := []policy.Action{policy.ActionApplicationConnect, policy.ActionSSH} testAuthorize(t, "OrgAdmin", user, []authTestCase{ // Org + me - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: AllActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(defOrg), actions: AllActions(), allow: true}, + {resource: 
ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg), actions: workspaceExceptConnect, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg), actions: workspaceConnect, allow: false}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.All(), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other user - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: workspaceExceptConnect, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: workspaceConnect, allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: AllActions(), allow: false}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), 
allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: false}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: false}, }) user = Subject{ @@ -475,27 +480,27 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "SiteAdmin", user, []authTestCase{ // Org + me - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: AllActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(defOrg), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.WithOwner(user.ID), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.All(), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.All(), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + me - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: AllActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + other user - {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: 
ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, // Other org + other use - {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: AllActions(), allow: true}, - {resource: ResourceWorkspace.InOrg(unuseID), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, + {resource: ResourceWorkspace.InOrg(unuseID), actions: ResourceWorkspace.AvailableActions(), allow: true}, - {resource: ResourceWorkspace.WithOwner("not-me"), actions: AllActions(), allow: true}, + {resource: ResourceWorkspace.WithOwner("not-me"), actions: ResourceWorkspace.AvailableActions(), allow: true}, }) user = Subject{ @@ -510,60 +515,60 @@ func TestAuthorizeDomain(t *testing.T) { testAuthorize(t, "ApplicationToken", user, // Create (connect) Actions cases(func(c authTestCase) authTestCase { - c.actions = []policy.Action{policy.ActionCreate} + c.actions = []policy.Action{policy.ActionApplicationConnect} return c }, []authTestCase{ // Org + me - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg), allow: false}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner(user.ID), allow: true}, + {resource: ResourceWorkspace.WithOwner(user.ID), allow: true}, - {resource: ResourceWorkspaceApplicationConnect.All(), allow: false}, + {resource: ResourceWorkspace.All(), allow: false}, // Other org + me - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID).WithOwner(user.ID), allow: false}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID), allow: false}, + {resource: 
ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, // Other org + other user - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), allow: false}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, // Other org + other use - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID).WithOwner("not-me"), allow: false}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.InOrg(unuseID), allow: false}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner("not-me"), allow: false}, + {resource: ResourceWorkspace.WithOwner("not-me"), allow: false}, }), - // Not create actions + // No ActionApplicationConnect action cases(func(c authTestCase) authTestCase { c.actions = []policy.Action{policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} c.allow = false return c }, []authTestCase{ // Org + me - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID)}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg)}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID)}, + {resource: ResourceWorkspace.InOrg(defOrg)}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner(user.ID)}, + {resource: ResourceWorkspace.WithOwner(user.ID)}, - {resource: ResourceWorkspaceApplicationConnect.All()}, + {resource: ResourceWorkspace.All()}, // Other org + me - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID).WithOwner(user.ID)}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID)}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner(user.ID)}, + {resource: 
ResourceWorkspace.InOrg(unuseID)}, // Other org + other user - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me")}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me")}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner("not-me")}, + {resource: ResourceWorkspace.WithOwner("not-me")}, // Other org + other use - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID).WithOwner("not-me")}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unuseID)}, + {resource: ResourceWorkspace.InOrg(unuseID).WithOwner("not-me")}, + {resource: ResourceWorkspace.InOrg(unuseID)}, - {resource: ResourceWorkspaceApplicationConnect.WithOwner("not-me")}, + {resource: ResourceWorkspace.WithOwner("not-me")}, }), // Other Objects cases(func(c authTestCase) authTestCase { @@ -713,8 +718,8 @@ func TestAuthorizeLevels(t *testing.T) { User: []Permission{ { Negate: true, - ResourceType: WildcardSymbol, - Action: WildcardSymbol, + ResourceType: policy.WildcardSymbol, + Action: policy.WildcardSymbol, }, }, }, @@ -723,7 +728,7 @@ func TestAuthorizeLevels(t *testing.T) { testAuthorize(t, "AdminAlwaysAllow", user, cases(func(c authTestCase) authTestCase { - c.actions = AllActions() + c.actions = ResourceWorkspace.AvailableActions() c.allow = true return c }, []authTestCase{ @@ -761,7 +766,7 @@ func TestAuthorizeLevels(t *testing.T) { { Negate: true, ResourceType: "random", - Action: WildcardSymbol, + Action: policy.WildcardSymbol, }, }, }, @@ -772,8 +777,8 @@ func TestAuthorizeLevels(t *testing.T) { User: []Permission{ { Negate: true, - ResourceType: WildcardSymbol, - Action: WildcardSymbol, + ResourceType: policy.WildcardSymbol, + Action: policy.WildcardSymbol, }, }, }, @@ -782,7 +787,8 @@ func TestAuthorizeLevels(t *testing.T) { testAuthorize(t, "OrgAllowAll", user, cases(func(c authTestCase) authTestCase { - c.actions = AllActions() + // SSH and app connect are not implied here. 
+ c.actions = slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH) return c }, []authTestCase{ // Org + me @@ -840,9 +846,9 @@ func TestAuthorizeScope(t *testing.T) { }), // Allowed by scope: []authTestCase{ - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionApplicationConnect}, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionApplicationConnect}, allow: true}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionApplicationConnect}, allow: true}, }, ) @@ -875,9 +881,9 @@ func TestAuthorizeScope(t *testing.T) { }), // Allowed by scope: []authTestCase{ - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionCreate}, allow: true}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, - {resource: ResourceWorkspaceApplicationConnect.InOrg(unusedID).WithOwner("not-me"), actions: []policy.Action{policy.ActionCreate}, allow: false}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner(user.ID), actions: []policy.Action{policy.ActionApplicationConnect}, allow: true}, + {resource: ResourceWorkspace.InOrg(defOrg).WithOwner("not-me"), actions: []policy.Action{policy.ActionApplicationConnect}, allow: false}, + {resource: ResourceWorkspace.InOrg(unusedID).WithOwner("not-me"), 
actions: []policy.Action{policy.ActionApplicationConnect}, allow: false}, }, ) diff --git a/coderd/rbac/authz_test.go b/coderd/rbac/authz_test.go index 4ac8f20d94506..05940856ec583 100644 --- a/coderd/rbac/authz_test.go +++ b/coderd/rbac/authz_test.go @@ -160,7 +160,7 @@ func BenchmarkRBACAuthorize(b *testing.B) { // There is no caching that occurs because a fresh context is used for each // call. And the context needs 'WithCacheCtx' to work. - authorizer := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) // This benchmarks all the simple cases using just user permissions. Groups // are added as noise, but do not do anything. for _, c := range benchCases { @@ -187,7 +187,7 @@ func BenchmarkRBACAuthorizeGroups(b *testing.B) { uuid.MustParse("0632b012-49e0-4d70-a5b3-f4398f1dcd52"), uuid.MustParse("70dbaa7a-ea9c-4f68-a781-97b08af8461d"), ) - authorizer := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) // Same benchmark cases, but this time groups will be used to match. // Some '*' permissions will still match, but using a fake action reduces @@ -239,7 +239,7 @@ func BenchmarkRBACFilter(b *testing.B) { uuid.MustParse("70dbaa7a-ea9c-4f68-a781-97b08af8461d"), ) - authorizer := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + authorizer := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) for _, c := range benchCases { b.Run("PrepareOnly-"+c.Name, func(b *testing.B) { diff --git a/coderd/rbac/object.go b/coderd/rbac/object.go index bac8b90fe90c4..30a74e4f825dd 100644 --- a/coderd/rbac/object.go +++ b/coderd/rbac/object.go @@ -1,237 +1,13 @@ package rbac import ( + "fmt" + "github.com/google/uuid" "github.com/coder/coder/v2/coderd/rbac/policy" ) -const WildcardSymbol = "*" - -// Objecter returns the RBAC object for itself. 
-type Objecter interface { - RBACObject() Object -} - -// Resources are just typed objects. Making resources this way allows directly -// passing them into an Authorize function and use the chaining api. -var ( - // ResourceWildcard represents all resource types - // Try to avoid using this where possible. - ResourceWildcard = Object{ - Type: WildcardSymbol, - } - - // ResourceWorkspace CRUD. Org + User owner - // create/delete = make or delete workspaces - // read = access workspace - // update = edit workspace variables - ResourceWorkspace = Object{ - Type: "workspace", - } - - // ResourceWorkspaceBuild refers to permissions necessary to - // insert a workspace build job. - // create/delete = ? - // read = read workspace builds - // update = insert/update workspace builds. - ResourceWorkspaceBuild = Object{ - Type: "workspace_build", - } - - // ResourceWorkspaceDormant is returned if a workspace is dormant. - // It grants restricted permissions on workspace builds. - ResourceWorkspaceDormant = Object{ - Type: "workspace_dormant", - } - - // ResourceWorkspaceProxy CRUD. Org - // create/delete = make or delete proxies - // read = read proxy urls - // update = edit workspace proxy fields - ResourceWorkspaceProxy = Object{ - Type: "workspace_proxy", - } - - // ResourceWorkspaceExecution CRUD. Org + User owner - // create = workspace remote execution - // read = ? - // update = ? - // delete = ? - ResourceWorkspaceExecution = Object{ - Type: "workspace_execution", - } - - // ResourceWorkspaceApplicationConnect CRUD. Org + User owner - // create = connect to an application - // read = ? - // update = ? - // delete = ? - ResourceWorkspaceApplicationConnect = Object{ - Type: "application_connect", - } - - // ResourceAuditLog - // read = access audit log - ResourceAuditLog = Object{ - Type: "audit_log", - } - - // ResourceTemplate CRUD. Org owner only. 
- // create/delete = Make or delete a new template - // update = Update the template, make new template versions - // read = read the template and all versions associated - ResourceTemplate = Object{ - Type: "template", - } - - // ResourceGroup CRUD. Org admins only. - // create/delete = Make or delete a new group. - // update = Update the name or members of a group. - // read = Read groups and their members. - ResourceGroup = Object{ - Type: "group", - } - - ResourceFile = Object{ - Type: "file", - } - - ResourceProvisionerDaemon = Object{ - Type: "provisioner_daemon", - } - - // ResourceOrganization CRUD. Has an org owner on all but 'create'. - // create/delete = make or delete organizations - // read = view org information (Can add user owner for read) - // update = ?? - ResourceOrganization = Object{ - Type: "organization", - } - - // ResourceRoleAssignment might be expanded later to allow more granular permissions - // to modifying roles. For now, this covers all possible roles, so having this permission - // allows granting/deleting **ALL** roles. - // Never has an owner or org. - // create = Assign roles - // update = ?? - // read = View available roles to assign - // delete = Remove role - ResourceRoleAssignment = Object{ - Type: "assign_role", - } - - // ResourceOrgRoleAssignment is just like ResourceRoleAssignment but for organization roles. - ResourceOrgRoleAssignment = Object{ - Type: "assign_org_role", - } - - // ResourceAPIKey is owned by a user. - // create = Create a new api key for user - // update = ?? - // read = View api key - // delete = Delete api key - ResourceAPIKey = Object{ - Type: "api_key", - } - - // ResourceUser is the user in the 'users' table. - // ResourceUser never has any owners or in an org, as it's site wide. - // create/delete = make or delete a new user. 
- // read = view all 'user' table data - // update = update all 'user' table data - ResourceUser = Object{ - Type: "user", - } - - // ResourceUserData is any data associated with a user. A user has control - // over their data (profile, password, etc). So this resource has an owner. - ResourceUserData = Object{ - Type: "user_data", - } - - // ResourceUserWorkspaceBuildParameters is the user's workspace build - // parameter history. - ResourceUserWorkspaceBuildParameters = Object{ - Type: "user_workspace_build_parameters", - } - - // ResourceOrganizationMember is a user's membership in an organization. - // Has ONLY an organization owner. - // create/delete = Create/delete member from org. - // update = Update organization member - // read = View member - ResourceOrganizationMember = Object{ - Type: "organization_member", - } - - // ResourceLicense is the license in the 'licenses' table. - // ResourceLicense is site wide. - // create/delete = add or remove license from site. - // read = view license claims - // update = not applicable; licenses are immutable - ResourceLicense = Object{ - Type: "license", - } - - // ResourceDeploymentValues - ResourceDeploymentValues = Object{ - Type: "deployment_config", - } - - ResourceDeploymentStats = Object{ - Type: "deployment_stats", - } - - ResourceReplicas = Object{ - Type: "replicas", - } - - // ResourceDebugInfo controls access to the debug routes `/api/v2/debug/*`. - ResourceDebugInfo = Object{ - Type: "debug_info", - } - - // ResourceSystem is a pseudo-resource only used for system-level actions. - ResourceSystem = Object{ - Type: "system", - } - - // ResourceTailnetCoordinator is a pseudo-resource for use by the tailnet coordinator - ResourceTailnetCoordinator = Object{ - Type: "tailnet_coordinator", - } - - // ResourceTemplateInsights is a pseudo-resource for reading template insights data. - ResourceTemplateInsights = Object{ - Type: "template_insights", - } - - // ResourceOAuth2ProviderApp CRUD. 
- // create/delete = Make or delete an OAuth2 app. - // update = Update the properties of the OAuth2 app. - // read = Read OAuth2 apps. - ResourceOAuth2ProviderApp = Object{ - Type: "oauth2_app", - } - - // ResourceOAuth2ProviderAppSecret CRUD. - // create/delete = Make or delete an OAuth2 app secret. - // update = Update last used date. - // read = Read OAuth2 app hashed or truncated secret. - ResourceOAuth2ProviderAppSecret = Object{ - Type: "oauth2_app_secret", - } - - // ResourceOAuth2ProviderAppCodeToken CRUD. - // create/delete = Make or delete an OAuth2 app code or token. - // update = None - // read = Check if OAuth2 app code or token exists. - ResourceOAuth2ProviderAppCodeToken = Object{ - Type: "oauth2_app_code_token", - } -) - // ResourceUserObject is a helper function to create a user object for authz checks. func ResourceUserObject(userID uuid.UUID) Object { return ResourceUser.WithID(userID).WithOwner(userID.String()) @@ -256,6 +32,35 @@ type Object struct { ACLGroupList map[string][]policy.Action ` json:"acl_group_list"` } +// ValidAction checks if the action is valid for the given object type. +func (z Object) ValidAction(action policy.Action) error { + perms, ok := policy.RBACPermissions[z.Type] + if !ok { + return fmt.Errorf("invalid type %q", z.Type) + } + if _, ok := perms.Actions[action]; !ok { + return fmt.Errorf("invalid action %q for type %q", action, z.Type) + } + + return nil +} + +// AvailableActions returns all available actions for a given object. +// Wildcard is omitted. 
+func (z Object) AvailableActions() []policy.Action { + perms, ok := policy.RBACPermissions[z.Type] + if !ok { + return []policy.Action{} + } + + actions := make([]policy.Action, 0, len(perms.Actions)) + for action := range perms.Actions { + actions = append(actions, action) + } + + return actions +} + func (z Object) Equal(b Object) bool { if z.ID != b.ID { return false diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index b1cac5704e049..57ec0982a15ae 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -1,38 +1,297 @@ // Code generated by rbacgen/main.go. DO NOT EDIT. package rbac -func AllResources() []Object { - return []Object{ - ResourceAPIKey, +import "github.com/coder/coder/v2/coderd/rbac/policy" + +// Objecter returns the RBAC object for itself. +type Objecter interface { + RBACObject() Object +} + +var ( + // ResourceWildcard + // Valid Actions + ResourceWildcard = Object{ + Type: "*", + } + + // ResourceApiKey + // Valid Actions + // - "ActionCreate" :: create an api key + // - "ActionDelete" :: delete an api key + // - "ActionRead" :: read api key details (secrets are not stored) + // - "ActionUpdate" :: update an api key, eg expires + ResourceApiKey = Object{ + Type: "api_key", + } + + // ResourceAssignOrgRole + // Valid Actions + // - "ActionAssign" :: ability to assign org scoped roles + // - "ActionDelete" :: ability to delete org scoped roles + // - "ActionRead" :: view what roles are assignable + ResourceAssignOrgRole = Object{ + Type: "assign_org_role", + } + + // ResourceAssignRole + // Valid Actions + // - "ActionAssign" :: ability to assign roles + // - "ActionDelete" :: ability to delete roles + // - "ActionRead" :: view what roles are assignable + ResourceAssignRole = Object{ + Type: "assign_role", + } + + // ResourceAuditLog + // Valid Actions + // - "ActionCreate" :: create new audit log entries + // - "ActionRead" :: read audit logs + ResourceAuditLog = Object{ + Type: "audit_log", + } + + // 
ResourceDebugInfo + // Valid Actions + // - "ActionRead" :: access to debug routes + ResourceDebugInfo = Object{ + Type: "debug_info", + } + + // ResourceDeploymentConfig + // Valid Actions + // - "ActionRead" :: read deployment config + // - "ActionUpdate" :: updating health information + ResourceDeploymentConfig = Object{ + Type: "deployment_config", + } + + // ResourceDeploymentStats + // Valid Actions + // - "ActionRead" :: read deployment stats + ResourceDeploymentStats = Object{ + Type: "deployment_stats", + } + + // ResourceFile + // Valid Actions + // - "ActionCreate" :: create a file + // - "ActionRead" :: read files + ResourceFile = Object{ + Type: "file", + } + + // ResourceGroup + // Valid Actions + // - "ActionCreate" :: create a group + // - "ActionDelete" :: delete a group + // - "ActionRead" :: read groups + // - "ActionUpdate" :: update a group + ResourceGroup = Object{ + Type: "group", + } + + // ResourceLicense + // Valid Actions + // - "ActionCreate" :: create a license + // - "ActionDelete" :: delete license + // - "ActionRead" :: read licenses + ResourceLicense = Object{ + Type: "license", + } + + // ResourceOauth2App + // Valid Actions + // - "ActionCreate" :: make an OAuth2 app. + // - "ActionDelete" :: delete an OAuth2 app + // - "ActionRead" :: read OAuth2 apps + // - "ActionUpdate" :: update the properties of the OAuth2 app. 
+ ResourceOauth2App = Object{ + Type: "oauth2_app", + } + + // ResourceOauth2AppCodeToken + // Valid Actions + // - "ActionCreate" :: + // - "ActionDelete" :: + // - "ActionRead" :: + ResourceOauth2AppCodeToken = Object{ + Type: "oauth2_app_code_token", + } + + // ResourceOauth2AppSecret + // Valid Actions + // - "ActionCreate" :: + // - "ActionDelete" :: + // - "ActionRead" :: + // - "ActionUpdate" :: + ResourceOauth2AppSecret = Object{ + Type: "oauth2_app_secret", + } + + // ResourceOrganization + // Valid Actions + // - "ActionCreate" :: create an organization + // - "ActionDelete" :: delete an organization + // - "ActionRead" :: read organizations + // - "ActionUpdate" :: update an organization + ResourceOrganization = Object{ + Type: "organization", + } + + // ResourceOrganizationMember + // Valid Actions + // - "ActionCreate" :: create an organization member + // - "ActionDelete" :: delete member + // - "ActionRead" :: read member + // - "ActionUpdate" :: update an organization member + ResourceOrganizationMember = Object{ + Type: "organization_member", + } + + // ResourceProvisionerDaemon + // Valid Actions + // - "ActionCreate" :: create a provisioner daemon + // - "ActionDelete" :: delete a provisioner daemon + // - "ActionRead" :: read provisioner daemon + // - "ActionUpdate" :: update a provisioner daemon + ResourceProvisionerDaemon = Object{ + Type: "provisioner_daemon", + } + + // ResourceReplicas + // Valid Actions + // - "ActionRead" :: read replicas + ResourceReplicas = Object{ + Type: "replicas", + } + + // ResourceSystem + // Valid Actions + // - "ActionCreate" :: create system resources + // - "ActionDelete" :: delete system resources + // - "ActionRead" :: view system resources + // - "ActionUpdate" :: update system resources + ResourceSystem = Object{ + Type: "system", + } + + // ResourceTailnetCoordinator + // Valid Actions + // - "ActionCreate" :: + // - "ActionDelete" :: + // - "ActionRead" :: + // - "ActionUpdate" :: + 
ResourceTailnetCoordinator = Object{ + Type: "tailnet_coordinator", + } + + // ResourceTemplate + // Valid Actions + // - "ActionCreate" :: create a template + // - "ActionDelete" :: delete a template + // - "ActionRead" :: read template + // - "ActionUpdate" :: update a template + // - "ActionViewInsights" :: view insights + ResourceTemplate = Object{ + Type: "template", + } + + // ResourceUser + // Valid Actions + // - "ActionCreate" :: create a new user + // - "ActionDelete" :: delete an existing user + // - "ActionRead" :: read user data + // - "ActionReadPersonal" :: read personal user data like user settings and auth links + // - "ActionUpdate" :: update an existing user + // - "ActionUpdatePersonal" :: update personal data + ResourceUser = Object{ + Type: "user", + } + + // ResourceWorkspace + // Valid Actions + // - "ActionApplicationConnect" :: connect to workspace apps via browser + // - "ActionCreate" :: create a new workspace + // - "ActionDelete" :: delete workspace + // - "ActionRead" :: read workspace data to view on the UI + // - "ActionSSH" :: ssh into a given workspace + // - "ActionWorkspaceStart" :: allows starting a workspace + // - "ActionWorkspaceStop" :: allows stopping a workspace + // - "ActionUpdate" :: edit workspace settings (scheduling, permissions, parameters) + ResourceWorkspace = Object{ + Type: "workspace", + } + + // ResourceWorkspaceDormant + // Valid Actions + // - "ActionApplicationConnect" :: connect to workspace apps via browser + // - "ActionCreate" :: create a new workspace + // - "ActionDelete" :: delete workspace + // - "ActionRead" :: read workspace data to view on the UI + // - "ActionSSH" :: ssh into a given workspace + // - "ActionWorkspaceStart" :: allows starting a workspace + // - "ActionWorkspaceStop" :: allows stopping a workspace + // - "ActionUpdate" :: edit workspace settings (scheduling, permissions, parameters) + ResourceWorkspaceDormant = Object{ + Type: "workspace_dormant", + } + + // 
ResourceWorkspaceProxy + // Valid Actions + // - "ActionCreate" :: create a workspace proxy + // - "ActionDelete" :: delete a workspace proxy + // - "ActionRead" :: read and use a workspace proxy + // - "ActionUpdate" :: update a workspace proxy + ResourceWorkspaceProxy = Object{ + Type: "workspace_proxy", + } +) + +func AllResources() []Objecter { + return []Objecter{ + ResourceWildcard, + ResourceApiKey, + ResourceAssignOrgRole, + ResourceAssignRole, ResourceAuditLog, ResourceDebugInfo, + ResourceDeploymentConfig, ResourceDeploymentStats, - ResourceDeploymentValues, ResourceFile, ResourceGroup, ResourceLicense, - ResourceOAuth2ProviderApp, - ResourceOAuth2ProviderAppCodeToken, - ResourceOAuth2ProviderAppSecret, - ResourceOrgRoleAssignment, + ResourceOauth2App, + ResourceOauth2AppCodeToken, + ResourceOauth2AppSecret, ResourceOrganization, ResourceOrganizationMember, ResourceProvisionerDaemon, ResourceReplicas, - ResourceRoleAssignment, ResourceSystem, ResourceTailnetCoordinator, ResourceTemplate, - ResourceTemplateInsights, ResourceUser, - ResourceUserData, - ResourceUserWorkspaceBuildParameters, - ResourceWildcard, ResourceWorkspace, - ResourceWorkspaceApplicationConnect, - ResourceWorkspaceBuild, ResourceWorkspaceDormant, - ResourceWorkspaceExecution, ResourceWorkspaceProxy, } } + +func AllActions() []policy.Action { + return []policy.Action{ + policy.ActionApplicationConnect, + policy.ActionAssign, + policy.ActionCreate, + policy.ActionDelete, + policy.ActionRead, + policy.ActionReadPersonal, + policy.ActionSSH, + policy.ActionUpdate, + policy.ActionUpdatePersonal, + policy.ActionUse, + policy.ActionViewInsights, + policy.ActionWorkspaceStart, + policy.ActionWorkspaceStop, + } +} diff --git a/coderd/rbac/object_test.go b/coderd/rbac/object_test.go index 373119f7f0e57..ea6031f2ccae8 100644 --- a/coderd/rbac/object_test.go +++ b/coderd/rbac/object_test.go @@ -184,14 +184,14 @@ func TestAllResources(t *testing.T) { var typeNames []string resources := 
rbac.AllResources() for _, r := range resources { - if r.Type == "" { - t.Errorf("empty type name: %s", r.Type) + if r.RBACObject().Type == "" { + t.Errorf("empty type name: %s", r.RBACObject().Type) continue } - if slice.Contains(typeNames, r.Type) { - t.Errorf("duplicate type name: %s", r.Type) + if slice.Contains(typeNames, r.RBACObject().Type) { + t.Errorf("duplicate type name: %s", r.RBACObject().Type) continue } - typeNames = append(typeNames, r.Type) + typeNames = append(typeNames, r.RBACObject().Type) } } diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index a3c0dc9f3436b..26afb0e011ca7 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -1,5 +1,7 @@ package policy +const WildcardSymbol = "*" + // Action represents the allowed actions to be done on an object. type Action string @@ -8,4 +10,236 @@ const ( ActionRead Action = "read" ActionUpdate Action = "update" ActionDelete Action = "delete" + + ActionUse Action = "use" + ActionSSH Action = "ssh" + ActionApplicationConnect Action = "application_connect" + ActionViewInsights Action = "view_insights" + + ActionWorkspaceStart Action = "start" + ActionWorkspaceStop Action = "stop" + + ActionAssign Action = "assign" + + ActionReadPersonal Action = "read_personal" + ActionUpdatePersonal Action = "update_personal" ) + +type PermissionDefinition struct { + // name is optional. Used to override "Type" for function naming. + Name string + // Actions are a map of actions to some description of what the action + // should represent. The key in the actions map is the verb to use + // in the rbac policy. + Actions map[Action]ActionDefinition +} + +type ActionDefinition struct { + // Human friendly description to explain the action. 
+ Description string +} + +func actDef(description string) ActionDefinition { + return ActionDefinition{ + Description: description, + } +} + +var workspaceActions = map[Action]ActionDefinition{ + ActionCreate: actDef("create a new workspace"), + ActionRead: actDef("read workspace data to view on the UI"), + // TODO: Make updates more granular + ActionUpdate: actDef("edit workspace settings (scheduling, permissions, parameters)"), + ActionDelete: actDef("delete workspace"), + + // Workspace provisioning. Start & stop are different so dormant workspaces can be + // stopped, but not stared. + ActionWorkspaceStart: actDef("allows starting a workspace"), + ActionWorkspaceStop: actDef("allows stopping a workspace"), + + // Running a workspace + ActionSSH: actDef("ssh into a given workspace"), + ActionApplicationConnect: actDef("connect to workspace apps via browser"), +} + +// RBACPermissions is indexed by the type +var RBACPermissions = map[string]PermissionDefinition{ + // Wildcard is every object, and the action "*" provides all actions. + // So can grant all actions on all types. + WildcardSymbol: { + Name: "Wildcard", + Actions: map[Action]ActionDefinition{}, + }, + "user": { + Actions: map[Action]ActionDefinition{ + // Actions deal with site wide user objects. + ActionRead: actDef("read user data"), + ActionCreate: actDef("create a new user"), + ActionUpdate: actDef("update an existing user"), + ActionDelete: actDef("delete an existing user"), + + ActionReadPersonal: actDef("read personal user data like user settings and auth links"), + ActionUpdatePersonal: actDef("update personal data"), + }, + }, + "workspace": { + Actions: workspaceActions, + }, + // Dormant workspaces have the same perms as workspaces. 
+ "workspace_dormant": { + Actions: workspaceActions, + }, + "workspace_proxy": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a workspace proxy"), + ActionDelete: actDef("delete a workspace proxy"), + ActionUpdate: actDef("update a workspace proxy"), + ActionRead: actDef("read and use a workspace proxy"), + }, + }, + "license": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a license"), + ActionRead: actDef("read licenses"), + ActionDelete: actDef("delete license"), + // Licenses are immutable, so update makes no sense + }, + }, + "audit_log": { + Actions: map[Action]ActionDefinition{ + ActionRead: actDef("read audit logs"), + ActionCreate: actDef("create new audit log entries"), + }, + }, + "deployment_config": { + Actions: map[Action]ActionDefinition{ + ActionRead: actDef("read deployment config"), + ActionUpdate: actDef("updating health information"), + }, + }, + "deployment_stats": { + Actions: map[Action]ActionDefinition{ + ActionRead: actDef("read deployment stats"), + }, + }, + "replicas": { + Actions: map[Action]ActionDefinition{ + ActionRead: actDef("read replicas"), + }, + }, + "template": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a template"), + // TODO: Create a use permission maybe? + ActionRead: actDef("read template"), + ActionUpdate: actDef("update a template"), + ActionDelete: actDef("delete a template"), + ActionViewInsights: actDef("view insights"), + }, + }, + "group": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a group"), + ActionRead: actDef("read groups"), + ActionDelete: actDef("delete a group"), + ActionUpdate: actDef("update a group"), + }, + }, + "file": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a file"), + ActionRead: actDef("read files"), + }, + }, + "provisioner_daemon": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create a provisioner daemon"), + // TODO: Move to use? 
+ ActionRead: actDef("read provisioner daemon"), + ActionUpdate: actDef("update a provisioner daemon"), + ActionDelete: actDef("delete a provisioner daemon"), + }, + }, + "organization": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create an organization"), + ActionRead: actDef("read organizations"), + ActionUpdate: actDef("update an organization"), + ActionDelete: actDef("delete an organization"), + }, + }, + "organization_member": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create an organization member"), + ActionRead: actDef("read member"), + ActionUpdate: actDef("update an organization member"), + ActionDelete: actDef("delete member"), + }, + }, + "debug_info": { + Actions: map[Action]ActionDefinition{ + ActionRead: actDef("access to debug routes"), + }, + }, + "system": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create system resources"), + ActionRead: actDef("view system resources"), + ActionUpdate: actDef("update system resources"), + ActionDelete: actDef("delete system resources"), + }, + }, + "api_key": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("create an api key"), + ActionRead: actDef("read api key details (secrets are not stored)"), + ActionDelete: actDef("delete an api key"), + ActionUpdate: actDef("update an api key, eg expires"), + }, + }, + "tailnet_coordinator": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef(""), + ActionRead: actDef(""), + ActionUpdate: actDef(""), + ActionDelete: actDef(""), + }, + }, + "assign_role": { + Actions: map[Action]ActionDefinition{ + ActionAssign: actDef("ability to assign roles"), + ActionRead: actDef("view what roles are assignable"), + ActionDelete: actDef("ability to delete roles"), + }, + }, + "assign_org_role": { + Actions: map[Action]ActionDefinition{ + ActionAssign: actDef("ability to assign org scoped roles"), + ActionRead: actDef("view what roles are assignable"), + ActionDelete: actDef("ability to 
delete org scoped roles"), + }, + }, + "oauth2_app": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef("make an OAuth2 app."), + ActionRead: actDef("read OAuth2 apps"), + ActionUpdate: actDef("update the properties of the OAuth2 app."), + ActionDelete: actDef("delete an OAuth2 app"), + }, + }, + "oauth2_app_secret": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef(""), + ActionRead: actDef(""), + ActionUpdate: actDef(""), + ActionDelete: actDef(""), + }, + }, + "oauth2_app_code_token": { + Actions: map[Action]ActionDefinition{ + ActionCreate: actDef(""), + ActionRead: actDef(""), + ActionDelete: actDef(""), + }, + }, +} diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index f69cf49174f60..cee365d06624c 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -10,6 +10,7 @@ import ( "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/util/slice" ) const ( @@ -70,28 +71,28 @@ func RoleOrgMember(organizationID uuid.UUID) string { return roleName(orgMember, organizationID.String()) } -func allPermsExcept(excepts ...Object) []Permission { +func allPermsExcept(excepts ...Objecter) []Permission { resources := AllResources() var perms []Permission skip := make(map[string]bool) for _, e := range excepts { - skip[e.Type] = true + skip[e.RBACObject().Type] = true } for _, r := range resources { // Exceptions - if skip[r.Type] { + if skip[r.RBACObject().Type] { continue } // This should always be skipped. 
- if r.Type == ResourceWildcard.Type { + if r.RBACObject().Type == ResourceWildcard.Type { continue } // Owners can do everything else perms = append(perms, Permission{ Negate: false, - ResourceType: r.Type, - Action: WildcardSymbol, + ResourceType: r.RBACObject().Type, + Action: policy.WildcardSymbol, }) } return perms @@ -123,12 +124,12 @@ func ReloadBuiltinRoles(opts *RoleOptions) { opts = &RoleOptions{} } - ownerAndAdminExceptions := []Object{ResourceWorkspaceDormant} + ownerWorkspaceActions := ResourceWorkspace.AvailableActions() if opts.NoOwnerWorkspaceExec { - ownerAndAdminExceptions = append(ownerAndAdminExceptions, - ResourceWorkspaceExecution, - ResourceWorkspaceApplicationConnect, - ) + // Remove ssh and application connect from the owner role. This + // prevents owners from have exec access to all workspaces. + ownerWorkspaceActions = slice.Omit(ownerWorkspaceActions, + policy.ActionApplicationConnect, policy.ActionSSH) } // Static roles that never change should be allocated in a closure. @@ -138,30 +139,41 @@ func ReloadBuiltinRoles(opts *RoleOptions) { ownerRole := Role{ Name: owner, DisplayName: "Owner", - Site: allPermsExcept(ownerAndAdminExceptions...), - Org: map[string][]Permission{}, - User: []Permission{}, + Site: append( + // Workspace dormancy and workspace are omitted. + // Workspace is specifically handled based on the opts.NoOwnerWorkspaceExec + allPermsExcept(ResourceWorkspaceDormant, ResourceWorkspace), + // This adds back in the Workspace permissions. 
+ Permissions(map[string][]policy.Action{ + ResourceWorkspace.Type: ownerWorkspaceActions, + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + })...), + Org: map[string][]Permission{}, + User: []Permission{}, }.withCachedRegoValue() memberRole := Role{ Name: member, DisplayName: "Member", Site: Permissions(map[string][]policy.Action{ - ResourceRoleAssignment.Type: {policy.ActionRead}, + ResourceAssignRole.Type: {policy.ActionRead}, // All users can see the provisioner daemons. ResourceProvisionerDaemon.Type: {policy.ActionRead}, // All users can see OAuth2 provider applications. - ResourceOAuth2ProviderApp.Type: {policy.ActionRead}, + ResourceOauth2App.Type: {policy.ActionRead}, + ResourceWorkspaceProxy.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: append(allPermsExcept(ResourceWorkspaceDormant, ResourceUser, ResourceOrganizationMember), Permissions(map[string][]policy.Action{ + // Reduced permission set on dormant workspaces. No build, ssh, or exec + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + // Users cannot do create/update/delete on themselves, but they // can read their own details. - ResourceUser.Type: {policy.ActionRead}, - ResourceUserWorkspaceBuildParameters.Type: {policy.ActionRead}, + ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, // Users can create provisioner daemons scoped to themselves. 
- ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, + ResourceProvisionerDaemon.Type: {policy.ActionRead, policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, })..., ), }.withCachedRegoValue() @@ -172,14 +184,13 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Site: Permissions(map[string][]policy.Action{ // Should be able to read all template details, even in orgs they // are not in. - ResourceTemplate.Type: {policy.ActionRead}, - ResourceTemplateInsights.Type: {policy.ActionRead}, - ResourceAuditLog.Type: {policy.ActionRead}, - ResourceUser.Type: {policy.ActionRead}, - ResourceGroup.Type: {policy.ActionRead}, + ResourceTemplate.Type: {policy.ActionRead, policy.ActionViewInsights}, + ResourceAuditLog.Type: {policy.ActionRead}, + ResourceUser.Type: {policy.ActionRead}, + ResourceGroup.Type: {policy.ActionRead}, // Allow auditors to query deployment stats and insights. ResourceDeploymentStats.Type: {policy.ActionRead}, - ResourceDeploymentValues.Type: {policy.ActionRead}, + ResourceDeploymentConfig.Type: {policy.ActionRead}, // Org roles are not really used yet, so grant the perm at the site level. ResourceOrganizationMember.Type: {policy.ActionRead}, }), @@ -191,9 +202,9 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Name: templateAdmin, DisplayName: "Template Admin", Site: Permissions(map[string][]policy.Action{ - ResourceTemplate.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceTemplate.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete, policy.ActionViewInsights}, // CRUD all files, even those they did not upload. - ResourceFile.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceFile.Type: {policy.ActionCreate, policy.ActionRead}, ResourceWorkspace.Type: {policy.ActionRead}, // CRUD to provisioner daemons for now. 
ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, @@ -203,8 +214,6 @@ func ReloadBuiltinRoles(opts *RoleOptions) { ResourceGroup.Type: {policy.ActionRead}, // Org roles are not really used yet, so grant the perm at the site level. ResourceOrganizationMember.Type: {policy.ActionRead}, - // Template admins can read all template insights data - ResourceTemplateInsights.Type: {policy.ActionRead}, }), Org: map[string][]Permission{}, User: []Permission{}, @@ -214,10 +223,11 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Name: userAdmin, DisplayName: "User Admin", Site: Permissions(map[string][]policy.Action{ - ResourceRoleAssignment.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceUser.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceUserData.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceUserWorkspaceBuildParameters.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + ResourceAssignRole.Type: {policy.ActionAssign, policy.ActionDelete, policy.ActionRead}, + ResourceUser.Type: { + policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete, + policy.ActionUpdatePersonal, policy.ActionReadPersonal, + }, // Full perms to manage org members ResourceOrganizationMember.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, ResourceGroup.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, @@ -261,7 +271,10 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Site: []Permission{}, Org: map[string][]Permission{ // Org admins should not have workspace exec perms. 
- organizationID: allPermsExcept(ResourceWorkspaceExecution, ResourceWorkspaceDormant), + organizationID: append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant), Permissions(map[string][]policy.Action{ + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH), + })...), }, User: []Permission{}, } @@ -283,7 +296,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { }, { // Can read available roles. - ResourceType: ResourceOrgRoleAssignment.Type, + ResourceType: ResourceAssignOrgRole.Type, Action: policy.ActionRead, }, }, @@ -523,7 +536,7 @@ func SiteRoles() []Role { // ChangeRoleSet is a helper function that finds the difference of 2 sets of // roles. When setting a user's new roles, it is equivalent to adding and // removing roles. This set determines the changes, so that the appropriate -// RBAC checks can be applied using "policy.ActionCreate" and "policy.ActionDelete" for +// RBAC checks can be applied using "ActionCreate" and "ActionDelete" for // "added" and "removed" roles respectively. 
func ChangeRoleSet(from []string, to []string) (added []string, removed []string) { has := make(map[string]struct{}) diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index b5e78e606b8d4..44ef83b74cd20 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -34,10 +34,10 @@ func TestOwnerExec(t *testing.T) { }) t.Cleanup(func() { rbac.ReloadBuiltinRoles(nil) }) - auth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) // Exec a random workspace - err := auth.Authorize(context.Background(), owner, policy.ActionCreate, - rbac.ResourceWorkspaceExecution.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) + err := auth.Authorize(context.Background(), owner, policy.ActionSSH, + rbac.ResourceWorkspace.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) require.ErrorAsf(t, err, &rbac.UnauthorizedError{}, "expected unauthorized error") }) @@ -47,20 +47,22 @@ func TestOwnerExec(t *testing.T) { }) t.Cleanup(func() { rbac.ReloadBuiltinRoles(nil) }) - auth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) // Exec a random workspace - err := auth.Authorize(context.Background(), owner, policy.ActionCreate, - rbac.ResourceWorkspaceExecution.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) + err := auth.Authorize(context.Background(), owner, policy.ActionSSH, + rbac.ResourceWorkspace.WithID(uuid.New()).InOrg(uuid.New()).WithOwner(uuid.NewString())) require.NoError(t, err, "expected owner can") }) } -// TODO: add the SYSTEM to the MATRIX +// nolint:tparallel,paralleltest -- subtests share a map, just run sequentially. 
func TestRolePermissions(t *testing.T) { t.Parallel() - auth := rbac.NewCachingAuthorizer(prometheus.NewRegistry()) + crud := []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete} + + auth := rbac.NewStrictCachingAuthorizer(prometheus.NewRegistry()) // currentUser is anything that references "me", "mine", or "my". currentUser := uuid.New() @@ -145,8 +147,8 @@ func TestRolePermissions(t *testing.T) { { Name: "MyWorkspaceInOrgExecution", // When creating the WithID won't be set, but it does not change the result. - Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceWorkspaceExecution.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), + Actions: []policy.Action{policy.ActionSSH}, + Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe}, false: {orgAdmin, memberMe, otherOrgAdmin, otherOrgMember, templateAdmin, userAdmin}, @@ -155,16 +157,16 @@ func TestRolePermissions(t *testing.T) { { Name: "MyWorkspaceInOrgAppConnect", // When creating the WithID won't be set, but it does not change the result. 
- Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceWorkspaceApplicationConnect.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), + Actions: []policy.Action{policy.ActionApplicationConnect}, + Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ - true: {owner, orgAdmin, orgMemberMe}, - false: {memberMe, otherOrgAdmin, otherOrgMember, templateAdmin, userAdmin}, + true: {owner, orgMemberMe}, + false: {memberMe, otherOrgAdmin, otherOrgMember, templateAdmin, userAdmin, orgAdmin}, }, }, { Name: "Templates", - Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete, policy.ActionViewInsights}, Resource: rbac.ResourceTemplate.WithID(templateID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, templateAdmin}, @@ -191,7 +193,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "MyFile", - Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead}, Resource: rbac.ResourceFile.WithID(fileID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, memberMe, orgMemberMe, templateAdmin}, @@ -227,8 +229,8 @@ func TestRolePermissions(t *testing.T) { }, { Name: "RoleAssignment", - Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceRoleAssignment, + Actions: []policy.Action{policy.ActionAssign, policy.ActionDelete}, + Resource: rbac.ResourceAssignRole, AuthorizeMap: map[bool][]authSubject{ true: {owner, userAdmin}, false: {orgAdmin, orgMemberMe, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin}, @@ -237,7 +239,7 @@ func TestRolePermissions(t 
*testing.T) { { Name: "ReadRoleAssignment", Actions: []policy.Action{policy.ActionRead}, - Resource: rbac.ResourceRoleAssignment, + Resource: rbac.ResourceAssignRole, AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin, userAdmin}, false: {}, @@ -245,8 +247,8 @@ func TestRolePermissions(t *testing.T) { }, { Name: "OrgRoleAssignment", - Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceOrgRoleAssignment.InOrg(orgID), + Actions: []policy.Action{policy.ActionAssign, policy.ActionDelete}, + Resource: rbac.ResourceAssignOrgRole.InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin}, false: {orgMemberMe, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin, userAdmin}, @@ -255,7 +257,7 @@ func TestRolePermissions(t *testing.T) { { Name: "ReadOrgRoleAssignment", Actions: []policy.Action{policy.ActionRead}, - Resource: rbac.ResourceOrgRoleAssignment.InOrg(orgID), + Resource: rbac.ResourceAssignOrgRole.InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe}, false: {otherOrgAdmin, otherOrgMember, memberMe, templateAdmin, userAdmin}, @@ -263,8 +265,8 @@ func TestRolePermissions(t *testing.T) { }, { Name: "APIKey", - Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceAPIKey.WithID(apiKeyID).WithOwner(currentUser.String()), + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionDelete, policy.ActionUpdate}, + Resource: rbac.ResourceApiKey.WithID(apiKeyID).WithOwner(currentUser.String()), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, memberMe}, false: {orgAdmin, otherOrgAdmin, otherOrgMember, templateAdmin, userAdmin}, @@ -272,8 +274,8 @@ func TestRolePermissions(t *testing.T) { }, { Name: "UserData", - Actions: []policy.Action{policy.ActionCreate, 
policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - Resource: rbac.ResourceUserData.WithID(currentUser).WithOwner(currentUser.String()), + Actions: []policy.Action{policy.ActionReadPersonal, policy.ActionUpdatePersonal}, + Resource: rbac.ResourceUserObject(currentUser), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgMemberMe, memberMe, userAdmin}, false: {orgAdmin, otherOrgAdmin, otherOrgMember, templateAdmin}, @@ -312,6 +314,15 @@ func TestRolePermissions(t *testing.T) { }, { Name: "Groups", + Actions: []policy.Action{policy.ActionCreate, policy.ActionDelete, policy.ActionUpdate}, + Resource: rbac.ResourceGroup.WithID(groupID).InOrg(orgID), + AuthorizeMap: map[bool][]authSubject{ + true: {owner, orgAdmin, userAdmin}, + false: {memberMe, otherOrgAdmin, orgMemberMe, otherOrgMember, templateAdmin}, + }, + }, + { + Name: "GroupsRead", Actions: []policy.Action{policy.ActionRead}, Resource: rbac.ResourceGroup.WithID(groupID).InOrg(orgID), AuthorizeMap: map[bool][]authSubject{ @@ -321,7 +332,16 @@ func TestRolePermissions(t *testing.T) { }, { Name: "WorkspaceDormant", - Actions: rbac.AllActions(), + Actions: append(crud, policy.ActionWorkspaceStop), + Resource: rbac.ResourceWorkspaceDormant.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), + AuthorizeMap: map[bool][]authSubject{ + true: {orgMemberMe, orgAdmin, owner}, + false: {userAdmin, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin}, + }, + }, + { + Name: "WorkspaceDormantUse", + Actions: []policy.Action{policy.ActionWorkspaceStart, policy.ActionApplicationConnect, policy.ActionSSH}, Resource: rbac.ResourceWorkspaceDormant.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), AuthorizeMap: map[bool][]authSubject{ true: {}, @@ -330,25 +350,198 @@ func TestRolePermissions(t *testing.T) { }, { Name: "WorkspaceBuild", - Actions: rbac.AllActions(), - Resource: rbac.ResourceWorkspaceBuild.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), + Actions: 
[]policy.Action{policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, + Resource: rbac.ResourceWorkspace.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), AuthorizeMap: map[bool][]authSubject{ true: {owner, orgAdmin, orgMemberMe}, false: {userAdmin, otherOrgAdmin, otherOrgMember, templateAdmin, memberMe}, }, }, + // Some admin style resources + { + Name: "Licences", + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionDelete}, + Resource: rbac.ResourceLicense, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "DeploymentStats", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceDeploymentStats, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "DeploymentConfig", + Actions: []policy.Action{policy.ActionRead, policy.ActionUpdate}, + Resource: rbac.ResourceDeploymentConfig, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "DebugInfo", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceDebugInfo, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "Replicas", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceReplicas, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "TailnetCoordinator", + Actions: crud, + Resource: rbac.ResourceTailnetCoordinator, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, 
otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "AuditLogs", + Actions: []policy.Action{policy.ActionRead, policy.ActionCreate}, + Resource: rbac.ResourceAuditLog, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "ProvisionerDaemons", + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + Resource: rbac.ResourceProvisionerDaemon.InOrg(orgID), + AuthorizeMap: map[bool][]authSubject{ + true: {owner, templateAdmin, orgAdmin}, + false: {otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, userAdmin}, + }, + }, + { + Name: "ProvisionerDaemonsRead", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceProvisionerDaemon.InOrg(orgID), + AuthorizeMap: map[bool][]authSubject{ + // This should be fixed when multi-org goes live + true: {owner, templateAdmin, orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, userAdmin}, + false: {}, + }, + }, + { + Name: "UserProvisionerDaemons", + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + Resource: rbac.ResourceProvisionerDaemon.WithOwner(currentUser.String()).InOrg(orgID), + AuthorizeMap: map[bool][]authSubject{ + true: {owner, templateAdmin, orgMemberMe, orgAdmin}, + false: {memberMe, otherOrgAdmin, otherOrgMember, userAdmin}, + }, + }, + { + Name: "System", + Actions: crud, + Resource: rbac.ResourceSystem, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "Oauth2App", + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + Resource: rbac.ResourceOauth2App, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, 
orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "Oauth2AppRead", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceOauth2App, + AuthorizeMap: map[bool][]authSubject{ + true: {owner, orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + false: {}, + }, + }, + { + Name: "Oauth2AppSecret", + Actions: crud, + Resource: rbac.ResourceOauth2AppSecret, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "Oauth2Token", + Actions: []policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionDelete}, + Resource: rbac.ResourceOauth2AppCodeToken, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "WorkspaceProxy", + Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, + Resource: rbac.ResourceWorkspaceProxy, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + }, + }, + { + Name: "WorkspaceProxyRead", + Actions: []policy.Action{policy.ActionRead}, + Resource: rbac.ResourceWorkspaceProxy, + AuthorizeMap: map[bool][]authSubject{ + true: {owner, orgAdmin, otherOrgAdmin, otherOrgMember, memberMe, orgMemberMe, templateAdmin, userAdmin}, + false: {}, + }, + }, + } + + // We expect every permission to be tested above. 
+ remainingPermissions := make(map[string]map[policy.Action]bool) + for rtype, perms := range policy.RBACPermissions { + remainingPermissions[rtype] = make(map[policy.Action]bool) + for action := range perms.Actions { + remainingPermissions[rtype][action] = true + } } + passed := true + // nolint:tparallel,paralleltest for _, c := range testCases { c := c + // nolint:tparallel,paralleltest -- These share the same remainingPermissions map t.Run(c.Name, func(t *testing.T) { - t.Parallel() remainingSubjs := make(map[string]struct{}) for _, subj := range requiredSubjects { remainingSubjs[subj.Name] = struct{}{} } for _, action := range c.Actions { + err := c.Resource.ValidAction(action) + ok := assert.NoError(t, err, "%q is not a valid action for type %q", action, c.Resource.Type) + if !ok { + passed = passed && assert.NoError(t, err, "%q is not a valid action for type %q", action, c.Resource.Type) + continue + } + for result, subjs := range c.AuthorizeMap { for _, subj := range subjs { delete(remainingSubjs, subj.Name) @@ -359,11 +552,13 @@ func TestRolePermissions(t *testing.T) { if actor.Scope == nil { actor.Scope = rbac.ScopeAll } + + delete(remainingPermissions[c.Resource.Type], action) err := auth.Authorize(context.Background(), actor, action, c.Resource) if result { - assert.NoError(t, err, fmt.Sprintf("Should pass: %s", msg)) + passed = passed && assert.NoError(t, err, fmt.Sprintf("Should pass: %s", msg)) } else { - assert.ErrorContains(t, err, "forbidden", fmt.Sprintf("Should fail: %s", msg)) + passed = passed && assert.ErrorContains(t, err, "forbidden", fmt.Sprintf("Should fail: %s", msg)) } } } @@ -371,6 +566,18 @@ func TestRolePermissions(t *testing.T) { require.Empty(t, remainingSubjs, "test should cover all subjects") }) } + + // Only run these if the tests on top passed. Otherwise, the error output is too noisy. 
+ if passed { + for rtype, v := range remainingPermissions { + // nolint:tparallel,paralleltest -- Making a subtest for easier diagnosing failures. + t.Run(fmt.Sprintf("%s-AllActions", rtype), func(t *testing.T) { + if len(v) > 0 { + assert.Equal(t, map[policy.Action]bool{}, v, "remaining permissions should be empty for type %q", rtype) + } + }) + } + } } func TestIsOrgRole(t *testing.T) { diff --git a/coderd/rbac/scopes.go b/coderd/rbac/scopes.go index 6353ca3c67919..3eccd8194f31a 100644 --- a/coderd/rbac/scopes.go +++ b/coderd/rbac/scopes.go @@ -61,12 +61,12 @@ var builtinScopes = map[ScopeName]Scope{ Name: fmt.Sprintf("Scope_%s", ScopeAll), DisplayName: "All operations", Site: Permissions(map[string][]policy.Action{ - ResourceWildcard.Type: {WildcardSymbol}, + ResourceWildcard.Type: {policy.WildcardSymbol}, }), Org: map[string][]Permission{}, User: []Permission{}, }, - AllowIDList: []string{WildcardSymbol}, + AllowIDList: []string{policy.WildcardSymbol}, }, ScopeApplicationConnect: { @@ -74,12 +74,12 @@ var builtinScopes = map[ScopeName]Scope{ Name: fmt.Sprintf("Scope_%s", ScopeApplicationConnect), DisplayName: "Ability to connect to applications", Site: Permissions(map[string][]policy.Action{ - ResourceWorkspaceApplicationConnect.Type: {policy.ActionCreate}, + ResourceWorkspace.Type: {policy.ActionApplicationConnect}, }), Org: map[string][]Permission{}, User: []Permission{}, }, - AllowIDList: []string{WildcardSymbol}, + AllowIDList: []string{policy.WildcardSymbol}, }, } diff --git a/coderd/roles.go b/coderd/roles.go index 1cc74535119e3..5665e298f0e5d 100644 --- a/coderd/roles.go +++ b/coderd/roles.go @@ -23,7 +23,7 @@ import ( func (api *API) assignableSiteRoles(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() actorRoles := httpmw.UserAuthorization(r) - if !api.Authorize(r, policy.ActionRead, rbac.ResourceRoleAssignment) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceAssignRole) { httpapi.Forbidden(rw) return } @@ -47,7 +47,7 @@ func (api 
*API) assignableOrgRoles(rw http.ResponseWriter, r *http.Request) { organization := httpmw.OrganizationParam(r) actorRoles := httpmw.UserAuthorization(r) - if !api.Authorize(r, policy.ActionRead, rbac.ResourceOrgRoleAssignment.InOrg(organization.ID)) { + if !api.Authorize(r, policy.ActionRead, rbac.ResourceAssignOrgRole.InOrg(organization.ID)) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/users.go b/coderd/users.go index c698661d71429..c8ca04e390c7f 100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -1022,7 +1022,7 @@ func (api *API) userRoles(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() user := httpmw.UserParam(r) - if !api.Authorize(r, policy.ActionRead, user.UserDataRBACObject()) { + if !api.Authorize(r, policy.ActionReadPersonal, user) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/util/slice/slice.go b/coderd/util/slice/slice.go index 8586aae770610..f06930f373557 100644 --- a/coderd/util/slice/slice.go +++ b/coderd/util/slice/slice.go @@ -4,6 +4,18 @@ import ( "golang.org/x/exp/constraints" ) +// Omit creates a new slice with the arguments omitted from the list. +func Omit[T comparable](a []T, omits ...T) []T { + tmp := make([]T, 0, len(a)) + for _, v := range a { + if Contains(omits, v) { + continue + } + tmp = append(tmp, v) + } + return tmp +} + // SameElements returns true if the 2 lists have the same elements in any // order. 
func SameElements[T comparable](a []T, b []T) bool { diff --git a/coderd/util/slice/slice_test.go b/coderd/util/slice/slice_test.go index cf686f3de4a48..ef947a13e7659 100644 --- a/coderd/util/slice/slice_test.go +++ b/coderd/util/slice/slice_test.go @@ -123,3 +123,11 @@ func TestDescending(t *testing.T) { assert.Equal(t, 0, slice.Descending(1, 1)) assert.Equal(t, -1, slice.Descending(2, 1)) } + +func TestOmit(t *testing.T) { + t.Parallel() + + assert.Equal(t, []string{"a", "b", "f"}, + slice.Omit([]string{"a", "b", "c", "d", "e", "f"}, "c", "d", "e"), + ) +} diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index d79d191af9ce5..9faae72f22ef7 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -1030,7 +1030,7 @@ func (api *API) workspaceAgentClientCoordinate(rw http.ResponseWriter, r *http.R // This route accepts user API key auth and workspace proxy auth. The moon actor has // full permissions so should be able to pass this authz check. workspace := httpmw.WorkspaceParam(r) - if !api.Authorize(r, policy.ActionCreate, workspace.ExecutionRBAC()) { + if !api.Authorize(r, policy.ActionSSH, workspace) { httpapi.ResourceNotFound(rw) return } diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go index 5ba60fbb58687..851d8ff144eb0 100644 --- a/coderd/workspaceapps/apptest/apptest.go +++ b/coderd/workspaceapps/apptest/apptest.go @@ -541,32 +541,31 @@ func Run(t *testing.T, appHostIsPrimary bool, factory DeploymentFactory) { appTokenAPIClient.HTTPClient.Transport = appDetails.SDKClient.HTTPClient.Transport var ( - canCreateApplicationConnect = "can-create-application_connect" - canReadUserMe = "can-read-user-me" + canApplicationConnect = "can-create-application_connect" + canReadUserMe = "can-read-user-me" ) authRes, err := appTokenAPIClient.AuthCheck(ctx, codersdk.AuthorizationRequest{ Checks: map[string]codersdk.AuthorizationCheck{ - canCreateApplicationConnect: { + canApplicationConnect: { 
Object: codersdk.AuthorizationObject{ - ResourceType: "application_connect", - OwnerID: "me", + ResourceType: "workspace", + OwnerID: appDetails.FirstUser.UserID.String(), OrganizationID: appDetails.FirstUser.OrganizationID.String(), }, - Action: "create", + Action: codersdk.ActionApplicationConnect, }, canReadUserMe: { Object: codersdk.AuthorizationObject{ ResourceType: "user", - OwnerID: "me", ResourceID: appDetails.FirstUser.UserID.String(), }, - Action: "read", + Action: codersdk.ActionRead, }, }, }) require.NoError(t, err) - require.True(t, authRes[canCreateApplicationConnect]) + require.True(t, authRes[canApplicationConnect]) require.False(t, authRes[canReadUserMe]) // Load the application page with the API key set. diff --git a/coderd/workspaceapps/db.go b/coderd/workspaceapps/db.go index 144de2f2573f9..1b369cf6d6ef4 100644 --- a/coderd/workspaceapps/db.go +++ b/coderd/workspaceapps/db.go @@ -282,16 +282,16 @@ func (p *DBTokenProvider) authorizeRequest(ctx context.Context, roles *rbac.Subj // Figure out which RBAC resource to check. For terminals we use execution // instead of application connect. var ( - rbacAction policy.Action = policy.ActionCreate - rbacResource rbac.Object = dbReq.Workspace.ApplicationConnectRBAC() + rbacAction policy.Action = policy.ActionApplicationConnect + rbacResource rbac.Object = dbReq.Workspace.RBACObject() // rbacResourceOwned is for the level "authenticated". We still need to // make sure the API key has permissions to connect to the actor's own // workspace. Scopes would prevent this. 
- rbacResourceOwned rbac.Object = rbac.ResourceWorkspaceApplicationConnect.WithOwner(roles.ID) + rbacResourceOwned rbac.Object = rbac.ResourceWorkspace.WithOwner(roles.ID) ) if dbReq.AccessMethod == AccessMethodTerminal { - rbacResource = dbReq.Workspace.ExecutionRBAC() - rbacResourceOwned = rbac.ResourceWorkspaceExecution.WithOwner(roles.ID) + rbacAction = policy.ActionSSH + rbacResourceOwned = rbac.ResourceWorkspace.WithOwner(roles.ID) } // Do a standard RBAC check. This accounts for share level "owner" and any diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 3959c0e55a428..b34eb9ce3c858 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -665,7 +665,7 @@ func (b *Builder) authorize(authFunc func(action policy.Action, object rbac.Obje } } - if b.logLevel != "" && !authFunc(policy.ActionRead, rbac.ResourceDeploymentValues) { + if b.logLevel != "" && !authFunc(policy.ActionRead, rbac.ResourceDeploymentConfig) { return BuildError{ http.StatusBadRequest, "Workspace builds with a custom log level are restricted to administrators only.", diff --git a/codersdk/authorization.go b/codersdk/authorization.go index 4e8a6eed7019f..c3cff7abed149 100644 --- a/codersdk/authorization.go +++ b/codersdk/authorization.go @@ -32,7 +32,7 @@ type AuthorizationCheck struct { // Omitting the 'OrganizationID' could produce the incorrect value, as // workspaces have both `user` and `organization` owners. 
Object AuthorizationObject `json:"object"` - Action string `json:"action" enums:"create,read,update,delete"` + Action RBACAction `json:"action" enums:"create,read,update,delete"` } // AuthorizationObject can represent a "set" of objects, such as: all workspaces in an organization, all workspaces owned by me, diff --git a/codersdk/rbacresources.go b/codersdk/rbacresources.go deleted file mode 100644 index 4b517e544e28f..0000000000000 --- a/codersdk/rbacresources.go +++ /dev/null @@ -1,77 +0,0 @@ -package codersdk - -type RBACResource string - -const ( - ResourceWorkspace RBACResource = "workspace" - ResourceWorkspaceProxy RBACResource = "workspace_proxy" - ResourceWorkspaceExecution RBACResource = "workspace_execution" - ResourceWorkspaceApplicationConnect RBACResource = "application_connect" - ResourceAuditLog RBACResource = "audit_log" - ResourceTemplate RBACResource = "template" - ResourceGroup RBACResource = "group" - ResourceFile RBACResource = "file" - ResourceProvisionerDaemon RBACResource = "provisioner_daemon" - ResourceOrganization RBACResource = "organization" - ResourceRoleAssignment RBACResource = "assign_role" - ResourceOrgRoleAssignment RBACResource = "assign_org_role" - ResourceAPIKey RBACResource = "api_key" - ResourceUser RBACResource = "user" - ResourceUserData RBACResource = "user_data" - ResourceUserWorkspaceBuildParameters RBACResource = "user_workspace_build_parameters" - ResourceOrganizationMember RBACResource = "organization_member" - ResourceLicense RBACResource = "license" - ResourceDeploymentValues RBACResource = "deployment_config" - ResourceDeploymentStats RBACResource = "deployment_stats" - ResourceReplicas RBACResource = "replicas" - ResourceDebugInfo RBACResource = "debug_info" - ResourceSystem RBACResource = "system" - ResourceTemplateInsights RBACResource = "template_insights" -) - -const ( - ActionCreate = "create" - ActionRead = "read" - ActionUpdate = "update" - ActionDelete = "delete" -) - -var ( - AllRBACResources = 
[]RBACResource{ - ResourceWorkspace, - ResourceWorkspaceProxy, - ResourceWorkspaceExecution, - ResourceWorkspaceApplicationConnect, - ResourceAuditLog, - ResourceTemplate, - ResourceGroup, - ResourceFile, - ResourceProvisionerDaemon, - ResourceOrganization, - ResourceRoleAssignment, - ResourceOrgRoleAssignment, - ResourceAPIKey, - ResourceUser, - ResourceUserData, - ResourceUserWorkspaceBuildParameters, - ResourceOrganizationMember, - ResourceLicense, - ResourceDeploymentValues, - ResourceDeploymentStats, - ResourceReplicas, - ResourceDebugInfo, - ResourceSystem, - ResourceTemplateInsights, - } - - AllRBACActions = []string{ - ActionCreate, - ActionRead, - ActionUpdate, - ActionDelete, - } -) - -func (r RBACResource) String() string { - return string(r) -} diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go new file mode 100644 index 0000000000000..9c7d9cc485128 --- /dev/null +++ b/codersdk/rbacresources_gen.go @@ -0,0 +1,50 @@ +// Code generated by rbacgen/main.go. DO NOT EDIT. 
+package codersdk + +type RBACResource string + +const ( + ResourceWildcard RBACResource = "*" + ResourceApiKey RBACResource = "api_key" + ResourceAssignOrgRole RBACResource = "assign_org_role" + ResourceAssignRole RBACResource = "assign_role" + ResourceAuditLog RBACResource = "audit_log" + ResourceDebugInfo RBACResource = "debug_info" + ResourceDeploymentConfig RBACResource = "deployment_config" + ResourceDeploymentStats RBACResource = "deployment_stats" + ResourceFile RBACResource = "file" + ResourceGroup RBACResource = "group" + ResourceLicense RBACResource = "license" + ResourceOauth2App RBACResource = "oauth2_app" + ResourceOauth2AppCodeToken RBACResource = "oauth2_app_code_token" + ResourceOauth2AppSecret RBACResource = "oauth2_app_secret" + ResourceOrganization RBACResource = "organization" + ResourceOrganizationMember RBACResource = "organization_member" + ResourceProvisionerDaemon RBACResource = "provisioner_daemon" + ResourceReplicas RBACResource = "replicas" + ResourceSystem RBACResource = "system" + ResourceTailnetCoordinator RBACResource = "tailnet_coordinator" + ResourceTemplate RBACResource = "template" + ResourceUser RBACResource = "user" + ResourceWorkspace RBACResource = "workspace" + ResourceWorkspaceDormant RBACResource = "workspace_dormant" + ResourceWorkspaceProxy RBACResource = "workspace_proxy" +) + +type RBACAction string + +const ( + ActionApplicationConnect RBACAction = "application_connect" + ActionAssign RBACAction = "assign" + ActionCreate RBACAction = "create" + ActionDelete RBACAction = "delete" + ActionRead RBACAction = "read" + ActionReadPersonal RBACAction = "read_personal" + ActionSSH RBACAction = "ssh" + ActionUpdate RBACAction = "update" + ActionUpdatePersonal RBACAction = "update_personal" + ActionUse RBACAction = "use" + ActionViewInsights RBACAction = "view_insights" + ActionWorkspaceStart RBACAction = "start" + ActionWorkspaceStop RBACAction = "stop" +) diff --git a/docs/api/authorization.md b/docs/api/authorization.md 
index 17fc2e81d2299..94f8772183d0d 100644 --- a/docs/api/authorization.md +++ b/docs/api/authorization.md @@ -25,7 +25,7 @@ curl -X POST http://coder-server:8080/api/v2/authcheck \ "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } }, "property2": { @@ -34,7 +34,7 @@ curl -X POST http://coder-server:8080/api/v2/authcheck \ "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } } } diff --git a/docs/api/schemas.md b/docs/api/schemas.md index 68ad8c8612733..42f8f43517233 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -1071,7 +1071,7 @@ "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } } ``` @@ -1082,7 +1082,7 @@ AuthorizationCheck is used to check if the currently authenticated user (or the | Name | Type | Required | Restrictions | Description | | -------- | ------------------------------------------------------------ | -------- | ------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `action` | string | false | | | +| `action` | [codersdk.RBACAction](#codersdkrbacaction) | false | | | | `object` | [codersdk.AuthorizationObject](#codersdkauthorizationobject) | false | | Object can represent a "set" of objects, such as: all workspaces in an organization, all workspaces owned by me, and all 
workspaces across the entire product. When defining an object, use the most specific language when possible to produce the smallest set. Meaning to set as many fields on 'Object' as you can. Example, if you want to check if you can update all workspaces owned by 'me', try to also add an 'OrganizationID' to the settings. Omitting the 'OrganizationID' could produce the incorrect value, as workspaces have both `user` and `organization` owners. | #### Enumerated Values @@ -1101,7 +1101,7 @@ AuthorizationCheck is used to check if the currently authenticated user (or the "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } ``` @@ -1127,7 +1127,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } }, "property2": { @@ -1136,7 +1136,7 @@ AuthorizationObject can represent a "set" of objects, such as: all workspaces in "organization_id": "string", "owner_id": "string", "resource_id": "string", - "resource_type": "workspace" + "resource_type": "*" } } } @@ -3968,42 +3968,69 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o | `icon` | string | false | | | | `name` | string | true | | | +## codersdk.RBACAction + +```json +"application_connect" +``` + +### Properties + +#### Enumerated Values + +| Value | +| --------------------- | +| `application_connect` | +| `assign` | +| `create` | +| `delete` | +| `read` | +| `read_personal` | +| `ssh` | +| `update` | +| `update_personal` | +| `use` | +| `view_insights` | +| `start` | +| `stop` | + ## codersdk.RBACResource ```json -"workspace" +"*" ``` ### Properties #### Enumerated Values -| Value | -| --------------------------------- | -| `workspace` | -| `workspace_proxy` | -| `workspace_execution` | -| `application_connect` | -| `audit_log` | -| `template` | -| `group` | -| `file` | -| `provisioner_daemon` | -| `organization` | -| `assign_role` | -| `assign_org_role` | -| `api_key` | -| `user` | -| `user_data` | -| `user_workspace_build_parameters` | -| `organization_member` | -| `license` | -| `deployment_config` | -| `deployment_stats` | -| `replicas` | -| `debug_info` | -| `system` | -| `template_insights` | +| Value | +| ----------------------- | +| `*` | +| `api_key` | +| `assign_org_role` | +| `assign_role` | +| `audit_log` | +| `debug_info` | +| `deployment_config` | +| `deployment_stats` | +| `file` | +| `group` | +| `license` | +| `oauth2_app` | +| `oauth2_app_code_token` | +| `oauth2_app_secret` | +| `organization` | +| `organization_member` | +| `provisioner_daemon` | +| `replicas` | +| `system` | +| `tailnet_coordinator` | +| `template` | +| `user` | +| `workspace` | +| `workspace_dormant` | +| `workspace_proxy` | ## codersdk.RateLimitConfig diff --git a/enterprise/coderd/appearance.go b/enterprise/coderd/appearance.go index 5104936ac62a4..8a9d51cdb9070 100644 --- a/enterprise/coderd/appearance.go +++ b/enterprise/coderd/appearance.go @@ -137,7 +137,7 @@ func validateHexColor(color string) error { func (api *API) putAppearance(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() - if !api.Authorize(r, 
policy.ActionUpdate, rbac.ResourceDeploymentValues) { + if !api.Authorize(r, policy.ActionUpdate, rbac.ResourceDeploymentConfig) { httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ Message: "Insufficient permissions to update appearance", }) diff --git a/enterprise/coderd/authorize_test.go b/enterprise/coderd/authorize_test.go index 8a1fab590acee..30d890b0beab6 100644 --- a/enterprise/coderd/authorize_test.go +++ b/enterprise/coderd/authorize_test.go @@ -59,7 +59,7 @@ func TestCheckACLPermissions(t *testing.T) { ResourceType: codersdk.ResourceTemplate, ResourceID: template.ID.String(), }, - Action: "write", + Action: codersdk.ActionUpdate, }, } diff --git a/enterprise/coderd/coderd_test.go b/enterprise/coderd/coderd_test.go index e53b714b3fe22..d881a21e49423 100644 --- a/enterprise/coderd/coderd_test.go +++ b/enterprise/coderd/coderd_test.go @@ -500,7 +500,7 @@ func testDBAuthzRole(ctx context.Context) context.Context { Name: "testing", DisplayName: "Unit Tests", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceWildcard.Type: {rbac.WildcardSymbol}, + rbac.ResourceWildcard.Type: {policy.WildcardSymbol}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, diff --git a/enterprise/coderd/templates.go b/enterprise/coderd/templates.go index feddcce4d8372..6caf882192816 100644 --- a/enterprise/coderd/templates.go +++ b/enterprise/coderd/templates.go @@ -15,7 +15,6 @@ import ( "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/codersdk" ) @@ -310,7 +309,7 @@ func convertToTemplateRole(actions []policy.Action) codersdk.TemplateRole { switch { case len(actions) == 1 && actions[0] == policy.ActionRead: return codersdk.TemplateRoleUse - case len(actions) == 1 && actions[0] == rbac.WildcardSymbol: + case len(actions) == 1 && 
actions[0] == policy.WildcardSymbol: return codersdk.TemplateRoleAdmin } @@ -320,7 +319,7 @@ func convertToTemplateRole(actions []policy.Action) codersdk.TemplateRole { func convertSDKTemplateRole(role codersdk.TemplateRole) []policy.Action { switch role { case codersdk.TemplateRoleAdmin: - return []policy.Action{rbac.WildcardSymbol} + return []policy.Action{policy.WildcardSymbol} case codersdk.TemplateRoleUse: return []policy.Action{policy.ActionRead} } diff --git a/enterprise/tailnet/pgcoord.go b/enterprise/tailnet/pgcoord.go index 8cb9595492feb..baccfe66a7fd7 100644 --- a/enterprise/tailnet/pgcoord.go +++ b/enterprise/tailnet/pgcoord.go @@ -103,7 +103,7 @@ var pgCoordSubject = rbac.Subject{ Name: "tailnetcoordinator", DisplayName: "Tailnet Coordinator", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceTailnetCoordinator.Type: {rbac.WildcardSymbol}, + rbac.ResourceTailnetCoordinator.Type: {policy.WildcardSymbol}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, diff --git a/scripts/rbacgen/codersdk.gotmpl b/scripts/rbacgen/codersdk.gotmpl new file mode 100644 index 0000000000000..1492eaf86c2bf --- /dev/null +++ b/scripts/rbacgen/codersdk.gotmpl @@ -0,0 +1,18 @@ +// Code generated by rbacgen/main.go. DO NOT EDIT. +package codersdk + +type RBACResource string + +const ( + {{- range $element := . 
}} + Resource{{ pascalCaseName $element.FunctionName }} RBACResource = "{{ $element.Type }}" + {{- end }} +) + +type RBACAction string + +const ( + {{- range $element := actionsList }} + {{ $element.Enum }} RBACAction = "{{ $element.Value }}" + {{- end }} +) diff --git a/scripts/rbacgen/main.go b/scripts/rbacgen/main.go index d237227f693dc..38f13434c77e4 100644 --- a/scripts/rbacgen/main.go +++ b/scripts/rbacgen/main.go @@ -2,89 +2,213 @@ package main import ( "bytes" - "context" _ "embed" + "errors" + "flag" "fmt" + "go/ast" "go/format" - "go/types" + "go/parser" + "go/token" "html/template" "log" "os" - "sort" + "slices" + "strings" - "golang.org/x/tools/go/packages" + "github.com/coder/coder/v2/coderd/rbac/policy" ) -//go:embed object.gotmpl -var objectGoTpl string +//go:embed rbacobject.gotmpl +var rbacObjectTemplate string -type TplState struct { - ResourceNames []string +//go:embed codersdk.gotmpl +var codersdkTemplate string + +func usage() { + _, _ = fmt.Println("Usage: rbacgen ") + _, _ = fmt.Println("Must choose a template target.") } // main will generate a file that lists all rbac objects. // This is to provide an "AllResources" function that is always // in sync. func main() { - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() + flag.Parse() - path := "." - if len(os.Args) > 1 { - path = os.Args[1] + if len(flag.Args()) < 1 { + usage() + os.Exit(1) } - cfg := &packages.Config{ - Mode: packages.NeedTypes | packages.NeedName | packages.NeedTypesInfo | packages.NeedDeps, - Tests: false, - Context: ctx, + // It did not make sense to have 2 different generators that do essentially + // the same thing, but different format for the BE and the sdk. + // So the argument switches the go template to use. 
+ var source string + switch strings.ToLower(flag.Args()[0]) { + case "codersdk": + source = codersdkTemplate + case "rbac": + source = rbacObjectTemplate + default: + _, _ = fmt.Fprintf(os.Stderr, "%q is not a valid templte target\n", flag.Args()[0]) + usage() + os.Exit(2) } - pkgs, err := packages.Load(cfg, path) + out, err := generateRbacObjects(source) if err != nil { - log.Fatalf("Failed to load package: %s", err.Error()) + log.Fatalf("Generate source: %s", err.Error()) } - if len(pkgs) != 1 { - log.Fatalf("Expected 1 package, got %d", len(pkgs)) + formatted, err := format.Source(out) + if err != nil { + log.Fatalf("Format template: %s", err.Error()) } - rbacPkg := pkgs[0] - if rbacPkg.Name != "rbac" { - log.Fatalf("Expected rbac package, got %q", rbacPkg.Name) + _, _ = fmt.Fprint(os.Stdout, string(formatted)) +} + +func pascalCaseName[T ~string](name T) string { + names := strings.Split(string(name), "_") + for i := range names { + names[i] = capitalize(names[i]) } + return strings.Join(names, "") +} + +func capitalize(name string) string { + return strings.ToUpper(string(name[0])) + name[1:] +} - tpl, err := template.New("object.gotmpl").Parse(objectGoTpl) +type Definition struct { + policy.PermissionDefinition + Type string +} + +func (p Definition) FunctionName() string { + if p.Name != "" { + return p.Name + } + return p.Type +} + +// fileActions is required because we cannot get the variable name of the enum +// at runtime. So parse the package to get it. This is purely to ensure enum +// names are consistent, which is a bit annoying, but not too bad. 
+func fileActions(file *ast.File) map[string]string { + // actions is a map from the enum value -> enum name + actions := make(map[string]string) + + // Find the action consts +fileDeclLoop: + for _, decl := range file.Decls { + switch typedDecl := decl.(type) { + case *ast.GenDecl: + if len(typedDecl.Specs) == 0 { + continue + } + // This is the right on, loop over all idents, pull the actions + for _, spec := range typedDecl.Specs { + vSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue fileDeclLoop + } + + typeIdent, ok := vSpec.Type.(*ast.Ident) + if !ok { + continue fileDeclLoop + } + + if typeIdent.Name != "Action" || len(vSpec.Values) != 1 || len(vSpec.Names) != 1 { + continue fileDeclLoop + } + + literal, ok := vSpec.Values[0].(*ast.BasicLit) + if !ok { + continue fileDeclLoop + } + actions[strings.Trim(literal.Value, `"`)] = vSpec.Names[0].Name + } + default: + continue + } + } + return actions +} + +type ActionDetails struct { + Enum string + Value string +} + +// generateRbacObjects will take the policy.go file, and send it as input +// to the go templates. Some AST of the Action enum is also included. +func generateRbacObjects(templateSource string) ([]byte, error) { + // Parse the policy.go file for the action enums + f, err := parser.ParseFile(token.NewFileSet(), "./coderd/rbac/policy/policy.go", nil, parser.ParseComments) if err != nil { - log.Fatalf("Failed to parse templates: %s", err.Error()) + return nil, fmt.Errorf("parsing policy.go: %w", err) + } + actionMap := fileActions(f) + actionList := make([]ActionDetails, 0) + for value, enum := range actionMap { + actionList = append(actionList, ActionDetails{ + Enum: enum, + Value: value, + }) } - var out bytes.Buffer - err = tpl.Execute(&out, TplState{ - ResourceNames: allResources(rbacPkg), + // Sorting actions for auto gen consistency. 
+ slices.SortFunc(actionList, func(a, b ActionDetails) int { + return strings.Compare(a.Enum, b.Enum) }) + var errorList []error + var x int + tpl, err := template.New("object.gotmpl").Funcs(template.FuncMap{ + "capitalize": capitalize, + "pascalCaseName": pascalCaseName[string], + "actionsList": func() []ActionDetails { + return actionList + }, + "actionEnum": func(action policy.Action) string { + x++ + v, ok := actionMap[string(action)] + if !ok { + errorList = append(errorList, fmt.Errorf("action value %q does not have a constant a matching enum constant", action)) + } + return v + }, + "concat": func(strs ...string) string { return strings.Join(strs, "") }, + }).Parse(templateSource) if err != nil { - log.Fatalf("Execute template: %s", err.Error()) + return nil, fmt.Errorf("parse template: %w", err) } - formatted, err := format.Source(out.Bytes()) - if err != nil { - log.Fatalf("Format template: %s", err.Error()) + // Convert to sorted list for autogen consistency. + var out bytes.Buffer + list := make([]Definition, 0) + for t, v := range policy.RBACPermissions { + v := v + list = append(list, Definition{ + PermissionDefinition: v, + Type: t, + }) } - _, _ = fmt.Fprint(os.Stdout, string(formatted)) -} + slices.SortFunc(list, func(a, b Definition) int { + return strings.Compare(a.Type, b.Type) + }) -func allResources(pkg *packages.Package) []string { - var resources []string - names := pkg.Types.Scope().Names() - for _, name := range names { - obj, ok := pkg.Types.Scope().Lookup(name).(*types.Var) - if ok && obj.Type().String() == "github.com/coder/coder/v2/coderd/rbac.Object" { - resources = append(resources, obj.Name()) - } + err = tpl.Execute(&out, list) + if err != nil { + return nil, fmt.Errorf("execute template: %w", err) + } + + if len(errorList) > 0 { + return nil, errors.Join(errorList...) 
} - sort.Strings(resources) - return resources + + return out.Bytes(), nil } diff --git a/scripts/rbacgen/object.gotmpl b/scripts/rbacgen/object.gotmpl deleted file mode 100644 index 281acbc581925..0000000000000 --- a/scripts/rbacgen/object.gotmpl +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by rbacgen/main.go. DO NOT EDIT. -package rbac - -func AllResources() []Object { - return []Object{ - {{- range .ResourceNames }} - {{ . }}, - {{- end }} - } -} - - diff --git a/scripts/rbacgen/rbacobject.gotmpl b/scripts/rbacgen/rbacobject.gotmpl new file mode 100644 index 0000000000000..9e529d2986817 --- /dev/null +++ b/scripts/rbacgen/rbacobject.gotmpl @@ -0,0 +1,39 @@ +// Code generated by rbacgen/main.go. DO NOT EDIT. +package rbac + +import "github.com/coder/coder/v2/coderd/rbac/policy" + +// Objecter returns the RBAC object for itself. +type Objecter interface { + RBACObject() Object +} + +var ( + {{- range $element := . }} + {{- $Name := pascalCaseName $element.FunctionName }} + // Resource{{ $Name }} + // Valid Actions + {{- range $action, $value := .Actions }} + // - "{{ actionEnum $action }}" :: {{ $value.Description }} + {{- end }} + Resource{{ $Name }} = Object { + Type: "{{ $element.Type }}", + } + {{ end -}} +) + +func AllResources() []Objecter { + return []Objecter{ + {{- range $element := . 
}} + Resource{{ pascalCaseName $element.FunctionName }}, + {{- end }} + } +} + +func AllActions() []policy.Action { + return []policy.Action { + {{- range $element := actionsList }} + policy.{{ $element.Enum }}, + {{- end }} + } +} diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 8d49bc6ca7223..9331339ed1aa1 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -134,7 +134,7 @@ export interface AuthMethods { // From codersdk/authorization.go export interface AuthorizationCheck { readonly object: AuthorizationObject; - readonly action: string; + readonly action: RBACAction; } // From codersdk/authorization.go @@ -2055,10 +2055,41 @@ export const ProxyHealthStatuses: ProxyHealthStatus[] = [ "unregistered", ]; -// From codersdk/rbacresources.go +// From codersdk/rbacresources_gen.go +export type RBACAction = + | "application_connect" + | "assign" + | "create" + | "delete" + | "read" + | "read_personal" + | "ssh" + | "start" + | "stop" + | "update" + | "update_personal" + | "use" + | "view_insights"; +export const RBACActions: RBACAction[] = [ + "application_connect", + "assign", + "create", + "delete", + "read", + "read_personal", + "ssh", + "start", + "stop", + "update", + "update_personal", + "use", + "view_insights", +]; + +// From codersdk/rbacresources_gen.go export type RBACResource = + | "*" | "api_key" - | "application_connect" | "assign_org_role" | "assign_role" | "audit_log" @@ -2068,22 +2099,23 @@ export type RBACResource = | "file" | "group" | "license" + | "oauth2_app" + | "oauth2_app_code_token" + | "oauth2_app_secret" | "organization" | "organization_member" | "provisioner_daemon" | "replicas" | "system" + | "tailnet_coordinator" | "template" - | "template_insights" | "user" - | "user_data" - | "user_workspace_build_parameters" | "workspace" - | "workspace_execution" + | "workspace_dormant" | "workspace_proxy"; export const RBACResources: RBACResource[] = [ + "*", "api_key", - 
"application_connect", "assign_org_role", "assign_role", "audit_log", @@ -2093,18 +2125,19 @@ export const RBACResources: RBACResource[] = [ "file", "group", "license", + "oauth2_app", + "oauth2_app_code_token", + "oauth2_app_secret", "organization", "organization_member", "provisioner_daemon", "replicas", "system", + "tailnet_coordinator", "template", - "template_insights", "user", - "user_data", - "user_workspace_build_parameters", "workspace", - "workspace_execution", + "workspace_dormant", "workspace_proxy", ]; diff --git a/site/src/pages/TemplatePage/TemplateLayout.tsx b/site/src/pages/TemplatePage/TemplateLayout.tsx index ec19d80c166cc..bd53a6dc39052 100644 --- a/site/src/pages/TemplatePage/TemplateLayout.tsx +++ b/site/src/pages/TemplatePage/TemplateLayout.tsx @@ -28,9 +28,10 @@ const templatePermissions = ( }, canReadInsights: { object: { - resource_type: "template_insights", + resource_type: "template", + resource_id: templateId, }, - action: "read", + action: "view_insights", }, }); diff --git a/support/support.go b/support/support.go index e49f95e38d045..af3ad21200d02 100644 --- a/support/support.go +++ b/support/support.go @@ -10,18 +10,15 @@ import ( "net/http/httptest" "strings" + "github.com/google/uuid" "golang.org/x/sync/errgroup" "golang.org/x/xerrors" "tailscale.com/ipn/ipnstate" "tailscale.com/net/netcheck" - "github.com/coder/coder/v2/coderd/healthcheck/derphealth" - "github.com/coder/coder/v2/coderd/rbac/policy" - - "github.com/google/uuid" - "cdr.dev/slog" "cdr.dev/slog/sloggers/sloghuman" + "github.com/coder/coder/v2/coderd/healthcheck/derphealth" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/healthsdk" @@ -460,9 +457,9 @@ func Run(ctx context.Context, d *Deps) (*Bundle, error) { authChecks := map[string]codersdk.AuthorizationCheck{ "Read DeploymentValues": { Object: codersdk.AuthorizationObject{ - ResourceType: codersdk.ResourceDeploymentValues, + ResourceType: 
codersdk.ResourceDeploymentConfig, }, - Action: string(policy.ActionRead), + Action: codersdk.ActionRead, }, } From fc6f18aa96d199d40819b8aa232e158c320e8093 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Wed, 15 May 2024 13:14:34 -0600 Subject: [PATCH 062/149] feat(site): add an organization switcher to the user menu (#13269) --- site/src/api/queries/users.ts | 7 +++ site/src/contexts/auth/RequireAuth.tsx | 2 +- .../dashboard/Navbar/NavbarView.stories.tsx | 2 + .../UserDropdown/UserDropdown.stories.tsx | 2 + .../Navbar/UserDropdown/UserDropdown.tsx | 11 +++++ .../UserDropdown/UserDropdownContent.tsx | 45 ++++++++++++++++++- 6 files changed, 67 insertions(+), 2 deletions(-) diff --git a/site/src/api/queries/users.ts b/site/src/api/queries/users.ts index 7dcd157f7bc6c..cf70038e7ca23 100644 --- a/site/src/api/queries/users.ts +++ b/site/src/api/queries/users.ts @@ -249,3 +249,10 @@ export const updateAppearanceSettings = ( }, }; }; + +export const myOrganizations = () => { + return { + queryKey: ["organizations", "me"], + queryFn: () => API.getOrganizations(), + }; +}; diff --git a/site/src/contexts/auth/RequireAuth.tsx b/site/src/contexts/auth/RequireAuth.tsx index b1def94fd9485..6172ba8212ac5 100644 --- a/site/src/contexts/auth/RequireAuth.tsx +++ b/site/src/contexts/auth/RequireAuth.tsx @@ -67,7 +67,7 @@ export const RequireAuth: FC = () => { }; type RequireKeys = Omit & { - [K in keyof Pick]: NonNullable; + [K in keyof Pick]-?: NonNullable; }; // We can do some TS magic here but I would rather to be explicit on what diff --git a/site/src/modules/dashboard/Navbar/NavbarView.stories.tsx b/site/src/modules/dashboard/Navbar/NavbarView.stories.tsx index cf5522c56203a..2490234bd36e1 100644 --- a/site/src/modules/dashboard/Navbar/NavbarView.stories.tsx +++ b/site/src/modules/dashboard/Navbar/NavbarView.stories.tsx @@ -1,6 +1,7 @@ import type { Meta, StoryObj } from "@storybook/react"; import { chromaticWithTablet } from "testHelpers/chromatic"; import { 
MockUser, MockUser2 } from "testHelpers/entities"; +import { withDashboardProvider } from "testHelpers/storybook"; import { NavbarView } from "./NavbarView"; const meta: Meta = { @@ -10,6 +11,7 @@ const meta: Meta = { args: { user: MockUser, }, + decorators: [withDashboardProvider], }; export default meta; diff --git a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.stories.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.stories.tsx index 04e18fc3d49f0..c7652eb460c77 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.stories.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.stories.tsx @@ -1,6 +1,7 @@ import type { Meta, StoryObj } from "@storybook/react"; import { expect, screen, userEvent, within, waitFor } from "@storybook/test"; import { MockBuildInfo, MockUser } from "testHelpers/entities"; +import { withDashboardProvider } from "testHelpers/storybook"; import { UserDropdown } from "./UserDropdown"; const meta: Meta = { @@ -16,6 +17,7 @@ const meta: Meta = { { icon: "/icon/aws.svg", name: "Amazon Web Services", target: "" }, ], }, + decorators: [withDashboardProvider], }; export default meta; diff --git a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx index b42858157d142..1efdfec9df0ed 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdown.tsx @@ -1,6 +1,8 @@ import { css, type Interpolation, type Theme, useTheme } from "@emotion/react"; import Badge from "@mui/material/Badge"; import type { FC } from "react"; +import { useQuery } from "react-query"; +import { myOrganizations } from "api/queries/users"; import type * as TypesGen from "api/typesGenerated"; import { DropdownArrow } from "components/DropdownArrow/DropdownArrow"; import { @@ -9,6 +11,7 @@ import { PopoverTrigger, } from "components/Popover/Popover"; import { 
UserAvatar } from "components/UserAvatar/UserAvatar"; +import { useDashboard } from "modules/dashboard/useDashboard"; import { BUTTON_SM_HEIGHT, navHeight } from "theme/constants"; import { UserDropdownContent } from "./UserDropdownContent"; @@ -26,6 +29,11 @@ export const UserDropdown: FC = ({ onSignOut, }) => { const theme = useTheme(); + const organizationsQuery = useQuery({ + ...myOrganizations(), + enabled: Boolean(localStorage.getItem("enableMultiOrganizationUi")), + }); + const { organizationId, setOrganizationId } = useDashboard(); return ( @@ -63,6 +71,9 @@ export const UserDropdown: FC = ({ user={user} buildInfo={buildInfo} supportLinks={supportLinks} + organizations={organizationsQuery.data} + organizationId={organizationId} + setOrganizationId={setOrganizationId} onSignOut={onSignOut} /> diff --git a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx index fa3f64c37cb18..8dc0f23d34f73 100644 --- a/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx +++ b/site/src/modules/dashboard/Navbar/UserDropdown/UserDropdownContent.tsx @@ -84,14 +84,20 @@ const styles = { export interface UserDropdownContentProps { user: TypesGen.User; + organizations?: TypesGen.Organization[]; + organizationId?: string; + setOrganizationId?: (id: string) => void; buildInfo?: TypesGen.BuildInfoResponse; supportLinks?: readonly TypesGen.LinkConfig[]; onSignOut: () => void; } export const UserDropdownContent: FC = ({ - buildInfo, user, + organizations, + organizationId, + setOrganizationId, + buildInfo, supportLinks, onSignOut, }) => { @@ -128,6 +134,43 @@ export const UserDropdownContent: FC = ({ + {organizations && ( + <> +
+
+ My teams +
+ {organizations.map((org) => ( + { + setOrganizationId?.(org.id); + popover.setIsOpen(false); + }} + > + {/* */} + + {org.name} + {organizationId === org.id && ( + Current + )} + + + ))} +
+ + + )} + From 114fb31fbb7a9a271128bcde51e1292d7a55400d Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Wed, 15 May 2024 14:40:46 -0500 Subject: [PATCH 063/149] fixed sharable port + coder_app interaction (#13285) --- docs/networking/port-forwarding.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/networking/port-forwarding.md b/docs/networking/port-forwarding.md index 09597bb94f9db..89454b8258e3d 100644 --- a/docs/networking/port-forwarding.md +++ b/docs/networking/port-forwarding.md @@ -120,11 +120,14 @@ not it is still accessible. ![Annotated port controls in the UI](../images/networking/annotatedports.png) +The sharing level is limited by the maximum level enforced in the template +settings in enterprise deployments, and not restricted in OSS deployments. + This can also be used to change the sharing level of `coder_app`s by entering -their port number in the shared ports UI. The `share` attribute of `coder_app`s -defined using the terraform provider can be overridden by sharing the port. The -sharing level is limited by the maximum level enforced by the template in -enterprise deployments, and not restricted in OSS deployments. +their port number in the sharable ports UI. The `share` attribute on `coder_app` +resource uses a different method of authentication and **is not impacted by the +template's maximum sharing level**, nor the level of a shared port that points +to the app. 
### Configure maximum port sharing level (enterprise) From 63e06853eb147ee71b6d79db68d1edc66711de1c Mon Sep 17 00:00:00 2001 From: Michael Smith Date: Wed, 15 May 2024 16:59:15 -0400 Subject: [PATCH 064/149] fix: update tests for useClipboard to minimize risks of flakes (#13250) * wip: commit progress on test revamps * fix: update existing tests to new format * chore: add test case for global snackbar * refactor: consolidate files * refactor: make http dependency more explicit * chore: add extra test case for exposed error value * docs: fix typos * fix: make sure clipboard is reset between test runs * docs: add more context to comments * refactor: update mock console.error logic to use jest.spyOn * docs: add more clarifying comments * refactor: split off type alias for clarity --- site/src/hooks/useClipboard.test.tsx | 309 +++++++++++++++------------ site/src/hooks/useClipboard.ts | 5 +- 2 files changed, 178 insertions(+), 136 deletions(-) diff --git a/site/src/hooks/useClipboard.test.tsx b/site/src/hooks/useClipboard.test.tsx index 5ddbed3f8cc12..b8296efb26eb0 100644 --- a/site/src/hooks/useClipboard.test.tsx +++ b/site/src/hooks/useClipboard.test.tsx @@ -1,129 +1,126 @@ -import { act, renderHook } from "@testing-library/react"; +/** + * @file The test setup for this file is a little funky because of how React + * Testing Library works. + * + * When you call user.setup to make a new user session, it will make a mock + * clipboard instance that will always succeed. It also can't be removed after + * it's been added, and it will persist across test cases. This actually makes + * testing useClipboard properly impossible because any call to user.setup + * immediately pollutes the tests with false negatives. Even if something should + * fail, it won't. 
+ */ +import { act, renderHook, screen } from "@testing-library/react"; import { GlobalSnackbar } from "components/GlobalSnackbar/GlobalSnackbar"; import { ThemeProvider } from "contexts/ThemeProvider"; import { type UseClipboardInput, type UseClipboardResult, + COPY_FAILED_MESSAGE, useClipboard, + HTTP_FALLBACK_DATA_ID, } from "./useClipboard"; -describe(useClipboard.name, () => { - describe("HTTP (non-secure) connections", () => { - scheduleClipboardTests({ isHttps: false }); - }); - - describe("HTTPS (secure/default) connections", () => { - scheduleClipboardTests({ isHttps: true }); - }); -}); +// Need to mock console.error because we deliberately need to trigger errors in +// the code to assert that it can recover from them, but we also don't want them +// logged if they're expected +const originalConsoleError = console.error; -/** - * @file This is a very weird test setup. - * - * There are two main things that it's fighting against to insure that the - * clipboard functionality is working as expected: - * 1. userEvent.setup's default global behavior - * 2. The fact that we need to reuse the same set of test cases for two separate - * contexts (secure and insecure), each with their own version of global - * state. - * - * The goal of this file is to provide a shared set of test behavior that can - * be imported into two separate test files (one for HTTP, one for HTTPS), - * without any risk of global state conflicts. - * - * --- - * For (1), normally you could call userEvent.setup to enable clipboard mocking, - * but userEvent doesn't expose a teardown function. It also modifies the global - * scope for the whole test file, so enabling just one userEvent session will - * make a mock clipboard exist for all other tests, even though you didn't tell - * them to set up a session. 
The mock also assumes that the clipboard API will - * always be available, which is not true on HTTP-only connections - * - * Since these tests need to split hairs and differentiate between HTTP and - * HTTPS connections, setting up a single userEvent is disastrous. It will make - * all the tests pass, even if they shouldn't. Have to avoid that by creating a - * custom clipboard mock. - * - * --- - * For (2), we're fighting against Jest's default behavior, which is to treat - * the test file as the main boundary for test environments, with each test case - * able to run in parallel. That works if you have one single global state, but - * we need two separate versions of the global state, while repeating the exact - * same test cases for each one. - * - * If both tests were to be placed in the same file, Jest would not isolate them - * and would let their setup steps interfere with each other. This leads to one - * of two things: - * 1. One of the global mocks overrides the other, making it so that one - * connection type always fails - * 2. The two just happen not to conflict each other, through some convoluted - * order of operations involving closure, but you have no idea why the code - * is working, and it's impossible to debug. 
- */ -type MockClipboardEscapeHatches = Readonly<{ - getMockText: () => string; - setMockText: (newText: string) => void; - simulateFailure: boolean; - setSimulateFailure: (failureMode: boolean) => void; +type SetupMockClipboardResult = Readonly<{ + mockClipboard: Clipboard; + mockExecCommand: typeof global.document.execCommand; + getClipboardText: () => string; + setSimulateFailure: (shouldFail: boolean) => void; + resetMockClipboardState: () => void; }>; -type MockClipboard = Readonly; -function makeMockClipboard(isSecureContext: boolean): MockClipboard { - let mockClipboardValue = ""; - let shouldFail = false; - - return { - get simulateFailure() { - return shouldFail; - }, - setSimulateFailure: (value) => { - shouldFail = value; - }, +function setupMockClipboard(isSecure: boolean): SetupMockClipboardResult { + let mockClipboardText = ""; + let shouldSimulateFailure = false; + const mockClipboard: Clipboard = { readText: async () => { - if (shouldFail) { - throw new Error("Clipboard deliberately failed"); - } - - if (!isSecureContext) { + if (!isSecure) { throw new Error( - "Trying to read from clipboard outside secure context!", + "Not allowed to access clipboard outside of secure contexts", ); } - return mockClipboardValue; + if (shouldSimulateFailure) { + throw new Error("Failed to read from clipboard"); + } + + return mockClipboardText; }, + writeText: async (newText) => { - if (shouldFail) { - throw new Error("Clipboard deliberately failed"); + if (!isSecure) { + throw new Error( + "Not allowed to access clipboard outside of secure contexts", + ); } - if (!isSecureContext) { - throw new Error("Trying to write to clipboard outside secure context!"); + if (shouldSimulateFailure) { + throw new Error("Failed to write to clipboard"); } - mockClipboardValue = newText; - }, - - getMockText: () => mockClipboardValue, - setMockText: (newText) => { - mockClipboardValue = newText; + mockClipboardText = newText; }, + // Don't need these other methods for any of the 
tests; read and write are + // both synchronous and slower than the promise-based methods, so ideally + // we won't ever need to call them in the hook logic addEventListener: jest.fn(), removeEventListener: jest.fn(), dispatchEvent: jest.fn(), read: jest.fn(), write: jest.fn(), }; + + return { + mockClipboard, + getClipboardText: () => mockClipboardText, + setSimulateFailure: (newShouldFailValue) => { + shouldSimulateFailure = newShouldFailValue; + }, + resetMockClipboardState: () => { + shouldSimulateFailure = false; + mockClipboardText = ""; + }, + mockExecCommand: (commandId) => { + if (commandId !== "copy") { + return false; + } + + if (shouldSimulateFailure) { + throw new Error("Failed to execute command 'copy'"); + } + + const dummyInput = document.querySelector( + `input[data-testid=${HTTP_FALLBACK_DATA_ID}]`, + ); + + const inputIsFocused = + dummyInput instanceof HTMLInputElement && + document.activeElement === dummyInput; + + let copySuccessful = false; + if (inputIsFocused) { + mockClipboardText = dummyInput.value; + copySuccessful = true; + } + + return copySuccessful; + }, + }; } -function renderUseClipboard(inputs: UseClipboardInput) { - return renderHook( +function renderUseClipboard(inputs: TInput) { + return renderHook( (props) => useClipboard(props), { initialProps: inputs, wrapper: ({ children }) => ( + // Need ThemeProvider because GlobalSnackbar uses theme {children} @@ -133,79 +130,92 @@ function renderUseClipboard(inputs: UseClipboardInput) { ); } -type ScheduleConfig = Readonly<{ isHttps: boolean }>; +type RenderResult = ReturnType["result"]; -export function scheduleClipboardTests({ isHttps }: ScheduleConfig) { - const mockClipboardInstance = makeMockClipboard(isHttps); - const originalNavigator = window.navigator; +// execCommand is the workaround for copying text to the clipboard on HTTP-only +// connections +const originalExecCommand = global.document.execCommand; +const originalNavigator = window.navigator; + +// Not a big fan of 
describe.each most of the time, but since we need to test +// the exact same test cases against different inputs, and we want them to run +// as sequentially as possible to minimize flakes, they make sense here +const secureContextValues: readonly boolean[] = [true, false]; +describe.each(secureContextValues)("useClipboard - secure: %j", (isSecure) => { + const { + mockClipboard, + mockExecCommand, + getClipboardText, + setSimulateFailure, + resetMockClipboardState, + } = setupMockClipboard(isSecure); beforeEach(() => { jest.useFakeTimers(); + + // Can't use jest.spyOn here because there's no guarantee that the mock + // browser environment actually implements execCommand. Trying to spy on an + // undefined value will throw an error + global.document.execCommand = mockExecCommand; + jest.spyOn(window, "navigator", "get").mockImplementation(() => ({ ...originalNavigator, - clipboard: mockClipboardInstance, + clipboard: mockClipboard, })); - if (!isHttps) { - // Not the biggest fan of exposing implementation details like this, but - // making any kind of mock for execCommand is really gnarly in general - global.document.execCommand = jest.fn(() => { - if (mockClipboardInstance.simulateFailure) { - return false; - } - - const dummyInput = document.querySelector("input[data-testid=dummy]"); - const inputIsFocused = - dummyInput instanceof HTMLInputElement && - document.activeElement === dummyInput; - - let copySuccessful = false; - if (inputIsFocused) { - mockClipboardInstance.setMockText(dummyInput.value); - copySuccessful = true; - } - - return copySuccessful; - }); - } + jest.spyOn(console, "error").mockImplementation((errorValue, ...rest) => { + const canIgnore = + errorValue instanceof Error && + errorValue.message === COPY_FAILED_MESSAGE; + + if (!canIgnore) { + originalConsoleError(errorValue, ...rest); + } + }); }); afterEach(() => { + jest.runAllTimers(); jest.useRealTimers(); - mockClipboardInstance.setMockText(""); - 
mockClipboardInstance.setSimulateFailure(false); + jest.resetAllMocks(); + global.document.execCommand = originalExecCommand; + + // Still have to reset the mock clipboard state because the same mock values + // are reused for each test case in a given describe.each iteration + resetMockClipboardState(); }); - const assertClipboardTextUpdate = async ( - result: ReturnType["result"], + const assertClipboardUpdateLifecycle = async ( + result: RenderResult, textToCheck: string, ): Promise => { await act(() => result.current.copyToClipboard()); expect(result.current.showCopiedSuccess).toBe(true); - const clipboardText = mockClipboardInstance.getMockText(); + // Because of timing trickery, any timeouts for flipping the copy status + // back to false will usually trigger before any test cases calling this + // assert function can complete. This will never be an issue in the real + // world, but it will kick up 'act' warnings in the console, which makes + // tests more annoying. Getting around that by waiting for all timeouts to + // wrap up, but note that the value of showCopiedSuccess will become false + // after runAllTimersAsync finishes + await act(() => jest.runAllTimersAsync()); + + const clipboardText = getClipboardText(); expect(clipboardText).toEqual(textToCheck); }; - /** - * Start of test cases - */ it("Copies the current text to the user's clipboard", async () => { const textToCopy = "dogs"; const { result } = renderUseClipboard({ textToCopy }); - await assertClipboardTextUpdate(result, textToCopy); + await assertClipboardUpdateLifecycle(result, textToCopy); }); it("Should indicate to components not to show successful copy after a set period of time", async () => { const textToCopy = "cats"; const { result } = renderUseClipboard({ textToCopy }); - await assertClipboardTextUpdate(result, textToCopy); - - setTimeout(() => { - expect(result.current.showCopiedSuccess).toBe(false); - }, 10_000); - - await jest.runAllTimersAsync(); + await 
assertClipboardUpdateLifecycle(result, textToCopy); + expect(result.current.showCopiedSuccess).toBe(false); }); it("Should notify the user of an error using the provided callback", async () => { @@ -213,8 +223,39 @@ export function scheduleClipboardTests({ isHttps }: ScheduleConfig) { const onError = jest.fn(); const { result } = renderUseClipboard({ textToCopy, onError }); - mockClipboardInstance.setSimulateFailure(true); + setSimulateFailure(true); await act(() => result.current.copyToClipboard()); expect(onError).toBeCalled(); }); -} + + it("Should dispatch a new toast message to the global snackbar when errors happen while no error callback is provided to the hook", async () => { + const textToCopy = "crow"; + const { result } = renderUseClipboard({ textToCopy }); + + /** + * @todo Look into why deferring error-based state updates to the global + * snackbar still kicks up act warnings, even after wrapping copyToClipboard + * in act. copyToClipboard should be the main source of the state + * transitions, but it looks like extra state changes are still getting + * flushed through the GlobalSnackbar component afterwards + */ + setSimulateFailure(true); + await act(() => result.current.copyToClipboard()); + + const errorMessageNode = screen.queryByText(COPY_FAILED_MESSAGE); + expect(errorMessageNode).not.toBeNull(); + }); + + it("Should expose the error as a value when a copy fails", async () => { + // Using empty onError callback to silence any possible act warnings from + // Snackbar state transitions that you might get if the hook uses the + // default + const textToCopy = "hamster"; + const { result } = renderUseClipboard({ textToCopy, onError: jest.fn() }); + + setSimulateFailure(true); + await act(() => result.current.copyToClipboard()); + + expect(result.current.error).toBeInstanceOf(Error); + }); +}); diff --git a/site/src/hooks/useClipboard.ts b/site/src/hooks/useClipboard.ts index 83ec8283ed710..6228c3778766d 100644 --- a/site/src/hooks/useClipboard.ts 
+++ b/site/src/hooks/useClipboard.ts @@ -2,7 +2,8 @@ import { useEffect, useRef, useState } from "react"; import { displayError } from "components/GlobalSnackbar/utils"; const CLIPBOARD_TIMEOUT_MS = 1_000; -const COPY_FAILED_MESSAGE = "Failed to copy text to clipboard"; +export const COPY_FAILED_MESSAGE = "Failed to copy text to clipboard"; +export const HTTP_FALLBACK_DATA_ID = "http-fallback"; export type UseClipboardInput = Readonly<{ textToCopy: string; @@ -99,7 +100,7 @@ function simulateClipboardWrite(textToCopy: string): boolean { const dummyInput = document.createElement("input"); // Have to add test ID to dummy element for mocking purposes in tests - dummyInput.setAttribute("data-testid", "dummy"); + dummyInput.setAttribute("data-testid", HTTP_FALLBACK_DATA_ID); // Using visually-hidden styling to ensure that inserting the element doesn't // cause any content reflows on the page (removes any risk of UI flickers). From a0fce363cd55f878af473f7ab803fa1c9f481504 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Thu, 16 May 2024 16:53:01 +0300 Subject: [PATCH 065/149] feat(coderd): add `times_used` to `coder_app`s in insights API (#13292) For now, only applied to `coder_app`s, same logic can be implemented for VS Code, SSH, etc. 
Part of #13099 --- coderd/apidoc/docs.go | 4 + coderd/apidoc/swagger.json | 4 + coderd/database/dbmem/dbmem.go | 74 ++++++++++++++++++- coderd/database/queries.sql.go | 46 ++++++++++-- coderd/database/queries/insights.sql | 44 +++++++++-- coderd/insights.go | 1 + ...es_three_weeks_second_template.json.golden | 18 +++-- ...ks_second_template_only_report.json.golden | 18 +++-- ..._workspaces_week_all_templates.json.golden | 24 ++++-- ...orkspaces_week_deployment_wide.json.golden | 24 ++++-- ...workspaces_week_first_template.json.golden | 21 ++++-- ...r_timezone_(S\303\243o_Paulo).json.golden" | 24 ++++-- ...orkspaces_week_second_template.json.golden | 18 +++-- ...workspaces_week_third_template.json.golden | 18 +++-- ...kly_aggregated_deployment_wide.json.golden | 24 ++++-- ...ekly_aggregated_first_template.json.golden | 21 ++++-- ...es_weekly_aggregated_templates.json.golden | 24 ++++-- ...rameters_two_days_ago,_no_data.json.golden | 15 ++-- ...rday_and_today_deployment_wide.json.golden | 15 ++-- codersdk/insights.go | 1 + docs/api/insights.md | 1 + docs/api/schemas.md | 4 + site/src/api/typesGenerated.ts | 1 + .../TemplateInsightsPage.stories.tsx | 4 + 24 files changed, 346 insertions(+), 102 deletions(-) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 0a22d84d13642..8e7fad2c05a49 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -11372,6 +11372,10 @@ const docTemplate = `{ "format": "uuid" } }, + "times_used": { + "type": "integer", + "example": 2 + }, "type": { "allOf": [ { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 331b1512393f7..582ccc74f22c3 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -10272,6 +10272,10 @@ "format": "uuid" } }, + "times_used": { + "type": "integer", + "example": 2 + }, "type": { "allOf": [ { diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 8a2ce25b34367..d1bbd6df49492 100644 --- a/coderd/database/dbmem/dbmem.go +++ 
b/coderd/database/dbmem/dbmem.go @@ -3149,6 +3149,30 @@ func (q *FakeQuerier) GetTemplateAppInsights(ctx context.Context, arg database.G GROUP BY start_time, user_id, slug, display_name, icon ), + -- Analyze the users unique app usage across all templates. Count + -- usage across consecutive intervals as continuous usage. + times_used AS ( + SELECT DISTINCT ON (user_id, slug, display_name, icon, uniq) + slug, + display_name, + icon, + -- Turn start_time into a unique identifier that identifies a users + -- continuous app usage. The value of uniq is otherwise garbage. + -- + -- Since we're aggregating per user app usage across templates, + -- there can be duplicate start_times. To handle this, we use the + -- dense_rank() function, otherwise row_number() would suffice. + start_time - ( + dense_rank() OVER ( + PARTITION BY + user_id, slug, display_name, icon + ORDER BY + start_time + ) * '30 minutes'::interval + ) AS uniq + FROM + template_usage_stats_with_apps + ), */ // Due to query optimizations, this logic is somewhat inverted from @@ -3160,12 +3184,19 @@ func (q *FakeQuerier) GetTemplateAppInsights(ctx context.Context, arg database.G DisplayName string Icon string } + type appTimesUsedGroupBy struct { + UserID uuid.UUID + Slug string + DisplayName string + Icon string + } type appInsightsRow struct { appInsightsGroupBy TemplateIDs []uuid.UUID AppUsageMins int64 } appInsightRows := make(map[appInsightsGroupBy]appInsightsRow) + appTimesUsedRows := make(map[appTimesUsedGroupBy]map[time.Time]struct{}) // FROM for _, stat := range q.templateUsageStats { // WHERE @@ -3201,9 +3232,42 @@ func (q *FakeQuerier) GetTemplateAppInsights(ctx context.Context, arg database.G row.TemplateIDs = append(row.TemplateIDs, stat.TemplateID) row.AppUsageMins = least(row.AppUsageMins+appUsage, 30) appInsightRows[key] = row + + // Prepare to do times_used calculation, distinct start times. 
+ timesUsedKey := appTimesUsedGroupBy{ + UserID: stat.UserID, + Slug: slug, + DisplayName: app.DisplayName, + Icon: app.Icon, + } + if appTimesUsedRows[timesUsedKey] == nil { + appTimesUsedRows[timesUsedKey] = make(map[time.Time]struct{}) + } + // This assigns a distinct time, so we don't need to + // dense_rank() later on, we can simply do row_number(). + appTimesUsedRows[timesUsedKey][stat.StartTime] = struct{}{} } } + appTimesUsedTempRows := make(map[appTimesUsedGroupBy][]time.Time) + for key, times := range appTimesUsedRows { + for t := range times { + appTimesUsedTempRows[key] = append(appTimesUsedTempRows[key], t) + } + } + for _, times := range appTimesUsedTempRows { + slices.SortFunc(times, func(a, b time.Time) int { + return int(a.Sub(b)) + }) + } + for key, times := range appTimesUsedTempRows { + uniq := make(map[time.Time]struct{}) + for i, t := range times { + uniq[t.Add(-(30 * time.Minute * time.Duration(i)))] = struct{}{} + } + appTimesUsedRows[key] = uniq + } + /* -- Even though we allow identical apps to be aggregated across -- templates, we still want to be able to report which templates @@ -3288,14 +3352,20 @@ func (q *FakeQuerier) GetTemplateAppInsights(ctx context.Context, arg database.G var rows []database.GetTemplateAppInsightsRow for key, gr := range groupedRows { - rows = append(rows, database.GetTemplateAppInsightsRow{ + row := database.GetTemplateAppInsightsRow{ TemplateIDs: templateRows[key].TemplateIDs, ActiveUsers: int64(len(uniqueSortedUUIDs(gr.ActiveUserIDs))), Slug: key.Slug, DisplayName: key.DisplayName, Icon: key.Icon, UsageSeconds: gr.UsageSeconds, - }) + } + for tuk, uniq := range appTimesUsedRows { + if key.Slug == tuk.Slug && key.DisplayName == tuk.DisplayName && key.Icon == tuk.Icon { + row.TimesUsed += int64(len(uniq)) + } + } + rows = append(rows, row) } // NOTE(mafredri): Add sorting if we decide on how to handle PostgreSQL collations. 
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index e0fba2dad35bd..f4e7d4d70e4b6 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -1805,7 +1805,7 @@ WITH apps.slug, apps.display_name, apps.icon, - tus.app_usage_mins + (tus.app_usage_mins -> apps.slug)::smallint AS usage_mins FROM apps JOIN @@ -1829,14 +1829,36 @@ WITH display_name, icon, -- See motivation in GetTemplateInsights for LEAST(SUM(n), 30). - LEAST(SUM(app_usage.value::smallint), 30) AS usage_mins + LEAST(SUM(usage_mins), 30) AS usage_mins FROM - template_usage_stats_with_apps, jsonb_each(app_usage_mins) AS app_usage - WHERE - app_usage.key = slug + template_usage_stats_with_apps GROUP BY start_time, user_id, slug, display_name, icon ), + -- Analyze the users unique app usage across all templates. Count + -- usage across consecutive intervals as continuous usage. + times_used AS ( + SELECT DISTINCT ON (user_id, slug, display_name, icon, uniq) + slug, + display_name, + icon, + -- Turn start_time into a unique identifier that identifies a users + -- continuous app usage. The value of uniq is otherwise garbage. + -- + -- Since we're aggregating per user app usage across templates, + -- there can be duplicate start_times. To handle this, we use the + -- dense_rank() function, otherwise row_number() would suffice. + start_time - ( + dense_rank() OVER ( + PARTITION BY + user_id, slug, display_name, icon + ORDER BY + start_time + ) * '30 minutes'::interval + ) AS uniq + FROM + template_usage_stats_with_apps + ), -- Even though we allow identical apps to be aggregated across -- templates, we still want to be able to report which templates -- the data comes from. 
@@ -1858,7 +1880,17 @@ SELECT ai.slug, ai.display_name, ai.icon, - (SUM(ai.usage_mins) * 60)::bigint AS usage_seconds + (SUM(ai.usage_mins) * 60)::bigint AS usage_seconds, + COALESCE(( + SELECT + COUNT(*) + FROM + times_used + WHERE + times_used.slug = ai.slug + AND times_used.display_name = ai.display_name + AND times_used.icon = ai.icon + ), 0)::bigint AS times_used FROM app_insights AS ai JOIN @@ -1884,6 +1916,7 @@ type GetTemplateAppInsightsRow struct { DisplayName string `db:"display_name" json:"display_name"` Icon string `db:"icon" json:"icon"` UsageSeconds int64 `db:"usage_seconds" json:"usage_seconds"` + TimesUsed int64 `db:"times_used" json:"times_used"` } // GetTemplateAppInsights returns the aggregate usage of each app in a given @@ -1905,6 +1938,7 @@ func (q *sqlQuerier) GetTemplateAppInsights(ctx context.Context, arg GetTemplate &i.DisplayName, &i.Icon, &i.UsageSeconds, + &i.TimesUsed, ); err != nil { return nil, err } diff --git a/coderd/database/queries/insights.sql b/coderd/database/queries/insights.sql index cd526efeb516e..79b0d43529e4b 100644 --- a/coderd/database/queries/insights.sql +++ b/coderd/database/queries/insights.sql @@ -249,7 +249,7 @@ WITH apps.slug, apps.display_name, apps.icon, - tus.app_usage_mins + (tus.app_usage_mins -> apps.slug)::smallint AS usage_mins FROM apps JOIN @@ -273,14 +273,36 @@ WITH display_name, icon, -- See motivation in GetTemplateInsights for LEAST(SUM(n), 30). - LEAST(SUM(app_usage.value::smallint), 30) AS usage_mins + LEAST(SUM(usage_mins), 30) AS usage_mins FROM - template_usage_stats_with_apps, jsonb_each(app_usage_mins) AS app_usage - WHERE - app_usage.key = slug + template_usage_stats_with_apps GROUP BY start_time, user_id, slug, display_name, icon ), + -- Analyze the users unique app usage across all templates. Count + -- usage across consecutive intervals as continuous usage. 
+ times_used AS ( + SELECT DISTINCT ON (user_id, slug, display_name, icon, uniq) + slug, + display_name, + icon, + -- Turn start_time into a unique identifier that identifies a users + -- continuous app usage. The value of uniq is otherwise garbage. + -- + -- Since we're aggregating per user app usage across templates, + -- there can be duplicate start_times. To handle this, we use the + -- dense_rank() function, otherwise row_number() would suffice. + start_time - ( + dense_rank() OVER ( + PARTITION BY + user_id, slug, display_name, icon + ORDER BY + start_time + ) * '30 minutes'::interval + ) AS uniq + FROM + template_usage_stats_with_apps + ), -- Even though we allow identical apps to be aggregated across -- templates, we still want to be able to report which templates -- the data comes from. @@ -302,7 +324,17 @@ SELECT ai.slug, ai.display_name, ai.icon, - (SUM(ai.usage_mins) * 60)::bigint AS usage_seconds + (SUM(ai.usage_mins) * 60)::bigint AS usage_seconds, + COALESCE(( + SELECT + COUNT(*) + FROM + times_used + WHERE + times_used.slug = ai.slug + AND times_used.display_name = ai.display_name + AND times_used.icon = ai.icon + ), 0)::bigint AS times_used FROM app_insights AS ai JOIN diff --git a/coderd/insights.go b/coderd/insights.go index 2da27e2561762..a54e79a525644 100644 --- a/coderd/insights.go +++ b/coderd/insights.go @@ -543,6 +543,7 @@ func convertTemplateInsightsApps(usage database.GetTemplateInsightsRow, appUsage Slug: app.Slug, Icon: app.Icon, Seconds: app.UsageSeconds, + TimesUsed: app.TimesUsed, }) } diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template.json.golden index b5552f1db6902..05681323e56e5 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template.json.golden +++ 
b/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template.json.golden @@ -15,7 +15,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [], @@ -23,7 +24,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -33,7 +35,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 7200 + "seconds": 7200, + "times_used": 0 }, { "template_ids": [ @@ -43,7 +46,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 10800 + "seconds": 10800, + "times_used": 0 }, { "template_ids": [], @@ -51,7 +55,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -61,7 +66,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25200 + "seconds": 25200, + "times_used": 2 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template_only_report.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template_only_report.json.golden index a5ad121ea8a3c..cfd4e17fb203a 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template_only_report.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_three_weeks_second_template_only_report.json.golden @@ -15,7 +15,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [], @@ -23,7 +24,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { 
"template_ids": [ @@ -33,7 +35,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 7200 + "seconds": 7200, + "times_used": 0 }, { "template_ids": [ @@ -43,7 +46,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 10800 + "seconds": 10800, + "times_used": 0 }, { "template_ids": [], @@ -51,7 +55,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -61,7 +66,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25200 + "seconds": 25200, + "times_used": 2 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_all_templates.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_all_templates.json.golden index b3eef47ce02e9..dd716fd84f3e3 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_all_templates.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_all_templates.json.golden @@ -18,7 +18,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -28,7 +29,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [ @@ -38,7 +40,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -50,7 +53,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 11520 + "seconds": 11520, + "times_used": 0 }, { "template_ids": [], @@ -58,7 +62,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { 
"template_ids": [ @@ -69,7 +74,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25380 + "seconds": 25380, + "times_used": 4 }, { "template_ids": [ @@ -79,7 +85,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 720 + "seconds": 720, + "times_used": 1 }, { "template_ids": [ @@ -89,7 +96,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_deployment_wide.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_deployment_wide.json.golden index b3eef47ce02e9..dd716fd84f3e3 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_deployment_wide.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_deployment_wide.json.golden @@ -18,7 +18,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -28,7 +29,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [ @@ -38,7 +40,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -50,7 +53,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 11520 + "seconds": 11520, + "times_used": 0 }, { "template_ids": [], @@ -58,7 +62,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -69,7 +74,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25380 + "seconds": 25380, + "times_used": 4 }, { "template_ids": [ @@ -79,7 
+85,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 720 + "seconds": 720, + "times_used": 1 }, { "template_ids": [ @@ -89,7 +96,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_first_template.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_first_template.json.golden index 9adec1dd2a666..bdb882543a409 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_first_template.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_first_template.json.golden @@ -15,7 +15,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -25,7 +26,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [], @@ -33,7 +35,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -43,7 +46,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 7920 + "seconds": 7920, + "times_used": 0 }, { "template_ids": [], @@ -51,7 +55,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -61,7 +66,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 3780 + "seconds": 3780, + "times_used": 3 }, { "template_ids": [ @@ -71,7 +77,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 720 + "seconds": 720, + "times_used": 1 } ], "parameters_usage": [] diff --git 
"a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_other_timezone_(S\303\243o_Paulo).json.golden" "b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_other_timezone_(S\303\243o_Paulo).json.golden" index e45e23bd88d29..4624f17d6fb26 100644 --- "a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_other_timezone_(S\303\243o_Paulo).json.golden" +++ "b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_other_timezone_(S\303\243o_Paulo).json.golden" @@ -17,7 +17,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -27,7 +28,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [], @@ -35,7 +37,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -45,7 +48,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 4320 + "seconds": 4320, + "times_used": 0 }, { "template_ids": [], @@ -53,7 +57,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -64,7 +69,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 21720 + "seconds": 21720, + "times_used": 2 }, { "template_ids": [ @@ -74,7 +80,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 4320 + "seconds": 4320, + "times_used": 2 }, { "template_ids": [ @@ -84,7 +91,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_second_template.json.golden 
b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_second_template.json.golden index 0aaae268732d7..bf3790516ebc6 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_second_template.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_second_template.json.golden @@ -15,7 +15,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [], @@ -23,7 +24,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -31,7 +33,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -41,7 +44,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [], @@ -49,7 +53,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -59,7 +64,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25200 + "seconds": 25200, + "times_used": 2 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_third_template.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_third_template.json.golden index fc0e3785d1d2f..37bd18a11ec89 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_week_third_template.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_week_third_template.json.golden @@ -13,7 +13,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ 
-21,7 +22,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -31,7 +33,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -41,7 +44,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [], @@ -49,7 +53,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -59,7 +64,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_deployment_wide.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_deployment_wide.json.golden index 37012ce9d312f..e408b34fa7e43 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_deployment_wide.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_deployment_wide.json.golden @@ -18,7 +18,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 7200 + "seconds": 7200, + "times_used": 0 }, { "template_ids": [ @@ -28,7 +29,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [ @@ -38,7 +40,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -50,7 +53,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 
15120 + "seconds": 15120, + "times_used": 0 }, { "template_ids": [], @@ -58,7 +62,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -69,7 +74,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25380 + "seconds": 25380, + "times_used": 4 }, { "template_ids": [ @@ -79,7 +85,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 3600 + "seconds": 3600, + "times_used": 1 }, { "template_ids": [ @@ -89,7 +96,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_first_template.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_first_template.json.golden index 6852211092390..a37b5d49180d8 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_first_template.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_first_template.json.golden @@ -15,7 +15,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -25,7 +26,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [], @@ -33,7 +35,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -43,7 +46,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 7920 + "seconds": 7920, + "times_used": 0 }, { "template_ids": [], @@ -51,7 +55,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": 
"/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -61,7 +66,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 3780 + "seconds": 3780, + "times_used": 3 }, { "template_ids": [ @@ -71,7 +77,8 @@ "display_name": "app3", "slug": "app3", "icon": "/icon2.png", - "seconds": 720 + "seconds": 720, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_templates.json.golden b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_templates.json.golden index 38df7fbced082..6d5d38a6b2278 100644 --- a/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_templates.json.golden +++ b/coderd/testdata/insights/template/multiple_users_and_workspaces_weekly_aggregated_templates.json.golden @@ -18,7 +18,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 7200 + "seconds": 7200, + "times_used": 0 }, { "template_ids": [ @@ -28,7 +29,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 120 + "seconds": 120, + "times_used": 0 }, { "template_ids": [ @@ -38,7 +40,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 3600 + "seconds": 3600, + "times_used": 0 }, { "template_ids": [ @@ -50,7 +53,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 15120 + "seconds": 15120, + "times_used": 0 }, { "template_ids": [], @@ -58,7 +62,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [ @@ -69,7 +74,8 @@ "display_name": "app1", "slug": "app1", "icon": "/icon1.png", - "seconds": 25380 + "seconds": 25380, + "times_used": 4 }, { "template_ids": [ @@ -79,7 +85,8 @@ "display_name": "app3", "slug": "app3", "icon": 
"/icon2.png", - "seconds": 3600 + "seconds": 3600, + "times_used": 1 }, { "template_ids": [ @@ -89,7 +96,8 @@ "display_name": "otherapp1", "slug": "otherapp1", "icon": "/icon1.png", - "seconds": 300 + "seconds": 300, + "times_used": 1 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/parameters_two_days_ago,_no_data.json.golden b/coderd/testdata/insights/template/parameters_two_days_ago,_no_data.json.golden index dd9761ef0a2ce..3d6328e3134a3 100644 --- a/coderd/testdata/insights/template/parameters_two_days_ago,_no_data.json.golden +++ b/coderd/testdata/insights/template/parameters_two_days_ago,_no_data.json.golden @@ -11,7 +11,8 @@ "display_name": "Visual Studio Code", "slug": "vscode", "icon": "/icon/code.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -19,7 +20,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -27,7 +29,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -35,7 +38,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -43,7 +47,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 } ], "parameters_usage": [] diff --git a/coderd/testdata/insights/template/parameters_yesterday_and_today_deployment_wide.json.golden b/coderd/testdata/insights/template/parameters_yesterday_and_today_deployment_wide.json.golden index 7f0c5b2ed9520..dfdaf745fd18d 100644 --- a/coderd/testdata/insights/template/parameters_yesterday_and_today_deployment_wide.json.golden +++ b/coderd/testdata/insights/template/parameters_yesterday_and_today_deployment_wide.json.golden @@ -11,7 +11,8 @@ "display_name": "Visual Studio Code", 
"slug": "vscode", "icon": "/icon/code.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -19,7 +20,8 @@ "display_name": "JetBrains", "slug": "jetbrains", "icon": "/icon/intellij.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -27,7 +29,8 @@ "display_name": "Web Terminal", "slug": "reconnecting-pty", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -35,7 +38,8 @@ "display_name": "SSH", "slug": "ssh", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 }, { "template_ids": [], @@ -43,7 +47,8 @@ "display_name": "SFTP", "slug": "sftp", "icon": "/icon/terminal.svg", - "seconds": 0 + "seconds": 0, + "times_used": 0 } ], "parameters_usage": [ diff --git a/codersdk/insights.go b/codersdk/insights.go index 27eb8c3009d30..c9e708de8f34a 100644 --- a/codersdk/insights.go +++ b/codersdk/insights.go @@ -217,6 +217,7 @@ type TemplateAppUsage struct { Slug string `json:"slug" example:"vscode"` Icon string `json:"icon"` Seconds int64 `json:"seconds" example:"80500"` + TimesUsed int64 `json:"times_used" example:"2"` } // TemplateParameterUsage shows the usage of a parameter for one or more diff --git a/docs/api/insights.md b/docs/api/insights.md index 4b8609ae4ffd3..7dae576b847b8 100644 --- a/docs/api/insights.md +++ b/docs/api/insights.md @@ -81,6 +81,7 @@ curl -X GET http://coder-server:8080/api/v2/insights/templates?before=0&after=0 "seconds": 80500, "slug": "vscode", "template_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"], + "times_used": 2, "type": "builtin" } ], diff --git a/docs/api/schemas.md b/docs/api/schemas.md index 42f8f43517233..cd5c1366e392a 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -4558,6 +4558,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o "seconds": 80500, "slug": "vscode", "template_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"], + "times_used": 2, "type": "builtin" } ``` @@ -4571,6 +4572,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `seconds` | integer | false | | | | `slug` | string | false | | | | `template_ids` | array of string | false | | | +| `times_used` | integer | false | | | | `type` | [codersdk.TemplateAppsType](#codersdktemplateappstype) | false | | | ## codersdk.TemplateAppsType @@ -4700,6 +4702,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "seconds": 80500, "slug": "vscode", "template_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"], + "times_used": 2, "type": "builtin" } ], @@ -4765,6 +4768,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "seconds": 80500, "slug": "vscode", "template_ids": ["497f6eca-6276-4993-bfeb-53cbbbba6f08"], + "times_used": 2, "type": "builtin" } ], diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 9331339ed1aa1..b3280d200328a 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -1093,6 +1093,7 @@ export interface TemplateAppUsage { readonly slug: string; readonly icon: string; readonly seconds: number; + readonly times_used: number; } // From codersdk/templates.go diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx index 894beb3a600d0..3630a936929a3 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx @@ -68,6 +68,7 @@ export const Loaded: Story = { slug: "vscode", icon: "/icon/code.svg", seconds: 2513400, + times_used: 0, }, { template_ids: ["0d286645-29aa-4eaf-9b52-cc5d2740c90b"], @@ -76,6 +77,7 @@ export const 
Loaded: Story = { slug: "jetbrains", icon: "/icon/intellij.svg", seconds: 0, + times_used: 0, }, { template_ids: ["0d286645-29aa-4eaf-9b52-cc5d2740c90b"], @@ -84,6 +86,7 @@ export const Loaded: Story = { slug: "reconnecting-pty", icon: "/icon/terminal.svg", seconds: 110400, + times_used: 0, }, { template_ids: ["0d286645-29aa-4eaf-9b52-cc5d2740c90b"], @@ -92,6 +95,7 @@ export const Loaded: Story = { slug: "ssh", icon: "/icon/terminal.svg", seconds: 1020900, + times_used: 0, }, ], parameters_usage: [ From 194be12133f02e45103123ef4346a6dc5aec41fd Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Thu, 16 May 2024 12:07:44 -0500 Subject: [PATCH 066/149] chore: verify validity of built in rbac roles (#13296) Verifies our built in roles are valid according to our policy.go. Working on custom roles requires the dynamic roles to adhere to these rules. Feels fair the built in ones do too. --- coderd/rbac/object.go | 7 +++-- coderd/rbac/roles.go | 55 ++++++++++++++++++++++++++++++++++++++- coderd/rbac/roles_test.go | 21 +++++++++++++++ scripts/rbacgen/main.go | 10 ++++--- 4 files changed, 84 insertions(+), 9 deletions(-) diff --git a/coderd/rbac/object.go b/coderd/rbac/object.go index 30a74e4f825dd..dfd8ab6b55b23 100644 --- a/coderd/rbac/object.go +++ b/coderd/rbac/object.go @@ -1,9 +1,8 @@ package rbac import ( - "fmt" - "github.com/google/uuid" + "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/rbac/policy" ) @@ -36,10 +35,10 @@ type Object struct { func (z Object) ValidAction(action policy.Action) error { perms, ok := policy.RBACPermissions[z.Type] if !ok { - return fmt.Errorf("invalid type %q", z.Type) + return xerrors.Errorf("invalid type %q", z.Type) } if _, ok := perms.Actions[action]; !ok { - return fmt.Errorf("invalid action %q for type %q", action, z.Type) + return xerrors.Errorf("invalid action %q for type %q", action, z.Type) } return nil diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index cee365d06624c..fbac8ddf5379d 100644 --- 
a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -1,6 +1,7 @@ package rbac import ( + "errors" "sort" "strings" @@ -369,6 +370,30 @@ type Permission struct { Action policy.Action `json:"action"` } +func (perm Permission) Valid() error { + if perm.ResourceType == policy.WildcardSymbol { + // Wildcard is tricky to check. Just allow it. + return nil + } + + resource, ok := policy.RBACPermissions[perm.ResourceType] + if !ok { + return xerrors.Errorf("invalid resource type %q", perm.ResourceType) + } + + // Wildcard action is always valid + if perm.Action == policy.WildcardSymbol { + return nil + } + + _, ok = resource.Actions[perm.Action] + if !ok { + return xerrors.Errorf("invalid action %q for resource %q", perm.Action, perm.ResourceType) + } + + return nil +} + // Role is a set of permissions at multiple levels: // - Site level permissions apply EVERYWHERE // - Org level permissions apply to EVERYTHING in a given ORG @@ -393,6 +418,34 @@ type Role struct { cachedRegoValue ast.Value } +// Valid will check all it's permissions and ensure they are all correct +// according to the policy. This verifies every action specified make sense +// for the given resource. +func (role Role) Valid() error { + var errs []error + for _, perm := range role.Site { + if err := perm.Valid(); err != nil { + errs = append(errs, xerrors.Errorf("site: %w", err)) + } + } + + for orgID, permissions := range role.Org { + for _, perm := range permissions { + if err := perm.Valid(); err != nil { + errs = append(errs, xerrors.Errorf("org=%q: %w", orgID, err)) + } + } + } + + for _, perm := range role.User { + if err := perm.Valid(); err != nil { + errs = append(errs, xerrors.Errorf("user: %w", err)) + } + } + + return errors.Join(errs...) 
+} + type Roles []Role func (roles Roles) Expand() ([]Role, error) { @@ -402,7 +455,7 @@ func (roles Roles) Expand() ([]Role, error) { func (roles Roles) Names() []string { names := make([]string, 0, len(roles)) for _, r := range roles { - return append(names, r.Name) + names = append(names, r.Name) } return names } diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index 44ef83b74cd20..fe589449b8884 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -20,6 +20,27 @@ type authSubject struct { Actor rbac.Subject } +// TestBuiltInRoles makes sure our built-in roles are valid by our own policy +// rules. If this is incorrect, that is a mistake. +func TestBuiltInRoles(t *testing.T) { + t.Parallel() + for _, r := range rbac.SiteRoles() { + r := r + t.Run(r.Name, func(t *testing.T) { + t.Parallel() + require.NoError(t, r.Valid(), "invalid role") + }) + } + + for _, r := range rbac.OrganizationRoles(uuid.New()) { + r := r + t.Run(r.Name, func(t *testing.T) { + t.Parallel() + require.NoError(t, r.Valid(), "invalid role") + }) + } +} + //nolint:tparallel,paralleltest func TestOwnerExec(t *testing.T) { owner := rbac.Subject{ diff --git a/scripts/rbacgen/main.go b/scripts/rbacgen/main.go index 38f13434c77e4..1eb186c1b5ce4 100644 --- a/scripts/rbacgen/main.go +++ b/scripts/rbacgen/main.go @@ -16,6 +16,8 @@ import ( "slices" "strings" + "golang.org/x/xerrors" + "github.com/coder/coder/v2/coderd/rbac/policy" ) @@ -148,7 +150,7 @@ func generateRbacObjects(templateSource string) ([]byte, error) { // Parse the policy.go file for the action enums f, err := parser.ParseFile(token.NewFileSet(), "./coderd/rbac/policy/policy.go", nil, parser.ParseComments) if err != nil { - return nil, fmt.Errorf("parsing policy.go: %w", err) + return nil, xerrors.Errorf("parsing policy.go: %w", err) } actionMap := fileActions(f) actionList := make([]ActionDetails, 0) @@ -176,14 +178,14 @@ func generateRbacObjects(templateSource string) ([]byte, error) { x++ v, ok 
:= actionMap[string(action)] if !ok { - errorList = append(errorList, fmt.Errorf("action value %q does not have a constant a matching enum constant", action)) + errorList = append(errorList, xerrors.Errorf("action value %q does not have a constant a matching enum constant", action)) } return v }, "concat": func(strs ...string) string { return strings.Join(strs, "") }, }).Parse(templateSource) if err != nil { - return nil, fmt.Errorf("parse template: %w", err) + return nil, xerrors.Errorf("parse template: %w", err) } // Convert to sorted list for autogen consistency. @@ -203,7 +205,7 @@ func generateRbacObjects(templateSource string) ([]byte, error) { err = tpl.Execute(&out, list) if err != nil { - return nil, fmt.Errorf("execute template: %w", err) + return nil, xerrors.Errorf("execute template: %w", err) } if len(errorList) > 0 { From cf91eff7cf761e19e7ac49e28ed185168190ab79 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Thu, 16 May 2024 13:11:26 -0500 Subject: [PATCH 067/149] chore: implement databased backend for custom roles (#13295) Includes db schema and dbauthz layer for upserting custom roles. Unit test in `customroles_test.go` verify against escalating permissions through this feature. 
--- coderd/database/dbauthz/customroles_test.go | 258 ++++++++++++++++++ coderd/database/dbauthz/dbauthz.go | 142 +++++++++- coderd/database/dbauthz/dbauthz_test.go | 61 +++++ coderd/database/dbauthz/setup_test.go | 4 +- coderd/database/dbmem/dbmem.go | 52 ++++ coderd/database/dbmetrics/dbmetrics.go | 14 + coderd/database/dbmock/dbmock.go | 30 ++ coderd/database/dump.sql | 17 ++ .../migrations/000209_custom_roles.down.sql | 2 + .../migrations/000209_custom_roles.up.sql | 26 ++ .../fixtures/000209_custom_roles.up.sql | 20 ++ coderd/database/models.go | 11 + coderd/database/querier.go | 2 + coderd/database/queries.sql.go | 101 +++++++ coderd/database/queries/roles.sql | 41 +++ coderd/database/unique_constraint.go | 2 + coderd/rbac/object_gen.go | 3 +- coderd/rbac/policy/policy.go | 3 +- coderd/rbac/roles.go | 36 ++- coderd/rbac/roles_test.go | 11 +- coderd/rbac/rolestore/rolestore.go | 37 +++ 21 files changed, 854 insertions(+), 19 deletions(-) create mode 100644 coderd/database/dbauthz/customroles_test.go create mode 100644 coderd/database/migrations/000209_custom_roles.down.sql create mode 100644 coderd/database/migrations/000209_custom_roles.up.sql create mode 100644 coderd/database/migrations/testdata/fixtures/000209_custom_roles.up.sql create mode 100644 coderd/database/queries/roles.sql create mode 100644 coderd/rbac/rolestore/rolestore.go diff --git a/coderd/database/dbauthz/customroles_test.go b/coderd/database/dbauthz/customroles_test.go new file mode 100644 index 0000000000000..aaa2c7a34bbf3 --- /dev/null +++ b/coderd/database/dbauthz/customroles_test.go @@ -0,0 +1,258 @@ +package dbauthz_test + +import ( + "encoding/json" + "testing" + + "github.com/google/uuid" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbauthz" + 
"github.com/coder/coder/v2/coderd/database/dbmem" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/testutil" +) + +// TestUpsertCustomRoles verifies creating custom roles cannot escalate permissions. +func TestUpsertCustomRoles(t *testing.T) { + t.Parallel() + + userID := uuid.New() + subjectFromRoles := func(roles rbac.ExpandableRoles) rbac.Subject { + return rbac.Subject{ + FriendlyName: "Test user", + ID: userID.String(), + Roles: roles, + Groups: nil, + Scope: rbac.ScopeAll, + } + } + + canAssignRole := rbac.Role{ + Name: "can-assign", + DisplayName: "", + Site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceAssignRole.Type: {policy.ActionCreate}, + }), + } + + merge := func(u ...interface{}) rbac.Roles { + all := make([]rbac.Role, 0) + for _, v := range u { + v := v + switch t := v.(type) { + case rbac.Role: + all = append(all, t) + case rbac.ExpandableRoles: + all = append(all, must(t.Expand())...) + case string: + all = append(all, must(rbac.RoleByName(t))) + default: + panic("unknown type") + } + } + + return all + } + + orgID := uuid.New() + testCases := []struct { + name string + + subject rbac.ExpandableRoles + + // Perms to create on new custom role + site []rbac.Permission + org map[string][]rbac.Permission + user []rbac.Permission + errorContains string + }{ + { + // No roles, so no assign role + name: "no-roles", + subject: rbac.RoleNames([]string{}), + errorContains: "forbidden", + }, + { + // This works because the new role has 0 perms + name: "empty", + subject: merge(canAssignRole), + }, + { + name: "mixed-scopes", + subject: merge(canAssignRole, rbac.RoleOwner()), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + org: map[string][]rbac.Permission{ + uuid.New().String(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + errorContains: "cannot 
assign both org and site permissions", + }, + { + name: "multiple-org", + subject: merge(canAssignRole, rbac.RoleOwner()), + org: map[string][]rbac.Permission{ + uuid.New().String(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + uuid.New().String(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + errorContains: "cannot assign permissions to more than 1", + }, + { + name: "invalid-action", + subject: merge(canAssignRole, rbac.RoleOwner()), + site: rbac.Permissions(map[string][]policy.Action{ + // Action does not go with resource + rbac.ResourceWorkspace.Type: {policy.ActionViewInsights}, + }), + errorContains: "invalid action", + }, + { + name: "invalid-resource", + subject: merge(canAssignRole, rbac.RoleOwner()), + site: rbac.Permissions(map[string][]policy.Action{ + "foobar": {policy.ActionViewInsights}, + }), + errorContains: "invalid resource", + }, + { + // Not allowing these at this time. 
+ name: "negative-permission", + subject: merge(canAssignRole, rbac.RoleOwner()), + site: []rbac.Permission{ + { + Negate: true, + ResourceType: rbac.ResourceWorkspace.Type, + Action: policy.ActionRead, + }, + }, + errorContains: "no negative permissions", + }, + { + name: "wildcard", // not allowed + subject: merge(canAssignRole, rbac.RoleOwner()), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.WildcardSymbol}, + }), + errorContains: "no wildcard symbols", + }, + // escalation checks + { + name: "read-workspace-escalation", + subject: merge(canAssignRole), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + errorContains: "not allowed to grant this permission", + }, + { + name: "read-workspace-outside-org", + subject: merge(canAssignRole, rbac.RoleOrgAdmin(orgID)), + org: map[string][]rbac.Permission{ + // The org admin is for a different org + uuid.NewString(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + errorContains: "not allowed to grant this permission", + }, + { + name: "user-escalation", + // These roles do not grant user perms + subject: merge(canAssignRole, rbac.RoleOrgAdmin(orgID)), + user: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + errorContains: "not allowed to grant this permission", + }, + { + name: "template-admin-escalation", + subject: merge(canAssignRole, rbac.RoleTemplateAdmin()), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, // ok! + rbac.ResourceDeploymentConfig.Type: {policy.ActionUpdate}, // not ok! + }), + user: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, // ok! + }), + errorContains: "deployment_config", + }, + // ok! 
+ { + name: "read-workspace-template-admin", + subject: merge(canAssignRole, rbac.RoleTemplateAdmin()), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + { + name: "read-workspace-in-org", + subject: merge(canAssignRole, rbac.RoleOrgAdmin(orgID)), + org: map[string][]rbac.Permission{ + // Org admin of this org, this is ok! + orgID.String(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + }, + { + name: "user-perms", + // This is weird, but is ok + subject: merge(canAssignRole, rbac.RoleMember()), + user: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + { + name: "site+user-perms", + subject: merge(canAssignRole, rbac.RoleMember(), rbac.RoleTemplateAdmin()), + site: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + user: rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }), + }, + } + + for _, tc := range testCases { + tc := tc + + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + db := dbmem.New() + rec := &coderdtest.RecordingAuthorizer{ + Wrapped: rbac.NewAuthorizer(prometheus.NewRegistry()), + } + az := dbauthz.New(db, rec, slog.Make(), coderdtest.AccessControlStorePointer()) + + subject := subjectFromRoles(tc.subject) + ctx := testutil.Context(t, testutil.WaitMedium) + ctx = dbauthz.As(ctx, subject) + + _, err := az.UpsertCustomRole(ctx, database.UpsertCustomRoleParams{ + Name: "test-role", + DisplayName: "", + SitePermissions: must(json.Marshal(tc.site)), + OrgPermissions: must(json.Marshal(tc.org)), + UserPermissions: must(json.Marshal(tc.user)), + }) + if tc.errorContains != "" { + require.ErrorContains(t, err, tc.errorContains) + } else { + require.NoError(t, err) + } + }) + } +} diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go 
index a096346f57064..92b9637e9ddf9 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -6,6 +6,7 @@ import ( "encoding/json" "errors" "fmt" + "strings" "sync/atomic" "time" @@ -17,6 +18,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/rbac/rolestore" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" @@ -580,6 +582,7 @@ func (q *querier) authorizeUpdateFileTemplate(ctx context.Context, file database } } +// canAssignRoles handles assigning built in and custom roles. func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, removed []string) error { actor, ok := ActorFromContext(ctx) if !ok { @@ -594,6 +597,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r } grantedRoles := append(added, removed...) + customRoles := make([]string, 0) // Validate that the roles being assigned are valid. for _, r := range grantedRoles { _, isOrgRole := rbac.IsOrgRole(r) @@ -606,7 +610,34 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r // All roles should be valid roles if _, err := rbac.RoleByName(r); err != nil { - return xerrors.Errorf("%q is not a supported role", r) + customRoles = append(customRoles, r) + } + } + + customRolesMap := make(map[string]struct{}, len(customRoles)) + for _, r := range customRoles { + customRolesMap[r] = struct{}{} + } + + if len(customRoles) > 0 { + expandedCustomRoles, err := q.CustomRolesByName(ctx, customRoles) + if err != nil { + return xerrors.Errorf("fetching custom roles: %w", err) + } + + // If the lists are not identical, then have a problem, as some roles + // provided do no exist. + if len(customRoles) != len(expandedCustomRoles) { + for _, role := range customRoles { + // Stop at the first one found. 
We could make a better error that + // returns them all, but then someone could pass in a large list to make us do + // a lot of loop iterations. + if !slices.ContainsFunc(expandedCustomRoles, func(customRole database.CustomRole) bool { + return strings.EqualFold(customRole.Name, role) + }) { + return xerrors.Errorf("%q is not a supported role", role) + } + } } } @@ -623,6 +654,11 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r } for _, roleName := range grantedRoles { + if _, isCustom := customRolesMap[roleName]; isCustom { + // For now, use a constant name so our static assign map still works. + roleName = rbac.CustomSiteRole() + } + if !rbac.CanAssignRole(actor.Roles, roleName) { return xerrors.Errorf("not authorized to assign role %q", roleName) } @@ -704,6 +740,31 @@ func (q *querier) authorizeTemplateInsights(ctx context.Context, templateIDs []u return nil } +// customRoleEscalationCheck checks to make sure the caller has every permission they are adding +// to a custom role. This prevents permission escalation. +func (q *querier) customRoleEscalationCheck(ctx context.Context, actor rbac.Subject, perm rbac.Permission, object rbac.Object) error { + if perm.Negate { + // Users do not need negative permissions. We can include it later if required. + return xerrors.Errorf("invalid permission for action=%q type=%q, no negative permissions", perm.Action, perm.ResourceType) + } + + if perm.Action == policy.WildcardSymbol || perm.ResourceType == policy.WildcardSymbol { + // It is possible to check for supersets with wildcards, but wildcards can also + // include resources and actions that do not exist today. Custom roles should only be allowed + // to include permissions for existing resources. 
+ return xerrors.Errorf("invalid permission for action=%q type=%q, no wildcard symbols", perm.Action, perm.ResourceType) + } + + object.Type = perm.ResourceType + if err := q.auth.Authorize(ctx, actor, perm.Action, object); err != nil { + // This is a forbidden error, but we can provide more context. Since the user can create a role, just not + // with this perm. + return xerrors.Errorf("invalid permission for action=%q type=%q, not allowed to grant this permission", perm.Action, perm.ResourceType) + } + + return nil +} + func (q *querier) AcquireLock(ctx context.Context, id int64) error { return q.db.AcquireLock(ctx, id) } @@ -773,6 +834,13 @@ func (q *querier) CleanTailnetTunnels(ctx context.Context) error { return q.db.CleanTailnetTunnels(ctx) } +func (q *querier) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]database.CustomRole, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAssignRole); err != nil { + return nil, err + } + return q.db.CustomRolesByName(ctx, lookupRoles) +} + func (q *querier) DeleteAPIKeyByID(ctx context.Context, id string) error { return deleteQ(q.log, q.auth, q.db.GetAPIKeyByID, q.db.DeleteAPIKeyByID)(ctx, id) } @@ -3291,6 +3359,78 @@ func (q *querier) UpsertApplicationName(ctx context.Context, value string) error return q.db.UpsertApplicationName(ctx, value) } +// UpsertCustomRole does a series of authz checks to protect custom roles. +// - Check custom roles are valid for their resource types + actions +// - Check the actor can create the custom role +// - Check the custom role does not grant perms the actor does not have +// - Prevent negative perms +// - Prevent roles with site and org permissions. +func (q *querier) UpsertCustomRole(ctx context.Context, arg database.UpsertCustomRoleParams) (database.CustomRole, error) { + act, ok := ActorFromContext(ctx) + if !ok { + return database.CustomRole{}, NoActorError + } + + // TODO: If this is an org role, check the org assign role type. 
+ if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceAssignRole); err != nil { + return database.CustomRole{}, err + } + + // There is quite a bit of validation we should do here. First, let's make sure the json data is correct. + rbacRole, err := rolestore.ConvertDBRole(database.CustomRole{ + Name: arg.Name, + DisplayName: arg.DisplayName, + SitePermissions: arg.SitePermissions, + OrgPermissions: arg.OrgPermissions, + UserPermissions: arg.UserPermissions, + }) + if err != nil { + return database.CustomRole{}, xerrors.Errorf("invalid args: %w", err) + } + + err = rbacRole.Valid() + if err != nil { + return database.CustomRole{}, xerrors.Errorf("invalid role: %w", err) + } + + if len(rbacRole.Org) > 0 && len(rbacRole.Site) > 0 { + // This is a choice to keep roles simple. If we allow mixing site and org scoped perms, then knowing who can + // do what gets more complicated. + return database.CustomRole{}, xerrors.Errorf("invalid custom role, cannot assign both org and site permissions at the same time") + } + + if len(rbacRole.Org) > 1 { + // Again to avoid more complexity in our roles + return database.CustomRole{}, xerrors.Errorf("invalid custom role, cannot assign permissions to more than 1 org at a time") + } + + // Prevent escalation + for _, sitePerm := range rbacRole.Site { + err := q.customRoleEscalationCheck(ctx, act, sitePerm, rbac.Object{Type: sitePerm.ResourceType}) + if err != nil { + return database.CustomRole{}, xerrors.Errorf("site permission: %w", err) + } + } + + for orgID, perms := range rbacRole.Org { + for _, orgPerm := range perms { + err := q.customRoleEscalationCheck(ctx, act, orgPerm, rbac.Object{OrgID: orgID, Type: orgPerm.ResourceType}) + if err != nil { + return database.CustomRole{}, xerrors.Errorf("org=%q: %w", orgID, err) + } + } + } + + for _, userPerm := range rbacRole.User { + err := q.customRoleEscalationCheck(ctx, act, userPerm, rbac.Object{Type: userPerm.ResourceType, Owner: act.ID}) + if err != nil { + return 
database.CustomRole{}, xerrors.Errorf("user permission: %w", err) + } + } + + return q.db.UpsertCustomRole(ctx, arg) +} + func (q *querier) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index e8dcb2f8ee5bc..7d04a0d20a52e 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -1167,6 +1167,67 @@ func (s *MethodTestSuite) TestUser() { b := dbgen.User(s.T(), db, database.User{}) check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(a.ID, b.ID)) })) + s.Run("CustomRolesByName", s.Subtest(func(db database.Store, check *expects) { + check.Args([]string{}).Asserts(rbac.ResourceAssignRole, policy.ActionRead).Returns([]database.CustomRole{}) + })) + s.Run("Blank/UpsertCustomRole", s.Subtest(func(db database.Store, check *expects) { + // Blank is no perms in the role + check.Args(database.UpsertCustomRoleParams{ + Name: "test", + DisplayName: "Test Name", + SitePermissions: []byte(`[]`), + OrgPermissions: []byte(`{}`), + UserPermissions: []byte(`[]`), + }).Asserts(rbac.ResourceAssignRole, policy.ActionCreate) + })) + s.Run("SitePermissions/UpsertCustomRole", s.Subtest(func(db database.Store, check *expects) { + check.Args(database.UpsertCustomRoleParams{ + Name: "test", + DisplayName: "Test Name", + SitePermissions: must(json.Marshal(rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceTemplate.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete, policy.ActionViewInsights}, + }))), + OrgPermissions: []byte(`{}`), + UserPermissions: must(json.Marshal(rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }))), + }).Asserts( + // First check + rbac.ResourceAssignRole, policy.ActionCreate, 
+ // Escalation checks + rbac.ResourceTemplate, policy.ActionCreate, + rbac.ResourceTemplate, policy.ActionRead, + rbac.ResourceTemplate, policy.ActionUpdate, + rbac.ResourceTemplate, policy.ActionDelete, + rbac.ResourceTemplate, policy.ActionViewInsights, + + rbac.ResourceWorkspace.WithOwner(testActorID.String()), policy.ActionRead, + ) + })) + s.Run("OrgPermissions/UpsertCustomRole", s.Subtest(func(db database.Store, check *expects) { + orgID := uuid.New() + check.Args(database.UpsertCustomRoleParams{ + Name: "test", + DisplayName: "Test Name", + SitePermissions: []byte(`[]`), + OrgPermissions: must(json.Marshal(map[string][]rbac.Permission{ + orgID.String(): rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceTemplate.Type: {policy.ActionCreate, policy.ActionRead}, + }), + })), + UserPermissions: must(json.Marshal(rbac.Permissions(map[string][]policy.Action{ + rbac.ResourceWorkspace.Type: {policy.ActionRead}, + }))), + }).Asserts( + // First check + rbac.ResourceAssignRole, policy.ActionCreate, + // Escalation checks + rbac.ResourceTemplate.InOrg(orgID), policy.ActionCreate, + rbac.ResourceTemplate.InOrg(orgID), policy.ActionRead, + + rbac.ResourceWorkspace.WithOwner(testActorID.String()), policy.ActionRead, + ) + })) } func (s *MethodTestSuite) TestWorkspace() { diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go index 16829cdef669e..3385ca3f3240c 100644 --- a/coderd/database/dbauthz/setup_test.go +++ b/coderd/database/dbauthz/setup_test.go @@ -99,6 +99,8 @@ func (s *MethodTestSuite) TearDownSuite() { }) } +var testActorID = uuid.New() + // Subtest is a helper function that returns a function that can be passed to // s.Run(). This function will run the test case for the method that is being // tested. The check parameter is used to assert the results of the method. 
@@ -120,7 +122,7 @@ func (s *MethodTestSuite) Subtest(testCaseF func(db database.Store, check *expec } az := dbauthz.New(db, rec, slog.Make(), coderdtest.AccessControlStorePointer()) actor := rbac.Subject{ - ID: uuid.NewString(), + ID: testActorID.String(), Roles: rbac.RoleNames{rbac.RoleOwner()}, Groups: []string{}, Scope: rbac.ScopeAll, diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index d1bbd6df49492..ea896b28641f4 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -75,6 +75,7 @@ func New() database.Store { workspaces: make([]database.Workspace, 0), licenses: make([]database.License, 0), workspaceProxies: make([]database.WorkspaceProxy, 0), + customRoles: make([]database.CustomRole, 0), locks: map[int64]struct{}{}, }, } @@ -179,6 +180,7 @@ type data struct { workspaceResources []database.WorkspaceResource workspaces []database.Workspace workspaceProxies []database.WorkspaceProxy + customRoles []database.CustomRole // Locks is a map of lock names. Any keys within the map are currently // locked. 
locks map[int64]struct{} @@ -1172,6 +1174,23 @@ func (*FakeQuerier) CleanTailnetTunnels(context.Context) error { return ErrUnimplemented } +func (q *FakeQuerier) CustomRolesByName(_ context.Context, lookupRoles []string) ([]database.CustomRole, error) { + q.mutex.Lock() + defer q.mutex.Unlock() + + found := make([]database.CustomRole, 0) + for _, role := range q.data.customRoles { + if slices.ContainsFunc(lookupRoles, func(s string) bool { + return strings.EqualFold(s, role.Name) + }) { + role := role + found = append(found, role) + } + } + + return found, nil +} + func (q *FakeQuerier) DeleteAPIKeyByID(_ context.Context, id string) error { q.mutex.Lock() defer q.mutex.Unlock() @@ -8258,6 +8277,39 @@ func (q *FakeQuerier) UpsertApplicationName(_ context.Context, data string) erro return nil } +func (q *FakeQuerier) UpsertCustomRole(_ context.Context, arg database.UpsertCustomRoleParams) (database.CustomRole, error) { + err := validateDatabaseType(arg) + if err != nil { + return database.CustomRole{}, err + } + + q.mutex.RLock() + defer q.mutex.RUnlock() + for i := range q.customRoles { + if strings.EqualFold(q.customRoles[i].Name, arg.Name) { + q.customRoles[i].DisplayName = arg.DisplayName + q.customRoles[i].SitePermissions = arg.SitePermissions + q.customRoles[i].OrgPermissions = arg.OrgPermissions + q.customRoles[i].UserPermissions = arg.UserPermissions + q.customRoles[i].UpdatedAt = dbtime.Now() + return q.customRoles[i], nil + } + } + + role := database.CustomRole{ + Name: arg.Name, + DisplayName: arg.DisplayName, + SitePermissions: arg.SitePermissions, + OrgPermissions: arg.OrgPermissions, + UserPermissions: arg.UserPermissions, + CreatedAt: dbtime.Now(), + UpdatedAt: dbtime.Now(), + } + q.customRoles = append(q.customRoles, role) + + return role, nil +} + func (q *FakeQuerier) UpsertDefaultProxy(_ context.Context, arg database.UpsertDefaultProxyParams) error { q.defaultProxyDisplayName = arg.DisplayName q.defaultProxyIconURL = arg.IconUrl diff --git 
a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 77ebfd6718757..4e0c2b8fed158 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -144,6 +144,13 @@ func (m metricsStore) CleanTailnetTunnels(ctx context.Context) error { return r0 } +func (m metricsStore) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.CustomRolesByName(ctx, lookupRoles) + m.queryLatencies.WithLabelValues("CustomRolesByName").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) DeleteAPIKeyByID(ctx context.Context, id string) error { start := time.Now() err := m.s.DeleteAPIKeyByID(ctx, id) @@ -2153,6 +2160,13 @@ func (m metricsStore) UpsertApplicationName(ctx context.Context, value string) e return r0 } +func (m metricsStore) UpsertCustomRole(ctx context.Context, arg database.UpsertCustomRoleParams) (database.CustomRole, error) { + start := time.Now() + r0, r1 := m.s.UpsertCustomRole(ctx, arg) + m.queryLatencies.WithLabelValues("UpsertCustomRole").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) UpsertDefaultProxy(ctx context.Context, arg database.UpsertDefaultProxyParams) error { start := time.Now() r0 := m.s.UpsertDefaultProxy(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index e651c8301c933..69558e884c6a6 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -173,6 +173,21 @@ func (mr *MockStoreMockRecorder) CleanTailnetTunnels(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanTailnetTunnels", reflect.TypeOf((*MockStore)(nil).CleanTailnetTunnels), arg0) } +// CustomRolesByName mocks base method. 
+func (m *MockStore) CustomRolesByName(arg0 context.Context, arg1 []string) ([]database.CustomRole, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "CustomRolesByName", arg0, arg1) + ret0, _ := ret[0].([]database.CustomRole) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// CustomRolesByName indicates an expected call of CustomRolesByName. +func (mr *MockStoreMockRecorder) CustomRolesByName(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CustomRolesByName", reflect.TypeOf((*MockStore)(nil).CustomRolesByName), arg0, arg1) +} + // DeleteAPIKeyByID mocks base method. func (m *MockStore) DeleteAPIKeyByID(arg0 context.Context, arg1 string) error { m.ctrl.T.Helper() @@ -4507,6 +4522,21 @@ func (mr *MockStoreMockRecorder) UpsertApplicationName(arg0, arg1 any) *gomock.C return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertApplicationName", reflect.TypeOf((*MockStore)(nil).UpsertApplicationName), arg0, arg1) } +// UpsertCustomRole mocks base method. +func (m *MockStore) UpsertCustomRole(arg0 context.Context, arg1 database.UpsertCustomRoleParams) (database.CustomRole, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertCustomRole", arg0, arg1) + ret0, _ := ret[0].(database.CustomRole) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpsertCustomRole indicates an expected call of UpsertCustomRole. +func (mr *MockStoreMockRecorder) UpsertCustomRole(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertCustomRole", reflect.TypeOf((*MockStore)(nil).UpsertCustomRole), arg0, arg1) +} + // UpsertDefaultProxy mocks base method. 
func (m *MockStore) UpsertDefaultProxy(arg0 context.Context, arg1 database.UpsertDefaultProxyParams) error { m.ctrl.T.Helper() diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index ed400cf82198f..33a9ebbef8139 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -404,6 +404,18 @@ CREATE TABLE audit_logs ( resource_icon text NOT NULL ); +CREATE TABLE custom_roles ( + name text NOT NULL, + display_name text NOT NULL, + site_permissions jsonb DEFAULT '[]'::jsonb NOT NULL, + org_permissions jsonb DEFAULT '{}'::jsonb NOT NULL, + user_permissions jsonb DEFAULT '[]'::jsonb NOT NULL, + created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +COMMENT ON TABLE custom_roles IS 'Custom roles allow dynamic roles expanded at runtime'; + CREATE TABLE dbcrypt_keys ( number integer NOT NULL, active_key_digest text, @@ -1398,6 +1410,9 @@ ALTER TABLE ONLY api_keys ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id); +ALTER TABLE ONLY custom_roles + ADD CONSTRAINT custom_roles_pkey PRIMARY KEY (name); + ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_active_key_digest_key UNIQUE (active_key_digest); @@ -1606,6 +1621,8 @@ CREATE INDEX idx_audit_log_user_id ON audit_logs USING btree (user_id); CREATE INDEX idx_audit_logs_time_desc ON audit_logs USING btree ("time" DESC); +CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name)); + CREATE INDEX idx_organization_member_organization_id_uuid ON organization_members USING btree (organization_id); CREATE INDEX idx_organization_member_user_id_uuid ON organization_members USING btree (user_id); diff --git a/coderd/database/migrations/000209_custom_roles.down.sql b/coderd/database/migrations/000209_custom_roles.down.sql new file mode 100644 index 0000000000000..b0f9b2a8cc76c --- /dev/null +++ b/coderd/database/migrations/000209_custom_roles.down.sql 
@@ -0,0 +1,2 @@ +DROP INDEX IF EXISTS idx_custom_roles_name_lower; +DROP TABLE IF EXISTS custom_roles; diff --git a/coderd/database/migrations/000209_custom_roles.up.sql b/coderd/database/migrations/000209_custom_roles.up.sql new file mode 100644 index 0000000000000..b55788c16b955 --- /dev/null +++ b/coderd/database/migrations/000209_custom_roles.up.sql @@ -0,0 +1,26 @@ +CREATE TABLE custom_roles ( + -- name is globally unique. Org scoped roles have their orgid appended + -- like: "name":"organization-admin:bbe8c156-c61e-4d36-b91e-697c6b1477e8" + name text primary key, + -- display_name is the actual name of the role displayed to the user. + display_name text NOT NULL, + + -- Unfortunately these values are schemaless json documents. + -- If there was a permission table for these, that would involve + -- many necessary joins to accomplish this simple json. + + -- site_permissions is '[]Permission' + site_permissions jsonb NOT NULL default '[]', + -- org_permissions is 'map[][]Permission' + org_permissions jsonb NOT NULL default '{}', + -- user_permissions is '[]Permission' + user_permissions jsonb NOT NULL default '[]', + + -- extra convenience meta data. 
+ created_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at timestamp with time zone NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +-- Ensure no case variants of the same roles +CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name)); +COMMENT ON TABLE custom_roles IS 'Custom roles allow dynamic roles expanded at runtime'; diff --git a/coderd/database/migrations/testdata/fixtures/000209_custom_roles.up.sql b/coderd/database/migrations/testdata/fixtures/000209_custom_roles.up.sql new file mode 100644 index 0000000000000..c63e119523624 --- /dev/null +++ b/coderd/database/migrations/testdata/fixtures/000209_custom_roles.up.sql @@ -0,0 +1,20 @@ +INSERT INTO + custom_roles ( + name, + display_name, + site_permissions, + org_permissions, + user_permissions, + created_at, + updated_at +) +VALUES + ( + 'custom-role', + 'Custom Role', + '[{"negate":false,"resource_type":"deployment_config","action":"update"},{"negate":false,"resource_type":"workspace","action":"read"}]', + '{}', + '[{"negate":false,"resource_type":"workspace","action":"read"}]', + date_trunc('hour', NOW()), + date_trunc('hour', NOW()) + '30 minute'::interval + ); diff --git a/coderd/database/models.go b/coderd/database/models.go index 18587b05ade1a..33cf1c607939c 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -1781,6 +1781,17 @@ type AuditLog struct { ResourceIcon string `db:"resource_icon" json:"resource_icon"` } +// Custom roles allow dynamic roles expanded at runtime +type CustomRole struct { + Name string `db:"name" json:"name"` + DisplayName string `db:"display_name" json:"display_name"` + SitePermissions json.RawMessage `db:"site_permissions" json:"site_permissions"` + OrgPermissions json.RawMessage `db:"org_permissions" json:"org_permissions"` + UserPermissions json.RawMessage `db:"user_permissions" json:"user_permissions"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" 
json:"updated_at"` +} + // A table used to store the keys used to encrypt the database. type DBCryptKey struct { // An integer used to identify the key. diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 405f86bf47688..01615a58e06bd 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -48,6 +48,7 @@ type sqlcQuerier interface { CleanTailnetCoordinators(ctx context.Context) error CleanTailnetLostPeers(ctx context.Context) error CleanTailnetTunnels(ctx context.Context) error + CustomRolesByName(ctx context.Context, lookupRoles []string) ([]CustomRole, error) DeleteAPIKeyByID(ctx context.Context, id string) error DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error DeleteAllTailnetClientSubscriptions(ctx context.Context, arg DeleteAllTailnetClientSubscriptionsParams) error @@ -413,6 +414,7 @@ type sqlcQuerier interface { UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg UpdateWorkspacesDormantDeletingAtByTemplateIDParams) error UpsertAppSecurityKey(ctx context.Context, value string) error UpsertApplicationName(ctx context.Context, value string) error + UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleParams) (CustomRole, error) // The default proxy is implied and not actually stored in the database. // So we need to store it's configuration here for display purposes. // The functional values are immutable and controlled implicitly. 
diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index f4e7d4d70e4b6..7a0b60478f79f 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5553,6 +5553,107 @@ func (q *sqlQuerier) UpdateReplica(ctx context.Context, arg UpdateReplicaParams) return i, err } +const customRolesByName = `-- name: CustomRolesByName :many +SELECT + name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at +FROM + custom_roles +WHERE + -- Case insensitive + name ILIKE ANY($1 :: text []) +` + +func (q *sqlQuerier) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]CustomRole, error) { + rows, err := q.db.QueryContext(ctx, customRolesByName, pq.Array(lookupRoles)) + if err != nil { + return nil, err + } + defer rows.Close() + var items []CustomRole + for rows.Next() { + var i CustomRole + if err := rows.Scan( + &i.Name, + &i.DisplayName, + &i.SitePermissions, + &i.OrgPermissions, + &i.UserPermissions, + &i.CreatedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const upsertCustomRole = `-- name: UpsertCustomRole :one +INSERT INTO + custom_roles ( + name, + display_name, + site_permissions, + org_permissions, + user_permissions, + created_at, + updated_at +) +VALUES ( + -- Always force lowercase names + lower($1), + $2, + $3, + $4, + $5, + now(), + now() + ) +ON CONFLICT (name) + DO UPDATE SET + display_name = $2, + site_permissions = $3, + org_permissions = $4, + user_permissions = $5, + updated_at = now() +RETURNING name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at +` + +type UpsertCustomRoleParams struct { + Name string `db:"name" json:"name"` + DisplayName string `db:"display_name" json:"display_name"` + SitePermissions json.RawMessage 
`db:"site_permissions" json:"site_permissions"` + OrgPermissions json.RawMessage `db:"org_permissions" json:"org_permissions"` + UserPermissions json.RawMessage `db:"user_permissions" json:"user_permissions"` +} + +func (q *sqlQuerier) UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleParams) (CustomRole, error) { + row := q.db.QueryRowContext(ctx, upsertCustomRole, + arg.Name, + arg.DisplayName, + arg.SitePermissions, + arg.OrgPermissions, + arg.UserPermissions, + ) + var i CustomRole + err := row.Scan( + &i.Name, + &i.DisplayName, + &i.SitePermissions, + &i.OrgPermissions, + &i.UserPermissions, + &i.CreatedAt, + &i.UpdatedAt, + ) + return i, err +} + const getAppSecurityKey = `-- name: GetAppSecurityKey :one SELECT value FROM site_configs WHERE key = 'app_signing_key' ` diff --git a/coderd/database/queries/roles.sql b/coderd/database/queries/roles.sql new file mode 100644 index 0000000000000..30ec437e1814e --- /dev/null +++ b/coderd/database/queries/roles.sql @@ -0,0 +1,41 @@ +-- name: CustomRolesByName :many +SELECT + * +FROM + custom_roles +WHERE + -- Case insensitive + name ILIKE ANY(@lookup_roles :: text []) +; + + +-- name: UpsertCustomRole :one +INSERT INTO + custom_roles ( + name, + display_name, + site_permissions, + org_permissions, + user_permissions, + created_at, + updated_at +) +VALUES ( + -- Always force lowercase names + lower(@name), + @display_name, + @site_permissions, + @org_permissions, + @user_permissions, + now(), + now() + ) +ON CONFLICT (name) + DO UPDATE SET + display_name = @display_name, + site_permissions = @site_permissions, + org_permissions = @org_permissions, + user_permissions = @user_permissions, + updated_at = now() +RETURNING * +; diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index 9db8af72c8cf6..9dfc8c124aa75 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -9,6 +9,7 @@ const ( UniqueAgentStatsPkey UniqueConstraint = 
"agent_stats_pkey" // ALTER TABLE ONLY workspace_agent_stats ADD CONSTRAINT agent_stats_pkey PRIMARY KEY (id); UniqueAPIKeysPkey UniqueConstraint = "api_keys_pkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id); UniqueAuditLogsPkey UniqueConstraint = "audit_logs_pkey" // ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id); + UniqueCustomRolesPkey UniqueConstraint = "custom_roles_pkey" // ALTER TABLE ONLY custom_roles ADD CONSTRAINT custom_roles_pkey PRIMARY KEY (name); UniqueDbcryptKeysActiveKeyDigestKey UniqueConstraint = "dbcrypt_keys_active_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_active_key_digest_key UNIQUE (active_key_digest); UniqueDbcryptKeysPkey UniqueConstraint = "dbcrypt_keys_pkey" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_pkey PRIMARY KEY (number); UniqueDbcryptKeysRevokedKeyDigestKey UniqueConstraint = "dbcrypt_keys_revoked_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_revoked_key_digest_key UNIQUE (revoked_key_digest); @@ -74,6 +75,7 @@ const ( UniqueWorkspaceResourcesPkey UniqueConstraint = "workspace_resources_pkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_pkey PRIMARY KEY (id); UniqueWorkspacesPkey UniqueConstraint = "workspaces_pkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id); UniqueIndexAPIKeyName UniqueConstraint = "idx_api_key_name" // CREATE UNIQUE INDEX idx_api_key_name ON api_keys USING btree (user_id, token_name) WHERE (login_type = 'token'::login_type); + UniqueIndexCustomRolesNameLower UniqueConstraint = "idx_custom_roles_name_lower" // CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name)); UniqueIndexOrganizationName UniqueConstraint = "idx_organization_name" // CREATE UNIQUE INDEX idx_organization_name ON organizations USING btree (name); UniqueIndexOrganizationNameLower UniqueConstraint = 
"idx_organization_name_lower" // CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name)); UniqueIndexProvisionerDaemonsNameOwnerKey UniqueConstraint = "idx_provisioner_daemons_name_owner_key" // CREATE UNIQUE INDEX idx_provisioner_daemons_name_owner_key ON provisioner_daemons USING btree (name, lower(COALESCE((tags ->> 'owner'::text), ''::text))); diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index 57ec0982a15ae..9ab848d795b1c 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -37,7 +37,8 @@ var ( // ResourceAssignRole // Valid Actions // - "ActionAssign" :: ability to assign roles - // - "ActionDelete" :: ability to delete roles + // - "ActionCreate" :: ability to create/delete/edit custom roles + // - "ActionDelete" :: ability to unassign roles // - "ActionRead" :: view what roles are assignable ResourceAssignRole = Object{ Type: "assign_role", diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index 26afb0e011ca7..2d3213264a514 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -209,7 +209,8 @@ var RBACPermissions = map[string]PermissionDefinition{ Actions: map[Action]ActionDefinition{ ActionAssign: actDef("ability to assign roles"), ActionRead: actDef("view what roles are assignable"), - ActionDelete: actDef("ability to delete roles"), + ActionDelete: actDef("ability to unassign roles"), + ActionCreate: actDef("ability to create/delete/edit custom roles"), }, }, "assign_org_role": { diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index fbac8ddf5379d..7086e2fe0e2a4 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -20,6 +20,10 @@ const ( templateAdmin string = "template-admin" userAdmin string = "user-admin" auditor string = "auditor" + // customSiteRole is a placeholder for all custom site roles. + // This is used for what roles can assign other roles. 
+ // TODO: Make this more dynamic to allow other roles to grant. + customSiteRole string = "custom-site-role" orgAdmin string = "organization-admin" orgMember string = "organization-member" @@ -52,6 +56,8 @@ func RoleOwner() string { return roleName(owner, "") } +func CustomSiteRole() string { return roleName(customSiteRole, "") } + func RoleTemplateAdmin() string { return roleName(templateAdmin, "") } @@ -320,22 +326,24 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // map[actor_role][assign_role] var assignRoles = map[string]map[string]bool{ "system": { - owner: true, - auditor: true, - member: true, - orgAdmin: true, - orgMember: true, - templateAdmin: true, - userAdmin: true, + owner: true, + auditor: true, + member: true, + orgAdmin: true, + orgMember: true, + templateAdmin: true, + userAdmin: true, + customSiteRole: true, }, owner: { - owner: true, - auditor: true, - member: true, - orgAdmin: true, - orgMember: true, - templateAdmin: true, - userAdmin: true, + owner: true, + auditor: true, + member: true, + orgAdmin: true, + orgMember: true, + templateAdmin: true, + userAdmin: true, + customSiteRole: true, }, userAdmin: { member: true, diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index fe589449b8884..d90f045284c5b 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -248,6 +248,15 @@ func TestRolePermissions(t *testing.T) { false: {otherOrgAdmin, otherOrgMember, memberMe, userAdmin}, }, }, + { + Name: "CreateCustomRole", + Actions: []policy.Action{policy.ActionCreate}, + Resource: rbac.ResourceAssignRole, + AuthorizeMap: map[bool][]authSubject{ + true: {owner}, + false: {userAdmin, orgAdmin, orgMemberMe, otherOrgAdmin, otherOrgMember, memberMe, templateAdmin}, + }, + }, { Name: "RoleAssignment", Actions: []policy.Action{policy.ActionAssign, policy.ActionDelete}, @@ -380,7 +389,7 @@ func TestRolePermissions(t *testing.T) { }, // Some admin style resources { - Name: "Licences", + Name: "Licenses", Actions: 
[]policy.Action{policy.ActionCreate, policy.ActionRead, policy.ActionDelete}, Resource: rbac.ResourceLicense, AuthorizeMap: map[bool][]authSubject{ diff --git a/coderd/rbac/rolestore/rolestore.go b/coderd/rbac/rolestore/rolestore.go new file mode 100644 index 0000000000000..5cf69bcd41fde --- /dev/null +++ b/coderd/rbac/rolestore/rolestore.go @@ -0,0 +1,37 @@ +package rolestore + +import ( + "encoding/json" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/rbac" +) + +func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { + role := rbac.Role{ + Name: dbRole.Name, + DisplayName: dbRole.DisplayName, + Site: nil, + Org: nil, + User: nil, + } + + err := json.Unmarshal(dbRole.SitePermissions, &role.Site) + if err != nil { + return role, xerrors.Errorf("unmarshal site permissions: %w", err) + } + + err = json.Unmarshal(dbRole.OrgPermissions, &role.Org) + if err != nil { + return role, xerrors.Errorf("unmarshal org permissions: %w", err) + } + + err = json.Unmarshal(dbRole.UserPermissions, &role.User) + if err != nil { + return role, xerrors.Errorf("unmarshal user permissions: %w", err) + } + + return role, nil +} From 85de0e966d8ae45b3939dd8f9b49f007a8ef51de Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Thu, 16 May 2024 13:42:42 -0500 Subject: [PATCH 068/149] chore: fix `TestMeasureLatency/MeasureLatencyRecvTimeout` flake (#13301) --- Makefile | 3 + coderd/database/pubsub/psmock/doc.go | 4 + coderd/database/pubsub/psmock/psmock.go | 98 +++++++++++++++++++++ coderd/database/pubsub/pubsub_linux_test.go | 20 +++-- 4 files changed, 117 insertions(+), 8 deletions(-) create mode 100644 coderd/database/pubsub/psmock/doc.go create mode 100644 coderd/database/pubsub/psmock/psmock.go diff --git a/Makefile b/Makefile index a12f90db05214..874ae8ee56abc 100644 --- a/Makefile +++ b/Makefile @@ -558,6 +558,9 @@ coderd/database/querier.go: coderd/database/sqlc.yaml coderd/database/dump.sql $ 
coderd/database/dbmock/dbmock.go: coderd/database/db.go coderd/database/querier.go go generate ./coderd/database/dbmock/ +coderd/database/pubsub/psmock/psmock.go: coderd/database/pubsub/pubsub.go + go generate ./coderd/database/pubsub/psmock + tailnet/tailnettest/coordinatormock.go tailnet/tailnettest/multiagentmock.go tailnet/tailnettest/coordinateemock.go: tailnet/coordinator.go tailnet/multiagent.go go generate ./tailnet/tailnettest/ diff --git a/coderd/database/pubsub/psmock/doc.go b/coderd/database/pubsub/psmock/doc.go new file mode 100644 index 0000000000000..62224ef0bb86e --- /dev/null +++ b/coderd/database/pubsub/psmock/doc.go @@ -0,0 +1,4 @@ +// package psmock contains a mocked implementation of the pubsub.Pubsub interface for use in tests +package psmock + +//go:generate mockgen -destination ./psmock.go -package psmock github.com/coder/coder/v2/coderd/database/pubsub Pubsub diff --git a/coderd/database/pubsub/psmock/psmock.go b/coderd/database/pubsub/psmock/psmock.go new file mode 100644 index 0000000000000..6f5841f758ab0 --- /dev/null +++ b/coderd/database/pubsub/psmock/psmock.go @@ -0,0 +1,98 @@ +// Code generated by MockGen. DO NOT EDIT. +// Source: github.com/coder/coder/v2/coderd/database/pubsub (interfaces: Pubsub) +// +// Generated by this command: +// +// mockgen -destination ./psmock.go -package psmock github.com/coder/coder/v2/coderd/database/pubsub Pubsub +// + +// Package psmock is a generated GoMock package. +package psmock + +import ( + reflect "reflect" + + pubsub "github.com/coder/coder/v2/coderd/database/pubsub" + gomock "go.uber.org/mock/gomock" +) + +// MockPubsub is a mock of Pubsub interface. +type MockPubsub struct { + ctrl *gomock.Controller + recorder *MockPubsubMockRecorder +} + +// MockPubsubMockRecorder is the mock recorder for MockPubsub. +type MockPubsubMockRecorder struct { + mock *MockPubsub +} + +// NewMockPubsub creates a new mock instance. 
+func NewMockPubsub(ctrl *gomock.Controller) *MockPubsub { + mock := &MockPubsub{ctrl: ctrl} + mock.recorder = &MockPubsubMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockPubsub) EXPECT() *MockPubsubMockRecorder { + return m.recorder +} + +// Close mocks base method. +func (m *MockPubsub) Close() error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Close") + ret0, _ := ret[0].(error) + return ret0 +} + +// Close indicates an expected call of Close. +func (mr *MockPubsubMockRecorder) Close() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockPubsub)(nil).Close)) +} + +// Publish mocks base method. +func (m *MockPubsub) Publish(arg0 string, arg1 []byte) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Publish", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// Publish indicates an expected call of Publish. +func (mr *MockPubsubMockRecorder) Publish(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Publish", reflect.TypeOf((*MockPubsub)(nil).Publish), arg0, arg1) +} + +// Subscribe mocks base method. +func (m *MockPubsub) Subscribe(arg0 string, arg1 pubsub.Listener) (func(), error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Subscribe", arg0, arg1) + ret0, _ := ret[0].(func()) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Subscribe indicates an expected call of Subscribe. +func (mr *MockPubsubMockRecorder) Subscribe(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Subscribe", reflect.TypeOf((*MockPubsub)(nil).Subscribe), arg0, arg1) +} + +// SubscribeWithErr mocks base method. 
+func (m *MockPubsub) SubscribeWithErr(arg0 string, arg1 pubsub.ListenerWithErr) (func(), error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "SubscribeWithErr", arg0, arg1) + ret0, _ := ret[0].(func()) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// SubscribeWithErr indicates an expected call of SubscribeWithErr. +func (mr *MockPubsubMockRecorder) SubscribeWithErr(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "SubscribeWithErr", reflect.TypeOf((*MockPubsub)(nil).SubscribeWithErr), arg0, arg1) +} diff --git a/coderd/database/pubsub/pubsub_linux_test.go b/coderd/database/pubsub/pubsub_linux_test.go index efde759096677..203287eb71637 100644 --- a/coderd/database/pubsub/pubsub_linux_test.go +++ b/coderd/database/pubsub/pubsub_linux_test.go @@ -14,14 +14,15 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" "golang.org/x/xerrors" - "cdr.dev/slog/sloggers/sloghuman" - "cdr.dev/slog" + "cdr.dev/slog/sloggers/sloghuman" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/database/pubsub/psmock" "github.com/coder/coder/v2/testutil" ) @@ -339,15 +340,18 @@ func TestMeasureLatency(t *testing.T) { t.Parallel() logger := slogtest.Make(t, nil).Leveled(slog.LevelDebug) - ps, done := newPubsub() - defer done() + ctrl := gomock.NewController(t) + ps := psmock.NewMockPubsub(ctrl) - ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(-time.Hour)) - defer cancel() + ps.EXPECT().Subscribe(gomock.Any(), gomock.Any()).Return(func() {}, (error)(nil)) + ps.EXPECT().Publish(gomock.Any(), gomock.Any()).Return((error)(nil)) + + ctx, cancel := context.WithCancel(context.Background()) + cancel() send, recv, err := pubsub.NewLatencyMeasurer(logger).Measure(ctx, ps) - require.ErrorContains(t, err, 
context.DeadlineExceeded.Error()) - require.Greater(t, send.Seconds(), 0.0) + require.ErrorContains(t, err, context.Canceled.Error()) + require.Greater(t, send.Nanoseconds(), int64(0)) require.EqualValues(t, recv, time.Duration(-1)) }) From ad8c314130fd2d227118991a488bb9356d66c54c Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Thu, 16 May 2024 13:47:47 -0500 Subject: [PATCH 069/149] chore: implement api for creating custom roles (#13298) api endpoint (gated by experiment) to create custom_roles --- coderd/apidoc/docs.go | 87 ++++++++- coderd/apidoc/swagger.json | 87 ++++++++- coderd/audit.go | 4 +- coderd/coderd.go | 4 +- coderd/database/db2sdk/db2sdk.go | 77 +++++++- coderd/database/dbauthz/dbauthz.go | 5 +- coderd/httpmw/apikey.go | 13 +- coderd/members.go | 6 +- coderd/rbac/rolestore/rolestore.go | 109 +++++++++++ coderd/roles.go | 8 +- coderd/roles_test.go | 6 +- coderd/users.go | 23 +-- coderd/users_test.go | 2 +- codersdk/deployment.go | 4 + codersdk/organizations.go | 10 +- codersdk/roles.go | 54 +++++- codersdk/users.go | 2 +- docs/api/members.md | 124 +++++++++++++ docs/api/schemas.md | 151 +++++++++++----- enterprise/coderd/coderd.go | 17 ++ enterprise/coderd/roles.go | 80 +++++++++ enterprise/coderd/roles_test.go | 170 ++++++++++++++++++ enterprise/coderd/userauth.go | 3 +- enterprise/coderd/users.go | 25 +++ site/src/api/api.ts | 2 +- site/src/api/typesGenerated.ts | 26 ++- site/src/pages/UsersPage/UsersPage.test.tsx | 4 +- site/src/pages/UsersPage/UsersPageView.tsx | 2 +- .../UsersPage/UsersTable/EditRolesButton.tsx | 6 +- .../UsersPage/UsersTable/UserRoleCell.tsx | 14 +- .../pages/UsersPage/UsersTable/UsersTable.tsx | 2 +- .../UsersPage/UsersTable/UsersTableBody.tsx | 2 +- site/src/testHelpers/entities.ts | 12 +- 33 files changed, 1009 insertions(+), 132 deletions(-) create mode 100644 enterprise/coderd/roles.go create mode 100644 enterprise/coderd/roles_test.go diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 
8e7fad2c05a49..5883fdb2f47c8 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -4286,6 +4286,32 @@ const docTemplate = `{ } } } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Members" + ], + "summary": "Upsert a custom site-wide role", + "operationId": "upsert-a-custom-site-wide-role", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Role" + } + } + } + } } }, "/users/{user}": { @@ -9547,17 +9573,20 @@ const docTemplate = `{ "enum": [ "example", "auto-fill-parameters", - "multi-organization" + "multi-organization", + "custom-roles" ], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", + "ExperimentCustomRoles": "Allows creating runtime custom roles", "ExperimentExample": "This isn't used for anything.", "ExperimentMultiOrganization": "Requires organization context for interactions, default org is assumed." 
}, "x-enum-varnames": [ "ExperimentExample", "ExperimentAutoFillParameters", - "ExperimentMultiOrganization" + "ExperimentMultiOrganization", + "ExperimentCustomRoles" ] }, "codersdk.ExternalAuth": { @@ -10372,7 +10401,7 @@ const docTemplate = `{ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" } }, "updated_at": { @@ -10452,6 +10481,21 @@ const docTemplate = `{ } } }, + "codersdk.Permission": { + "type": "object", + "properties": { + "action": { + "$ref": "#/definitions/codersdk.RBACAction" + }, + "negate": { + "description": "Negate makes this a negative permission", + "type": "boolean" + }, + "resource_type": { + "$ref": "#/definitions/codersdk.RBACResource" + } + } + }, "codersdk.PostOAuth2ProviderAppRequest": { "type": "object", "required": [ @@ -11094,6 +11138,28 @@ const docTemplate = `{ }, "name": { "type": "string" + }, + "organization_permissions": { + "description": "map[\u003corg_id\u003e] -\u003e Permissions", + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + } + }, + "site_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, + "user_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } } } }, @@ -11160,6 +11226,17 @@ const docTemplate = `{ } } }, + "codersdk.SlimRole": { + "type": "object", + "properties": { + "display_name": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, "codersdk.SupportConfig": { "type": "object", "properties": { @@ -11677,7 +11754,7 @@ const docTemplate = `{ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" } }, "status": { @@ -12214,7 +12291,7 @@ const docTemplate = `{ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" 
} }, "status": { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 582ccc74f22c3..d6684e7cf6c18 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -3775,6 +3775,28 @@ } } } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Members"], + "summary": "Upsert a custom site-wide role", + "operationId": "upsert-a-custom-site-wide-role", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Role" + } + } + } + } } }, "/users/{user}": { @@ -8545,16 +8567,23 @@ }, "codersdk.Experiment": { "type": "string", - "enum": ["example", "auto-fill-parameters", "multi-organization"], + "enum": [ + "example", + "auto-fill-parameters", + "multi-organization", + "custom-roles" + ], "x-enum-comments": { "ExperimentAutoFillParameters": "This should not be taken out of experiments until we have redesigned the feature.", + "ExperimentCustomRoles": "Allows creating runtime custom roles", "ExperimentExample": "This isn't used for anything.", "ExperimentMultiOrganization": "Requires organization context for interactions, default org is assumed." 
}, "x-enum-varnames": [ "ExperimentExample", "ExperimentAutoFillParameters", - "ExperimentMultiOrganization" + "ExperimentMultiOrganization", + "ExperimentCustomRoles" ] }, "codersdk.ExternalAuth": { @@ -9316,7 +9345,7 @@ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" } }, "updated_at": { @@ -9391,6 +9420,21 @@ } } }, + "codersdk.Permission": { + "type": "object", + "properties": { + "action": { + "$ref": "#/definitions/codersdk.RBACAction" + }, + "negate": { + "description": "Negate makes this a negative permission", + "type": "boolean" + }, + "resource_type": { + "$ref": "#/definitions/codersdk.RBACResource" + } + } + }, "codersdk.PostOAuth2ProviderAppRequest": { "type": "object", "required": ["callback_url", "name"], @@ -9996,6 +10040,28 @@ }, "name": { "type": "string" + }, + "organization_permissions": { + "description": "map[\u003corg_id\u003e] -\u003e Permissions", + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + } + }, + "site_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, + "user_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } } } }, @@ -10062,6 +10128,17 @@ } } }, + "codersdk.SlimRole": { + "type": "object", + "properties": { + "display_name": { + "type": "string" + }, + "name": { + "type": "string" + } + } + }, "codersdk.SupportConfig": { "type": "object", "properties": { @@ -10559,7 +10636,7 @@ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" } }, "status": { @@ -11053,7 +11130,7 @@ "roles": { "type": "array", "items": { - "$ref": "#/definitions/codersdk.Role" + "$ref": "#/definitions/codersdk.SlimRole" } }, "status": { diff --git a/coderd/audit.go b/coderd/audit.go index 782c977afcf1c..315913dff49c2 100644 --- 
a/coderd/audit.go +++ b/coderd/audit.go @@ -196,12 +196,12 @@ func (api *API) convertAuditLog(ctx context.Context, dblog database.GetAuditLogs CreatedAt: dblog.UserCreatedAt.Time, Status: codersdk.UserStatus(dblog.UserStatus.UserStatus), }, - Roles: []codersdk.Role{}, + Roles: []codersdk.SlimRole{}, } for _, roleName := range dblog.UserRoles { rbacRole, _ := rbac.RoleByName(roleName) - user.Roles = append(user.Roles, db2sdk.Role(rbacRole)) + user.Roles = append(user.Roles, db2sdk.SlimRole(rbacRole)) } } diff --git a/coderd/coderd.go b/coderd/coderd.go index c0631c0752c0c..80f77d92ee672 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -61,6 +61,7 @@ import ( "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/rbac/rolestore" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/tracing" @@ -631,6 +632,7 @@ func New(options *Options) *API { httpmw.AttachRequestID, httpmw.ExtractRealIP(api.RealIPConfig), httpmw.Logger(api.Logger), + rolestore.CustomRoleMW, prometheusMW, // Build-Version is helpful for debugging. func(next http.Handler) http.Handler { @@ -915,7 +917,7 @@ func New(options *Options) *API { r.Post("/logout", api.postLogout) // These routes query information about site wide roles. 
r.Route("/roles", func(r chi.Router) { - r.Get("/", api.assignableSiteRoles) + r.Get("/", api.AssignableSiteRoles) }) r.Route("/{user}", func(r chi.Router) { r.Use(httpmw.ExtractUserParam(options.Database)) diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 14a24e66316d0..ab6f3aa82b3f6 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -18,6 +18,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/parameter" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps/appurl" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisionersdk/proto" @@ -28,9 +29,25 @@ import ( // database types to slices of codersdk types. // Only works if the function takes a single argument. func List[F any, T any](list []F, convert func(F) T) []T { - into := make([]T, 0, len(list)) - for _, item := range list { - into = append(into, convert(item)) + return ListLazy(convert)(list) +} + +// ListLazy returns the converter function for a list, but does not eval +// the input. Helpful for combining the Map and the List functions. 
+func ListLazy[F any, T any](convert func(F) T) func(list []F) []T { + return func(list []F) []T { + into := make([]T, 0, len(list)) + for _, item := range list { + into = append(into, convert(item)) + } + return into + } +} + +func Map[K comparable, F any, T any](params map[K]F, convert func(F) T) map[K]T { + into := make(map[K]T) + for k, item := range params { + into[k] = convert(item) } return into } @@ -150,12 +167,20 @@ func User(user database.User, organizationIDs []uuid.UUID) codersdk.User { convertedUser := codersdk.User{ ReducedUser: ReducedUser(user), OrganizationIDs: organizationIDs, - Roles: make([]codersdk.Role, 0, len(user.RBACRoles)), + Roles: make([]codersdk.SlimRole, 0, len(user.RBACRoles)), } for _, roleName := range user.RBACRoles { - rbacRole, _ := rbac.RoleByName(roleName) - convertedUser.Roles = append(convertedUser.Roles, Role(rbacRole)) + rbacRole, err := rbac.RoleByName(roleName) + if err == nil { + convertedUser.Roles = append(convertedUser.Roles, SlimRole(rbacRole)) + } else { + // TODO: Fix this for custom roles to display the actual display_name + // Requires plumbing either a cached role value, or the db. 
+ convertedUser.Roles = append(convertedUser.Roles, codersdk.SlimRole{ + Name: roleName, + }) + } } return convertedUser @@ -180,8 +205,8 @@ func Group(group database.Group, members []database.User) codersdk.Group { } } -func Role(role rbac.Role) codersdk.Role { - return codersdk.Role{ +func SlimRole(role rbac.Role) codersdk.SlimRole { + return codersdk.SlimRole{ DisplayName: role.DisplayName, Name: role.Name, } @@ -500,3 +525,39 @@ func ProvisionerDaemon(dbDaemon database.ProvisionerDaemon) codersdk.Provisioner } return result } + +func Role(role rbac.Role) codersdk.Role { + return codersdk.Role{ + Name: role.Name, + DisplayName: role.DisplayName, + SitePermissions: List(role.Site, Permission), + OrganizationPermissions: Map(role.Org, ListLazy(Permission)), + UserPermissions: List(role.Site, Permission), + } +} + +func Permission(permission rbac.Permission) codersdk.Permission { + return codersdk.Permission{ + Negate: permission.Negate, + ResourceType: codersdk.RBACResource(permission.ResourceType), + Action: codersdk.RBACAction(permission.Action), + } +} + +func RoleToRBAC(role codersdk.Role) rbac.Role { + return rbac.Role{ + Name: role.Name, + DisplayName: role.DisplayName, + Site: List(role.SitePermissions, PermissionToRBAC), + Org: Map(role.OrganizationPermissions, ListLazy(PermissionToRBAC)), + User: List(role.UserPermissions, PermissionToRBAC), + } +} + +func PermissionToRBAC(permission codersdk.Permission) rbac.Permission { + return rbac.Permission{ + Negate: permission.Negate, + ResourceType: string(permission.ResourceType), + Action: policy.Action(permission.Action), + } +} diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 92b9637e9ddf9..fe49de61e7e84 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -620,7 +620,8 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r } if len(customRoles) > 0 { - expandedCustomRoles, err := 
q.CustomRolesByName(ctx, customRoles) + // Leverage any custom role cache that might exist. + expandedCustomRoles, err := rolestore.Expand(ctx, q.db, customRoles) if err != nil { return xerrors.Errorf("fetching custom roles: %w", err) } @@ -632,7 +633,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r // Stop at the first one found. We could make a better error that // returns them all, but then someone could pass in a large list to make us do // a lot of loop iterations. - if !slices.ContainsFunc(expandedCustomRoles, func(customRole database.CustomRole) bool { + if !slices.ContainsFunc(expandedCustomRoles, func(customRole rbac.Role) bool { return strings.EqualFold(customRole.Name, role) }) { return xerrors.Errorf("%q is not a supported role", role) diff --git a/coderd/httpmw/apikey.go b/coderd/httpmw/apikey.go index 0bd064bf8e28a..5bb45424b57f9 100644 --- a/coderd/httpmw/apikey.go +++ b/coderd/httpmw/apikey.go @@ -24,6 +24,7 @@ import ( "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/promoauth" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/rolestore" "github.com/coder/coder/v2/codersdk" ) @@ -437,11 +438,21 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon }) } + //nolint:gocritic // Permission to lookup custom roles the user has assigned. + rbacRoles, err := rolestore.Expand(dbauthz.AsSystemRestricted(ctx), cfg.DB, roles.Roles) + if err != nil { + return write(http.StatusInternalServerError, codersdk.Response{ + Message: "Failed to expand authenticated user roles", + Detail: err.Error(), + Validations: nil, + }) + } + // Actor is the user's authorization context. 
actor := rbac.Subject{ FriendlyName: roles.Username, ID: key.UserID.String(), - Roles: rbac.RoleNames(roles.Roles), + Roles: rbacRoles, Groups: roles.Groups, Scope: rbac.ScopeName(key.Scope), }.WithCachedASTValue() diff --git a/coderd/members.go b/coderd/members.go index 038851870cf8d..6a3fe3b2bcb09 100644 --- a/coderd/members.go +++ b/coderd/members.go @@ -82,7 +82,7 @@ func (api *API) updateOrganizationMemberRoles(ctx context.Context, args database } if _, err := rbac.RoleByName(r); err != nil { - return database.OrganizationMember{}, xerrors.Errorf("%q is not a supported role", r) + return database.OrganizationMember{}, xerrors.Errorf("%q is not a supported organization role", r) } } @@ -99,12 +99,12 @@ func convertOrganizationMember(mem database.OrganizationMember) codersdk.Organiz OrganizationID: mem.OrganizationID, CreatedAt: mem.CreatedAt, UpdatedAt: mem.UpdatedAt, - Roles: make([]codersdk.Role, 0, len(mem.Roles)), + Roles: make([]codersdk.SlimRole, 0, len(mem.Roles)), } for _, roleName := range mem.Roles { rbacRole, _ := rbac.RoleByName(roleName) - convertedMember.Roles = append(convertedMember.Roles, db2sdk.Role(rbacRole)) + convertedMember.Roles = append(convertedMember.Roles, db2sdk.SlimRole(rbacRole)) } return convertedMember } diff --git a/coderd/rbac/rolestore/rolestore.go b/coderd/rbac/rolestore/rolestore.go index 5cf69bcd41fde..0ed8b2f12fcdb 100644 --- a/coderd/rbac/rolestore/rolestore.go +++ b/coderd/rbac/rolestore/rolestore.go @@ -1,14 +1,96 @@ package rolestore import ( + "context" "encoding/json" + "net/http" "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/util/syncmap" ) +type customRoleCtxKey struct{} + +// CustomRoleMW adds a custom role cache on the ctx to prevent duplicate +// db fetches. 
+func CustomRoleMW(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + r = r.WithContext(CustomRoleCacheContext(r.Context())) + next.ServeHTTP(w, r) + }) +} + +// CustomRoleCacheContext prevents needing to lookup custom roles within the +// same request lifecycle. Optimizing this to span requests should be done +// in the future. +func CustomRoleCacheContext(ctx context.Context) context.Context { + return context.WithValue(ctx, customRoleCtxKey{}, syncmap.New[string, rbac.Role]()) +} + +func roleCache(ctx context.Context) *syncmap.Map[string, rbac.Role] { + c, ok := ctx.Value(customRoleCtxKey{}).(*syncmap.Map[string, rbac.Role]) + if !ok { + return syncmap.New[string, rbac.Role]() + } + return c +} + +// Expand will expand built in roles, and fetch custom roles from the database. +func Expand(ctx context.Context, db database.Store, names []string) (rbac.Roles, error) { + if len(names) == 0 { + // That was easy + return []rbac.Role{}, nil + } + + cache := roleCache(ctx) + lookup := make([]string, 0) + roles := make([]rbac.Role, 0, len(names)) + + for _, name := range names { + // Remove any built in roles + expanded, err := rbac.RoleByName(name) + if err == nil { + roles = append(roles, expanded) + continue + } + + // Check custom role cache + customRole, ok := cache.Load(name) + if ok { + roles = append(roles, customRole) + continue + } + + // Defer custom role lookup + lookup = append(lookup, name) + } + + if len(lookup) > 0 { + // If some roles are missing from the database, they are omitted from + // the expansion. These roles are no-ops. Should we raise some kind of + // warning when this happens? 
+ dbroles, err := db.CustomRolesByName(ctx, lookup) + if err != nil { + return nil, xerrors.Errorf("fetch custom roles: %w", err) + } + + // convert dbroles -> roles + for _, dbrole := range dbroles { + converted, err := ConvertDBRole(dbrole) + if err != nil { + return nil, xerrors.Errorf("convert db role %q: %w", dbrole, err) + } + roles = append(roles, converted) + cache.Store(dbrole.Name, converted) + } + } + + return roles, nil +} + func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { role := rbac.Role{ Name: dbRole.Name, @@ -35,3 +117,30 @@ func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { return role, nil } + +func ConvertRoleToDB(role rbac.Role) (database.CustomRole, error) { + dbRole := database.CustomRole{ + Name: role.Name, + DisplayName: role.DisplayName, + } + + siteData, err := json.Marshal(role.Site) + if err != nil { + return dbRole, xerrors.Errorf("marshal site permissions: %w", err) + } + dbRole.SitePermissions = siteData + + orgData, err := json.Marshal(role.Org) + if err != nil { + return dbRole, xerrors.Errorf("marshal org permissions: %w", err) + } + dbRole.OrgPermissions = orgData + + userData, err := json.Marshal(role.User) + if err != nil { + return dbRole, xerrors.Errorf("marshal user permissions: %w", err) + } + dbRole.UserPermissions = userData + + return dbRole, nil +} diff --git a/coderd/roles.go b/coderd/roles.go index 5665e298f0e5d..f90f0e474dddf 100644 --- a/coderd/roles.go +++ b/coderd/roles.go @@ -11,7 +11,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac" ) -// assignableSiteRoles returns all site wide roles that can be assigned. +// AssignableSiteRoles returns all site wide roles that can be assigned. 
// // @Summary Get site member roles // @ID get-site-member-roles @@ -20,7 +20,7 @@ import ( // @Tags Members // @Success 200 {array} codersdk.AssignableRoles // @Router /users/roles [get] -func (api *API) assignableSiteRoles(rw http.ResponseWriter, r *http.Request) { +func (api *API) AssignableSiteRoles(rw http.ResponseWriter, r *http.Request) { ctx := r.Context() actorRoles := httpmw.UserAuthorization(r) if !api.Authorize(r, policy.ActionRead, rbac.ResourceAssignRole) { @@ -32,7 +32,7 @@ func (api *API) assignableSiteRoles(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles)) } -// assignableSiteRoles returns all org wide roles that can be assigned. +// assignableOrgRoles returns all org wide roles that can be assigned. // // @Summary Get member roles by organization // @ID get-member-roles-by-organization @@ -66,7 +66,7 @@ func assignableRoles(actorRoles rbac.ExpandableRoles, roles []rbac.Role) []coder continue } assignable = append(assignable, codersdk.AssignableRoles{ - Role: codersdk.Role{ + SlimRole: codersdk.SlimRole{ Name: role.Name, DisplayName: role.DisplayName, }, diff --git a/coderd/roles_test.go b/coderd/roles_test.go index c50f24eb467a0..6754ddc17c9c2 100644 --- a/coderd/roles_test.go +++ b/coderd/roles_test.go @@ -143,9 +143,9 @@ func TestListRoles(t *testing.T) { } } -func convertRole(roleName string) codersdk.Role { +func convertRole(roleName string) codersdk.SlimRole { role, _ := rbac.RoleByName(roleName) - return codersdk.Role{ + return codersdk.SlimRole{ DisplayName: role.DisplayName, Name: role.Name, } @@ -156,7 +156,7 @@ func convertRoles(assignableRoles map[string]bool) []codersdk.AssignableRoles { for roleName, assignable := range assignableRoles { role := convertRole(roleName) converted = append(converted, codersdk.AssignableRoles{ - Role: role, + SlimRole: role, Assignable: assignable, }) } diff --git a/coderd/users.go b/coderd/users.go index c8ca04e390c7f..8db74cadadc9b 
100644 --- a/coderd/users.go +++ b/coderd/users.go @@ -1095,7 +1095,7 @@ func (api *API) putUserRoles(rw http.ResponseWriter, r *http.Request) { return } - updatedUser, err := UpdateSiteUserRoles(ctx, api.Database, database.UpdateUserRolesParams{ + updatedUser, err := api.Database.UpdateUserRoles(ctx, database.UpdateUserRolesParams{ GrantedRoles: params.Roles, ID: user.ID, }) @@ -1123,27 +1123,6 @@ func (api *API) putUserRoles(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, db2sdk.User(updatedUser, organizationIDs)) } -// UpdateSiteUserRoles will ensure only site wide roles are passed in as arguments. -// If an organization role is included, an error is returned. -func UpdateSiteUserRoles(ctx context.Context, db database.Store, args database.UpdateUserRolesParams) (database.User, error) { - // Enforce only site wide roles. - for _, r := range args.GrantedRoles { - if _, ok := rbac.IsOrgRole(r); ok { - return database.User{}, xerrors.Errorf("Must only update site wide roles") - } - - if _, err := rbac.RoleByName(r); err != nil { - return database.User{}, xerrors.Errorf("%q is not a supported role", r) - } - } - - updatedUser, err := db.UpdateUserRoles(ctx, args) - if err != nil { - return database.User{}, xerrors.Errorf("update site roles: %w", err) - } - return updatedUser, nil -} - // Returns organizations the parameterized user has access to. 
// // @Summary Get organizations by user diff --git a/coderd/users_test.go b/coderd/users_test.go index 588a2e107566b..01cac4d1c8251 100644 --- a/coderd/users_test.go +++ b/coderd/users_test.go @@ -1049,7 +1049,7 @@ func TestGrantSiteRoles(t *testing.T) { c.AssignToUser = newUser.ID.String() } - var newRoles []codersdk.Role + var newRoles []codersdk.SlimRole if c.OrgID != uuid.Nil { // Org assign var mem codersdk.OrganizationMember diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 8f8499e51f13b..dd52cae77d1b4 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -54,6 +54,7 @@ const ( FeatureWorkspaceBatchActions FeatureName = "workspace_batch_actions" FeatureAccessControl FeatureName = "access_control" FeatureControlSharedPorts FeatureName = "control_shared_ports" + FeatureCustomRoles FeatureName = "custom_roles" ) // FeatureNames must be kept in-sync with the Feature enum above. @@ -74,6 +75,7 @@ var FeatureNames = []FeatureName{ FeatureWorkspaceBatchActions, FeatureAccessControl, FeatureControlSharedPorts, + FeatureCustomRoles, } // Humanize returns the feature name in a human-readable format. @@ -98,6 +100,7 @@ func (n FeatureName) AlwaysEnable() bool { FeatureAppearance: true, FeatureWorkspaceBatchActions: true, FeatureHighAvailability: true, + FeatureCustomRoles: true, }[n] } @@ -2218,6 +2221,7 @@ const ( ExperimentExample Experiment = "example" // This isn't used for anything. ExperimentAutoFillParameters Experiment = "auto-fill-parameters" // This should not be taken out of experiments until we have redesigned the feature. ExperimentMultiOrganization Experiment = "multi-organization" // Requires organization context for interactions, default org is assumed. 
+ ExperimentCustomRoles Experiment = "custom-roles" // Allows creating runtime custom roles ) // ExperimentsAll should include all experiments that are safe for diff --git a/codersdk/organizations.go b/codersdk/organizations.go index 441f4774f2441..4c9cf81c497d3 100644 --- a/codersdk/organizations.go +++ b/codersdk/organizations.go @@ -48,11 +48,11 @@ type Organization struct { } type OrganizationMember struct { - UserID uuid.UUID `db:"user_id" json:"user_id" format:"uuid"` - OrganizationID uuid.UUID `db:"organization_id" json:"organization_id" format:"uuid"` - CreatedAt time.Time `db:"created_at" json:"created_at" format:"date-time"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at" format:"date-time"` - Roles []Role `db:"roles" json:"roles"` + UserID uuid.UUID `db:"user_id" json:"user_id" format:"uuid"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id" format:"uuid"` + CreatedAt time.Time `db:"created_at" json:"created_at" format:"date-time"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at" format:"date-time"` + Roles []SlimRole `db:"roles" json:"roles"` } // CreateTemplateVersionRequest enables callers to create a new Template Version. diff --git a/codersdk/roles.go b/codersdk/roles.go index 5ed9a92539654..90112f7c6ef30 100644 --- a/codersdk/roles.go +++ b/codersdk/roles.go @@ -9,16 +9,52 @@ import ( "github.com/google/uuid" ) -type Role struct { +// SlimRole omits permission information from a role. +// At present, this is because our apis do not return permission information, +// and it would require extra db calls to fetch this information. The UI does +// not need it, so most api calls will use this structure that omits information. +type SlimRole struct { Name string `json:"name"` DisplayName string `json:"display_name"` } type AssignableRoles struct { - Role + SlimRole Assignable bool `json:"assignable"` } +// Permission is the format passed into the rego. 
+type Permission struct { + // Negate makes this a negative permission + Negate bool `json:"negate"` + ResourceType RBACResource `json:"resource_type"` + Action RBACAction `json:"action"` +} + +// Role is a longer form of SlimRole used to edit custom roles. +type Role struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + SitePermissions []Permission `json:"site_permissions"` + // map[] -> Permissions + OrganizationPermissions map[string][]Permission `json:"organization_permissions"` + UserPermissions []Permission `json:"user_permissions"` +} + +// PatchRole will upsert a custom site wide role +func (c *Client) PatchRole(ctx context.Context, req Role) (Role, error) { + res, err := c.Request(ctx, http.MethodPatch, "/api/v2/users/roles", req) + if err != nil { + return Role{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return Role{}, ReadBodyAsError(res) + } + var role Role + return role, json.NewDecoder(res.Body).Decode(&role) +} + // ListSiteRoles lists all assignable site wide roles. func (c *Client) ListSiteRoles(ctx context.Context) ([]AssignableRoles, error) { res, err := c.Request(ctx, http.MethodGet, "/api/v2/users/roles", nil) @@ -46,3 +82,17 @@ func (c *Client) ListOrganizationRoles(ctx context.Context, org uuid.UUID) ([]As var roles []AssignableRoles return roles, json.NewDecoder(res.Body).Decode(&roles) } + +// CreatePermissions is a helper function to quickly build permissions. 
+func CreatePermissions(mapping map[RBACResource][]RBACAction) []Permission { + perms := make([]Permission, 0) + for t, actions := range mapping { + for _, action := range actions { + perms = append(perms, Permission{ + ResourceType: t, + Action: action, + }) + } + } + return perms +} diff --git a/codersdk/users.go b/codersdk/users.go index 7eb7604fc57b7..80ca583141c9b 100644 --- a/codersdk/users.go +++ b/codersdk/users.go @@ -63,7 +63,7 @@ type User struct { ReducedUser `table:"r,recursive_inline"` OrganizationIDs []uuid.UUID `json:"organization_ids" format:"uuid"` - Roles []Role `json:"roles"` + Roles []SlimRole `json:"roles"` } type GetUsersResponse struct { diff --git a/docs/api/members.md b/docs/api/members.md index e44056664588a..43ae4e8f23da1 100644 --- a/docs/api/members.md +++ b/docs/api/members.md @@ -154,3 +154,127 @@ Status Code **200** | `» name` | string | false | | | To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Upsert a custom site-wide role + +### Code samples + +```shell +# Example request using curl +curl -X PATCH http://coder-server:8080/api/v2/users/roles \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PATCH /users/roles` + +### Example responses + +> 200 Response + +```json +[ + { + "display_name": "string", + "name": "string", + "organization_permissions": { + "property1": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "property2": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + }, + "site_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "user_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + } +] +``` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | 
------------------------------------------------- | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.Role](schemas.md#codersdkrole) | + +

Response Schema

+ +Status Code **200** + +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | +| `[array item]` | array | false | | | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_permissions` | object | false | | map[] -> Permissions | +| `»» [any property]` | array | false | | | +| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»»» negate` | boolean | false | | Negate makes this a negative permission | +| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | + +#### Enumerated Values + +| Property | Value | +| --------------- | ----------------------- | +| `action` | `application_connect` | +| `action` | `assign` | +| `action` | `create` | +| `action` | `delete` | +| `action` | `read` | +| `action` | `read_personal` | +| `action` | `ssh` | +| `action` | `update` | +| `action` | `update_personal` | +| `action` | `use` | +| `action` | `view_insights` | +| `action` | `start` | +| `action` | `stop` | +| `resource_type` | `*` | +| `resource_type` | `api_key` | +| `resource_type` | `assign_org_role` | +| `resource_type` | `assign_role` | +| `resource_type` | `audit_log` | +| `resource_type` | `debug_info` | +| `resource_type` | `deployment_config` | +| `resource_type` | `deployment_stats` | +| `resource_type` | `file` | +| `resource_type` | `group` | +| `resource_type` | `license` | +| `resource_type` | `oauth2_app` | +| `resource_type` | `oauth2_app_code_token` | +| `resource_type` | `oauth2_app_secret` | +| `resource_type` | `organization` | +| `resource_type` | `organization_member` | +| `resource_type` | `provisioner_daemon` | +| `resource_type` | `replicas` | +| `resource_type` | `system` | +| 
`resource_type` | `tailnet_coordinator` | +| `resource_type` | `template` | +| `resource_type` | `user` | +| `resource_type` | `workspace` | +| `resource_type` | `workspace_dormant` | +| `resource_type` | `workspace_proxy` | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/api/schemas.md b/docs/api/schemas.md index cd5c1366e392a..ae35585e2fb12 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -2694,6 +2694,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `example` | | `auto-fill-parameters` | | `multi-organization` | +| `custom-roles` | ## codersdk.ExternalAuth @@ -3579,13 +3580,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ### Properties -| Name | Type | Required | Restrictions | Description | -| ----------------- | --------------------------------------- | -------- | ------------ | ----------- | -| `created_at` | string | false | | | -| `organization_id` | string | false | | | -| `roles` | array of [codersdk.Role](#codersdkrole) | false | | | -| `updated_at` | string | false | | | -| `user_id` | string | false | | | +| Name | Type | Required | Restrictions | Description | +| ----------------- | ----------------------------------------------- | -------- | ------------ | ----------- | +| `created_at` | string | false | | | +| `organization_id` | string | false | | | +| `roles` | array of [codersdk.SlimRole](#codersdkslimrole) | false | | | +| `updated_at` | string | false | | | +| `user_id` | string | false | | | ## codersdk.PatchGroupRequest @@ -3649,6 +3650,24 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o | `name` | string | true | | | | `regenerate_token` | boolean | false | | | +## codersdk.Permission + +```json +{ + "action": "application_connect", + "negate": true, + "resource_type": "*" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| --------------- | ---------------------------------------------- | -------- | ------------ | --------------------------------------- | +| `action` | [codersdk.RBACAction](#codersdkrbacaction) | false | | | +| `negate` | boolean | false | | Negate makes this a negative permission | +| `resource_type` | [codersdk.RBACResource](#codersdkrbacresource) | false | | | + ## codersdk.PostOAuth2ProviderAppRequest ```json @@ -4271,16 +4290,50 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o ```json { "display_name": "string", - "name": "string" + "name": "string", + "organization_permissions": { + "property1": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "property2": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + }, + "site_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "user_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] } ``` ### Properties -| Name | Type | Required | Restrictions | Description | -| -------------- | ------ | -------- | ------------ | ----------- | -| `display_name` | string | false | | | -| `name` | string | false | | | +| Name | Type | Required | Restrictions | Description | +| -------------------------- | --------------------------------------------------- | -------- | ------------ | ---------------------------- | +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_permissions` | object | false | | map[] -> Permissions | +| » `[any property]` | array of 
[codersdk.Permission](#codersdkpermission) | false | | | +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.SSHConfig @@ -4356,6 +4409,22 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `disable_expiry_refresh` | boolean | false | | Disable expiry refresh will disable automatically refreshing api keys when they are used from the api. This means the api key lifetime at creation is the lifetime of the api key. | | `max_token_lifetime` | integer | false | | | +## codersdk.SlimRole + +```json +{ + "display_name": "string", + "name": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| -------------- | ------ | -------- | ------------ | ----------- | +| `display_name` | string | false | | | +| `name` | string | false | | | + ## codersdk.SupportConfig ```json @@ -4906,21 +4975,21 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o ### Properties -| Name | Type | Required | Restrictions | Description | -| ------------------ | ---------------------------------------------- | -------- | ------------ | ----------- | -| `avatar_url` | string | false | | | -| `created_at` | string | true | | | -| `email` | string | true | | | -| `id` | string | true | | | -| `last_seen_at` | string | false | | | -| `login_type` | [codersdk.LoginType](#codersdklogintype) | false | | | -| `name` | string | false | | | -| `organization_ids` | array of string | false | | | -| `role` | [codersdk.TemplateRole](#codersdktemplaterole) | false | | | -| `roles` | array of [codersdk.Role](#codersdkrole) | false | | | -| `status` | [codersdk.UserStatus](#codersdkuserstatus) | false | | | -| `theme_preference` | string | false | | | -| `username` | string | true | | | +| Name | Type | Required | Restrictions | Description | +| ------------------ | ----------------------------------------------- | -------- | ------------ | ----------- | +| `avatar_url` | string | false | | | +| `created_at` | string | true | | | +| `email` | string | true | | | +| `id` | string | true | | | +| `last_seen_at` | string | false | | | +| `login_type` | [codersdk.LoginType](#codersdklogintype) | false | | | +| `name` | string | false | | | +| `organization_ids` | array of string | false | | | +| `role` | [codersdk.TemplateRole](#codersdktemplaterole) | false | | | +| `roles` | array of [codersdk.SlimRole](#codersdkslimrole) | false | | | +| `status` | [codersdk.UserStatus](#codersdkuserstatus) | false | | | +| `theme_preference` | string | false | | | +| `username` | string | true | | | #### Enumerated Values @@ -5497,20 +5566,20 @@ If the schedule is empty, the user will be updated to use the default schedule.| ### Properties -| Name | Type | Required | Restrictions | Description | -| ------------------ | ------------------------------------------ | -------- | ------------ | ----------- | -| `avatar_url` | string | false | | | -| 
`created_at` | string | true | | | -| `email` | string | true | | | -| `id` | string | true | | | -| `last_seen_at` | string | false | | | -| `login_type` | [codersdk.LoginType](#codersdklogintype) | false | | | -| `name` | string | false | | | -| `organization_ids` | array of string | false | | | -| `roles` | array of [codersdk.Role](#codersdkrole) | false | | | -| `status` | [codersdk.UserStatus](#codersdkuserstatus) | false | | | -| `theme_preference` | string | false | | | -| `username` | string | true | | | +| Name | Type | Required | Restrictions | Description | +| ------------------ | ----------------------------------------------- | -------- | ------------ | ----------- | +| `avatar_url` | string | false | | | +| `created_at` | string | true | | | +| `email` | string | true | | | +| `id` | string | true | | | +| `last_seen_at` | string | false | | | +| `login_type` | [codersdk.LoginType](#codersdklogintype) | false | | | +| `name` | string | false | | | +| `organization_ids` | array of string | false | | | +| `roles` | array of [codersdk.SlimRole](#codersdkslimrole) | false | | | +| `status` | [codersdk.UserStatus](#codersdkuserstatus) | false | | | +| `theme_preference` | string | false | | | +| `username` | string | true | | | #### Enumerated Values diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 56c774911018b..524bfd26f3d74 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -326,6 +326,23 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { r.Put("/", api.putAppearance) }) }) + + r.Route("/users/roles", func(r chi.Router) { + r.Use( + apiKeyMiddleware, + ) + r.Group(func(r chi.Router) { + r.Use( + api.customRolesEnabledMW, + ) + r.Patch("/", api.patchRole) + }) + // Unfortunate, but this r.Route overrides the AGPL roles route. + // The AGPL does not have the entitlements to block the licensed + // routes, so we need to duplicate the AGPL here. 
+ r.Get("/", api.AGPL.AssignableSiteRoles) + }) + r.Route("/users/{user}/quiet-hours", func(r chi.Router) { r.Use( api.autostopRequirementEnabledMW, diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go new file mode 100644 index 0000000000000..2224e7f25c0bf --- /dev/null +++ b/enterprise/coderd/roles.go @@ -0,0 +1,80 @@ +package coderd + +import ( + "net/http" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/coderd/httpapi" + "github.com/coder/coder/v2/coderd/rbac/rolestore" + "github.com/coder/coder/v2/codersdk" +) + +// patchRole will allow creating a custom role +// +// @Summary Upsert a custom site-wide role +// @ID upsert-a-custom-site-wide-role +// @Security CoderSessionToken +// @Produce json +// @Tags Members +// @Success 200 {array} codersdk.Role +// @Router /users/roles [patch] +func (api *API) patchRole(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + + var req codersdk.Role + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + if len(req.OrganizationPermissions) > 0 { + // Org perms should be assigned only in org specific roles. Otherwise, + // it gets complicated to keep track of who can do what. + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request, not allowed to assign organization permissions for a site wide role.", + Detail: "site wide roles may not contain organization specific permissions", + }) + return + } + + // Make sure all permissions inputted are valid according to our policy. 
+ rbacRole := db2sdk.RoleToRBAC(req) + args, err := rolestore.ConvertRoleToDB(rbacRole) + if err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request", + Detail: err.Error(), + }) + return + } + + inserted, err := api.Database.UpsertCustomRole(ctx, database.UpsertCustomRoleParams{ + Name: args.Name, + DisplayName: args.DisplayName, + SitePermissions: args.SitePermissions, + OrgPermissions: args.OrgPermissions, + UserPermissions: args.UserPermissions, + }) + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } + if err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Failed to update role permissions", + Detail: err.Error(), + }) + return + } + + convertedInsert, err := rolestore.ConvertDBRole(inserted) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Permissions were updated, unable to read them back out of the database.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, db2sdk.Role(convertedInsert)) +} diff --git a/enterprise/coderd/roles_test.go b/enterprise/coderd/roles_test.go new file mode 100644 index 0000000000000..450f80e0b7fe3 --- /dev/null +++ b/enterprise/coderd/roles_test.go @@ -0,0 +1,170 @@ +package coderd_test + +import ( + "bytes" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" + "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/provisioner/echo" + "github.com/coder/coder/v2/testutil" +) + +func TestCustomRole(t *testing.T) { + t.Parallel() + templateAdminCustom := codersdk.Role{ + Name: "test-role", + DisplayName: "Testing Purposes", + // Basically creating a template admin manually + SitePermissions: 
codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ + codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionViewInsights}, + codersdk.ResourceFile: {codersdk.ActionCreate, codersdk.ActionRead}, + codersdk.ResourceWorkspace: {codersdk.ActionRead}, + }), + OrganizationPermissions: nil, + UserPermissions: nil, + } + + // Create, assign, and use a custom role + t.Run("Success", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + //nolint:gocritic // owner is required for this + role, err := owner.PatchRole(ctx, templateAdminCustom) + require.NoError(t, err, "upsert role") + + // Assign the custom template admin role + tmplAdmin, user := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + + // Assert the role exists + roleNamesF := func(role codersdk.SlimRole) string { return role.Name } + require.Contains(t, db2sdk.List(user.Roles, roleNamesF), role.Name) + + // Try to create a template version + coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) + + // Verify the role exists in the list + // TODO: Turn this assertion back on when the cli api experience is created. + //allRoles, err := tmplAdmin.ListSiteRoles(ctx) + //require.NoError(t, err) + // + //require.True(t, slices.ContainsFunc(allRoles, func(selected codersdk.AssignableRoles) bool { + // return selected.Name == role.Name + //}), "role missing from site role list") + }) + + // Revoked licenses cannot modify/create custom roles, but they can + // use the existing roles. 
+ t.Run("Revoked License", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + //nolint:gocritic // owner is required for this + role, err := owner.PatchRole(ctx, templateAdminCustom) + require.NoError(t, err, "upsert role") + + // Remove the license to block enterprise functionality + licenses, err := owner.Licenses(ctx) + require.NoError(t, err, "get licenses") + for _, license := range licenses { + // Should be only 1... + err := owner.DeleteLicense(ctx, license.ID) + require.NoError(t, err, "delete license") + } + + // Verify functionality is lost + _, err = owner.PatchRole(ctx, templateAdminCustom) + require.ErrorContains(t, err, "Custom roles is an Enterprise feature", "upsert role") + + // Assign the custom template admin role + tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + + // Try to create a template version, eg using the custom role + coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) + }) + + // Role patches are complete, as in the request overrides the existing role. 
+ t.Run("RoleOverrides", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + //nolint:gocritic // owner is required for this + role, err := owner.PatchRole(ctx, templateAdminCustom) + require.NoError(t, err, "upsert role") + + // Assign the custom template admin role + tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + + // Try to create a template version, eg using the custom role + coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) + + //nolint:gocritic // owner is required for this + role, err = owner.PatchRole(ctx, codersdk.Role{ + Name: templateAdminCustom.Name, + DisplayName: templateAdminCustom.DisplayName, + // These are all left nil, which sets the custom role to have 0 + // permissions. Omitting this does not "inherit" what already + // exists. 
+ SitePermissions: nil, + OrganizationPermissions: nil, + UserPermissions: nil, + }) + require.NoError(t, err, "upsert role with override") + + // The role should no longer have template perms + data, err := echo.TarWithOptions(ctx, tmplAdmin.Logger(), nil) + require.NoError(t, err) + file, err := tmplAdmin.Upload(ctx, codersdk.ContentTypeTar, bytes.NewReader(data)) + require.NoError(t, err) + _, err = tmplAdmin.CreateTemplateVersion(ctx, first.OrganizationID, codersdk.CreateTemplateVersionRequest{ + FileID: file.ID, + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeEcho, + }) + require.ErrorContains(t, err, "forbidden") + }) +} diff --git a/enterprise/coderd/userauth.go b/enterprise/coderd/userauth.go index f35d38ca448d9..a2dcac6085c2a 100644 --- a/enterprise/coderd/userauth.go +++ b/enterprise/coderd/userauth.go @@ -7,7 +7,6 @@ import ( "golang.org/x/xerrors" "cdr.dev/slog" - "github.com/coder/coder/v2/coderd" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/codersdk" @@ -96,7 +95,7 @@ func (api *API) setUserSiteRoles(ctx context.Context, logger slog.Logger, db dat // Should this be feature protected? return db.InTx(func(tx database.Store) error { - _, err := coderd.UpdateSiteUserRoles(ctx, db, database.UpdateUserRolesParams{ + _, err := db.UpdateUserRoles(ctx, database.UpdateUserRolesParams{ GrantedRoles: roles, ID: userID, }) diff --git a/enterprise/coderd/users.go b/enterprise/coderd/users.go index 935eeb8f6e689..a29aa1836557d 100644 --- a/enterprise/coderd/users.go +++ b/enterprise/coderd/users.go @@ -14,6 +14,31 @@ import ( "github.com/coder/coder/v2/codersdk" ) +func (api *API) customRolesEnabledMW(next http.Handler) http.Handler { + return httpmw.RequireExperiment(api.AGPL.Experiments, codersdk.ExperimentCustomRoles)( + http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + // Entitlement must be enabled. 
+ api.entitlementsMu.RLock() + entitled := api.entitlements.Features[codersdk.FeatureCustomRoles].Entitlement != codersdk.EntitlementNotEntitled + enabled := api.entitlements.Features[codersdk.FeatureCustomRoles].Enabled + api.entitlementsMu.RUnlock() + if !entitled { + httpapi.Write(r.Context(), rw, http.StatusForbidden, codersdk.Response{ + Message: "Custom roles is an Enterprise feature. Contact sales!", + }) + return + } + if !enabled { + httpapi.Write(r.Context(), rw, http.StatusForbidden, codersdk.Response{ + Message: "Custom roles is not enabled", + }) + return + } + + next.ServeHTTP(rw, r) + })) +} + func (api *API) autostopRequirementEnabledMW(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { // Entitlement must be enabled. diff --git a/site/src/api/api.ts b/site/src/api/api.ts index ed7f18ef1472c..8baa6a5edfc1c 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -1102,7 +1102,7 @@ class ApiMethods { }; updateUserRoles = async ( - roles: TypesGen.Role["name"][], + roles: TypesGen.SlimRole["name"][], userId: TypesGen.User["id"], ): Promise => { const response = await this.axios.put( diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index b3280d200328a..08b1ac2732d82 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -65,7 +65,7 @@ export interface ArchiveTemplateVersionsResponse { } // From codersdk/roles.go -export interface AssignableRoles extends Role { +export interface AssignableRoles extends SlimRole { readonly assignable: boolean; } @@ -786,7 +786,7 @@ export interface OrganizationMember { readonly organization_id: string; readonly created_at: string; readonly updated_at: string; - readonly roles: readonly Role[]; + readonly roles: readonly SlimRole[]; } // From codersdk/pagination.go @@ -821,6 +821,13 @@ export interface PatchWorkspaceProxy { readonly regenerate_token: boolean; } +// From codersdk/roles.go +export interface 
Permission { + readonly negate: boolean; + readonly resource_type: RBACResource; + readonly action: RBACAction; +} + // From codersdk/oauth2.go export interface PostOAuth2ProviderAppRequest { readonly name: string; @@ -970,6 +977,9 @@ export interface Response { export interface Role { readonly name: string; readonly display_name: string; + readonly site_permissions: readonly Permission[]; + readonly organization_permissions: Record; + readonly user_permissions: readonly Permission[]; } // From codersdk/deployment.go @@ -1014,6 +1024,12 @@ export interface SessionLifetime { readonly max_token_lifetime?: number; } +// From codersdk/roles.go +export interface SlimRole { + readonly name: string; + readonly display_name: string; +} + // From codersdk/deployment.go export interface SupportConfig { readonly links: readonly LinkConfig[]; @@ -1405,7 +1421,7 @@ export interface UpsertWorkspaceAgentPortShareRequest { // From codersdk/users.go export interface User extends ReducedUser { readonly organization_ids: readonly string[]; - readonly roles: readonly Role[]; + readonly roles: readonly SlimRole[]; } // From codersdk/insights.go @@ -1910,10 +1926,12 @@ export const Entitlements: Entitlement[] = [ // From codersdk/deployment.go export type Experiment = | "auto-fill-parameters" + | "custom-roles" | "example" | "multi-organization"; export const Experiments: Experiment[] = [ "auto-fill-parameters", + "custom-roles", "example", "multi-organization", ]; @@ -1926,6 +1944,7 @@ export type FeatureName = | "audit_log" | "browser_only" | "control_shared_ports" + | "custom_roles" | "external_provisioner_daemons" | "external_token_encryption" | "high_availability" @@ -1943,6 +1962,7 @@ export const FeatureNames: FeatureName[] = [ "audit_log", "browser_only", "control_shared_ports", + "custom_roles", "external_provisioner_daemons", "external_token_encryption", "high_availability", diff --git a/site/src/pages/UsersPage/UsersPage.test.tsx b/site/src/pages/UsersPage/UsersPage.test.tsx 
index ebc5e24a5e6b6..edbc0118b09f2 100644 --- a/site/src/pages/UsersPage/UsersPage.test.tsx +++ b/site/src/pages/UsersPage/UsersPage.test.tsx @@ -2,7 +2,7 @@ import { fireEvent, screen, within } from "@testing-library/react"; import userEvent from "@testing-library/user-event"; import { HttpResponse, http } from "msw"; import { API } from "api/api"; -import type { Role } from "api/typesGenerated"; +import type { SlimRole } from "api/typesGenerated"; import { MockUser, MockUser2, @@ -102,7 +102,7 @@ const resetUserPassword = async (setupActionSpies: () => void) => { fireEvent.click(confirmButton); }; -const updateUserRole = async (role: Role) => { +const updateUserRole = async (role: SlimRole) => { // Get the first user in the table const users = await screen.findAllByText(/.*@coder.com/); const userRow = users[0].closest("tr"); diff --git a/site/src/pages/UsersPage/UsersPageView.tsx b/site/src/pages/UsersPage/UsersPageView.tsx index 9349311392146..be5f50b6ff9b8 100644 --- a/site/src/pages/UsersPage/UsersPageView.tsx +++ b/site/src/pages/UsersPage/UsersPageView.tsx @@ -25,7 +25,7 @@ export interface UsersPageViewProps { onResetUserPassword: (user: TypesGen.User) => void; onUpdateUserRoles: ( user: TypesGen.User, - roles: TypesGen.Role["name"][], + roles: TypesGen.SlimRole["name"][], ) => void; filterProps: ComponentProps; isNonInitialPage: boolean; diff --git a/site/src/pages/UsersPage/UsersTable/EditRolesButton.tsx b/site/src/pages/UsersPage/UsersTable/EditRolesButton.tsx index 76099c9796205..b304bbed01f89 100644 --- a/site/src/pages/UsersPage/UsersTable/EditRolesButton.tsx +++ b/site/src/pages/UsersPage/UsersTable/EditRolesButton.tsx @@ -3,7 +3,7 @@ import UserIcon from "@mui/icons-material/PersonOutline"; import Checkbox from "@mui/material/Checkbox"; import IconButton from "@mui/material/IconButton"; import type { FC } from "react"; -import type { Role } from "api/typesGenerated"; +import type { SlimRole } from "api/typesGenerated"; import { HelpTooltip, 
HelpTooltipContent, @@ -69,9 +69,9 @@ const Option: FC = ({ export interface EditRolesButtonProps { isLoading: boolean; - roles: readonly Role[]; + roles: readonly SlimRole[]; selectedRoleNames: Set; - onChange: (roles: Role["name"][]) => void; + onChange: (roles: SlimRole["name"][]) => void; isDefaultOpen?: boolean; oidcRoleSync: boolean; userLoginType: string; diff --git a/site/src/pages/UsersPage/UsersTable/UserRoleCell.tsx b/site/src/pages/UsersPage/UsersTable/UserRoleCell.tsx index 36090dbfdeff8..398354f94ee69 100644 --- a/site/src/pages/UsersPage/UsersTable/UserRoleCell.tsx +++ b/site/src/pages/UsersPage/UsersTable/UserRoleCell.tsx @@ -17,7 +17,7 @@ import { useTheme } from "@emotion/react"; import Stack from "@mui/material/Stack"; import TableCell from "@mui/material/TableCell"; import type { FC } from "react"; -import type { Role, User } from "api/typesGenerated"; +import type { SlimRole, User } from "api/typesGenerated"; import { Pill } from "components/Pill/Pill"; import { Popover, @@ -28,7 +28,7 @@ import { EditRolesButton } from "./EditRolesButton"; type UserRoleCellProps = { canEditUsers: boolean; - allAvailableRoles: Role[] | undefined; + allAvailableRoles: SlimRole[] | undefined; user: User; isLoading: boolean; oidcRoleSyncEnabled: boolean; @@ -90,7 +90,7 @@ export const UserRoleCell: FC = ({ }; type OverflowRolePillProps = { - roles: readonly Role[]; + roles: readonly SlimRole[]; }; const OverflowRolePill: FC = ({ roles }) => { @@ -148,7 +148,7 @@ const OverflowRolePill: FC = ({ roles }) => { ); }; -const fallbackRole: Role = { +const fallbackRole: SlimRole = { name: "member", display_name: "Member", } as const; @@ -160,7 +160,9 @@ const roleNamesByAccessLevel: readonly string[] = [ "auditor", ]; -function sortRolesByAccessLevel(roles: readonly Role[]): readonly Role[] { +function sortRolesByAccessLevel( + roles: readonly SlimRole[], +): readonly SlimRole[] { if (roles.length === 0) { return roles; } @@ -172,7 +174,7 @@ function 
sortRolesByAccessLevel(roles: readonly Role[]): readonly Role[] { ); } -function getSelectedRoleNames(roles: readonly Role[]) { +function getSelectedRoleNames(roles: readonly SlimRole[]) { const roleNameSet = new Set(roles.map((role) => role.name)); if (roleNameSet.size === 0) { roleNameSet.add(fallbackRole.name); diff --git a/site/src/pages/UsersPage/UsersTable/UsersTable.tsx b/site/src/pages/UsersPage/UsersTable/UsersTable.tsx index 119b00a851146..d3748f2d8ea95 100644 --- a/site/src/pages/UsersPage/UsersTable/UsersTable.tsx +++ b/site/src/pages/UsersPage/UsersTable/UsersTable.tsx @@ -36,7 +36,7 @@ export interface UsersTableProps { onResetUserPassword: (user: TypesGen.User) => void; onUpdateUserRoles: ( user: TypesGen.User, - roles: TypesGen.Role["name"][], + roles: TypesGen.SlimRole["name"][], ) => void; isNonInitialPage: boolean; actorID: string; diff --git a/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx b/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx index 03222edbedd6d..03a99bd423bf9 100644 --- a/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx +++ b/site/src/pages/UsersPage/UsersTable/UsersTableBody.tsx @@ -52,7 +52,7 @@ interface UsersTableBodyProps { onResetUserPassword: (user: TypesGen.User) => void; onUpdateUserRoles: ( user: TypesGen.User, - roles: TypesGen.Role["name"][], + roles: TypesGen.SlimRole["name"][], ) => void; isNonInitialPage: boolean; actorID: string; diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 6cf97131aba67..5fe1e9cc7b0ff 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -229,27 +229,27 @@ export const MockUpdateCheck: TypesGen.UpdateCheckResponse = { version: "v99.999.9999+c9cdf14", }; -export const MockOwnerRole: TypesGen.Role = { +export const MockOwnerRole: TypesGen.SlimRole = { name: "owner", display_name: "Owner", }; -export const MockUserAdminRole: TypesGen.Role = { +export const MockUserAdminRole: TypesGen.SlimRole = { name: 
"user_admin", display_name: "User Admin", }; -export const MockTemplateAdminRole: TypesGen.Role = { +export const MockTemplateAdminRole: TypesGen.SlimRole = { name: "template_admin", display_name: "Template Admin", }; -export const MockMemberRole: TypesGen.Role = { +export const MockMemberRole: TypesGen.SlimRole = { name: "member", display_name: "Member", }; -export const MockAuditorRole: TypesGen.Role = { +export const MockAuditorRole: TypesGen.SlimRole = { name: "auditor", display_name: "Auditor", }; @@ -257,7 +257,7 @@ export const MockAuditorRole: TypesGen.Role = { // assignableRole takes a role and a boolean. The boolean implies if the // actor can assign (add/remove) the role from other users. export function assignableRole( - role: TypesGen.Role, + role: TypesGen.SlimRole, assignable: boolean, ): TypesGen.AssignableRoles { return { From 80538c079d430ba98b7ce1bfbb213b3d3b724bd0 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Thu, 16 May 2024 14:07:07 -0500 Subject: [PATCH 070/149] chore: update git -> 2.43.4 and terraform -> 1.7.5 (#13299) This fixes an RCE in git and gets us one minor version closer to fixing a critical Terraform vulnerability. In the next release we'll bump to 1.8.x. 
--- .github/actions/setup-tf/action.yaml | 2 +- docs/install/offline.md | 2 +- dogfood/Dockerfile | 4 ++-- install.sh | 2 +- provisioner/terraform/install.go | 4 ++-- scripts/Dockerfile.base | 4 ++-- 6 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index 576b7d7738287..0fa40bdbfdefc 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@v3 with: - terraform_version: 1.6.6 + terraform_version: 1.7.5 terraform_wrapper: false diff --git a/docs/install/offline.md b/docs/install/offline.md index ed60703e64ade..120aa5c9f76b7 100644 --- a/docs/install/offline.md +++ b/docs/install/offline.md @@ -54,7 +54,7 @@ RUN mkdir -p /opt/terraform # The below step is optional if you wish to keep the existing version. # See https://github.com/coder/coder/blob/main/provisioner/terraform/install.go#L23-L24 # for supported Terraform versions. -ARG TERRAFORM_VERSION=1.5.6 +ARG TERRAFORM_VERSION=1.7.5 RUN apk update && \ apk del terraform && \ curl -LOs https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip \ diff --git a/dogfood/Dockerfile b/dogfood/Dockerfile index 4daaa0a636557..735b87dea27f7 100644 --- a/dogfood/Dockerfile +++ b/dogfood/Dockerfile @@ -169,9 +169,9 @@ RUN apt-get update --quiet && apt-get install --yes \ # Configure FIPS-compliant policies update-crypto-policies --set FIPS -# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.6.6. +# NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.7.5. # Installing the same version here to match. 
-RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.6.6/terraform_1.6.6_linux_amd64.zip" && \ +RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_amd64.zip" && \ unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/install.sh b/install.sh index e5d0ca6fab0ae..c5dd828b2652d 100755 --- a/install.sh +++ b/install.sh @@ -250,7 +250,7 @@ EOF main() { MAINLINE=1 STABLE=0 - TERRAFORM_VERSION="1.6.6" + TERRAFORM_VERSION="1.7.5" if [ "${TRACE-}" ]; then set -x diff --git a/provisioner/terraform/install.go b/provisioner/terraform/install.go index e50c3d9af99ab..e3014fb8758be 100644 --- a/provisioner/terraform/install.go +++ b/provisioner/terraform/install.go @@ -20,10 +20,10 @@ var ( // when Terraform is not available on the system. // NOTE: Keep this in sync with the version in scripts/Dockerfile.base. // NOTE: Keep this in sync with the version in install.sh. - TerraformVersion = version.Must(version.NewVersion("1.6.6")) + TerraformVersion = version.Must(version.NewVersion("1.7.5")) minTerraformVersion = version.Must(version.NewVersion("1.1.0")) - maxTerraformVersion = version.Must(version.NewVersion("1.6.9")) // use .9 to automatically allow patch releases + maxTerraformVersion = version.Must(version.NewVersion("1.7.9")) // use .9 to automatically allow patch releases terraformMinorVersionMismatch = xerrors.New("Terraform binary minor version mismatch.") ) diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 1cae05fe9e0db..fa249f6a62cf9 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -10,7 +10,7 @@ RUN apk add --no-cache \ curl \ wget \ bash \ - git \ + git=2.43.4-r0 \ openssl \ openssh-client && \ addgroup \ @@ -26,7 +26,7 @@ RUN apk add --no-cache \ # Terraform was disabled in the edge repo due to a build issue. 
# https://gitlab.alpinelinux.org/alpine/aports/-/commit/f3e263d94cfac02d594bef83790c280e045eba35 # Using wget for now. Note that busybox unzip doesn't support streaming. -RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.6.6/terraform_1.6.6_linux_${ARCH}.zip" && \ +RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_${ARCH}.zip" && \ busybox unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ From 92c5dfa266b05f42eababc2031cd938b4e692b1d Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Thu, 16 May 2024 15:24:03 -0500 Subject: [PATCH 071/149] docs: bump k8s install version (#13302) --- docs/install/kubernetes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index a883e810b8ae0..e8d781ee5dcb6 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -134,7 +134,7 @@ locally in order to log in and manage templates. 
helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.11.0 + --version 2.11.1 ``` For the **stable** Coder release: From 0998cedb5c3e2124f7abcf395c98294df0a1c690 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Fri, 17 May 2024 11:19:48 +0300 Subject: [PATCH 072/149] chore(scripts): fix a few release script changelog issues (#13200) --- scripts/release.sh | 1 + scripts/release/check_commit_metadata.sh | 17 ++++++++++++++--- scripts/release/generate_release_notes.sh | 1 - 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/scripts/release.sh b/scripts/release.sh index d1d2af1502d16..01da3c7728ec5 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -210,6 +210,7 @@ new_version="${new_version%$'\n'}" # Remove the trailing newline. release_notes="$(execrelative ./release/generate_release_notes.sh --old-version "$old_version" --new-version "$new_version" --ref "$ref")" +mkdir -p build release_notes_file="build/RELEASE-${new_version}.md" if ((dry_run)); then release_notes_file="build/RELEASE-${new_version}-DRYRUN.md" diff --git a/scripts/release/check_commit_metadata.sh b/scripts/release/check_commit_metadata.sh index 41b4d4229d155..906818412a4a9 100755 --- a/scripts/release/check_commit_metadata.sh +++ b/scripts/release/check_commit_metadata.sh @@ -99,7 +99,9 @@ main() { git_cherry_out=$( { git log --no-merges --cherry-mark --pretty=format:"%m %H %s" "${to_ref}...origin/main" + echo git log --no-merges --cherry-mark --pretty=format:"%m %H %s" "${from_ref}...origin/main" + echo } | { grep '^=' || true; } | sort -u | sort -k3 ) if [[ -n ${git_cherry_out} ]]; then @@ -209,6 +211,15 @@ main() { fi fi + author= + if [[ -v authors[${commit_sha_long}] ]]; then + author=${authors[${commit_sha_long}]} + if [[ ${author} == "app/dependabot" ]]; then + log "Skipping commit by app/dependabot ${commit_sha_short} (${commit_sha_long})" + continue + fi + fi + if [[ ${left_right} == "<" ]]; then # Skip commits that are 
already in main. log "Skipping commit ${commit_sha_short} from other branch (${commit_sha_long} ${title})" @@ -218,7 +229,7 @@ main() { COMMIT_METADATA_COMMITS+=("${commit_sha_long_orig}") # Safety-check, guarantee all commits had their metadata fetched. - if [[ ! -v authors[${commit_sha_long}] ]] || [[ ! -v labels[${commit_sha_long}] ]]; then + if [[ -z ${author} ]] || [[ ! -v labels[${commit_sha_long}] ]]; then if [[ ${ignore_missing_metadata} != 1 ]]; then error "Metadata missing for commit ${commit_sha_short} (${commit_sha_long})" else @@ -228,8 +239,8 @@ main() { # Store the commit title for later use. COMMIT_METADATA_TITLE[${commit_sha_short}]=${title} - if [[ -v authors[${commit_sha_long}] ]]; then - COMMIT_METADATA_AUTHORS[${commit_sha_short}]="@${authors[${commit_sha_long}]}" + if [[ -n ${author} ]]; then + COMMIT_METADATA_AUTHORS[${commit_sha_short}]="@${author}" fi # Create humanized titles where possible, examples: diff --git a/scripts/release/generate_release_notes.sh b/scripts/release/generate_release_notes.sh index 7aa24ee4fe198..b593ccad3cc5b 100755 --- a/scripts/release/generate_release_notes.sh +++ b/scripts/release/generate_release_notes.sh @@ -147,7 +147,6 @@ for commit in "${commits[@]}"; do title="${title} (${commit})" fi line="- ${title}" - line=${line//) (/, )} if [[ -v COMMIT_METADATA_AUTHORS[${commit}] ]]; then line+=" (${COMMIT_METADATA_AUTHORS[${commit}]})" fi From f66d0445da659cda503135faa27f7b6b82186c5f Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Fri, 17 May 2024 11:25:10 +0300 Subject: [PATCH 073/149] chore(scripts): fix stable release promote script (#13204) --- scripts/lib.sh | 24 ++++++++++++++---------- scripts/release/main.go | 10 +++++++--- scripts/release/main_internal_test.go | 13 ++++++++++--- scripts/release_promote_stable.sh | 3 +++ 4 files changed, 34 insertions(+), 16 deletions(-) diff --git a/scripts/lib.sh b/scripts/lib.sh index 78ec22d503fbf..e245fb4ab8cc7 100644 --- a/scripts/lib.sh +++ b/scripts/lib.sh 
@@ -134,18 +134,22 @@ requiredenvs() { } gh_auth() { - local fail=0 - if [[ "${CODER:-}" == "true" ]]; then - if ! output=$(coder external-auth access-token github 2>&1); then - log "ERROR: Could not authenticate with GitHub." - log "$output" - fail=1 + if [[ -z ${GITHUB_TOKEN:-} ]]; then + if [[ -n ${GH_TOKEN:-} ]]; then + export GITHUB_TOKEN=${GH_TOKEN} + elif [[ ${CODER:-} == true ]]; then + if ! output=$(coder external-auth access-token github 2>&1); then + # TODO(mafredri): We could allow checking `gh auth token` here. + log "${output}" + error "Could not authenticate with GitHub using Coder external auth." + else + export GITHUB_TOKEN=${output} + fi + elif token="$(gh auth token --hostname github.com 2>/dev/null)"; then + export GITHUB_TOKEN=${token} else - GITHUB_TOKEN=$(coder external-auth access-token github) - export GITHUB_TOKEN + error "GitHub authentication is required to run this command, please set GITHUB_TOKEN or run 'gh auth login'." fi - else - log "Please authenticate gh CLI by running 'gh auth login'" fi } diff --git a/scripts/release/main.go b/scripts/release/main.go index 919205b76db65..8eaeb20825a92 100644 --- a/scripts/release/main.go +++ b/scripts/release/main.go @@ -62,9 +62,9 @@ func main() { Value: serpent.BoolOf(&r.debug), }, { - Flag: "gh-token", + Flag: "github-token", Description: "GitHub personal access token.", - Env: "GH_TOKEN", + Env: "GITHUB_TOKEN", Value: serpent.StringOf(&r.ghToken), }, { @@ -245,7 +245,7 @@ func (r *releaseCommand) promoteVersionToStable(ctx context.Context, inv *serpen updatedNewStable.Prerelease = github.Bool(false) updatedNewStable.Draft = github.Bool(false) if !r.dryRun { - _, _, err = client.Repositories.EditRelease(ctx, owner, repo, newStable.GetID(), newStable) + _, _, err = client.Repositories.EditRelease(ctx, owner, repo, newStable.GetID(), updatedNewStable) if err != nil { return xerrors.Errorf("edit release failed: %w", err) } @@ -268,6 +268,10 @@ func cloneRelease(r *github.RepositoryRelease) 
*github.RepositoryRelease { // // > ## Stable (since April 23, 2024) func addStableSince(date time.Time, body string) string { + // Protect against adding twice. + if strings.Contains(body, "> ## Stable (since") { + return body + } return fmt.Sprintf("> ## Stable (since %s)\n\n", date.Format("January 02, 2006")) + body } diff --git a/scripts/release/main_internal_test.go b/scripts/release/main_internal_test.go index 74a6d46d05c8a..d5d10706683a2 100644 --- a/scripts/release/main_internal_test.go +++ b/scripts/release/main_internal_test.go @@ -131,12 +131,19 @@ func Test_addStableSince(t *testing.T) { date := time.Date(2024, time.April, 23, 0, 0, 0, 0, time.UTC) body := "## Changelog" - expected := "> ## Stable (since April 23, 2024)\n\n## Changelog" - result := addStableSince(date, body) + want := "> ## Stable (since April 23, 2024)\n\n## Changelog" + got := addStableSince(date, body) - if diff := cmp.Diff(expected, result); diff != "" { + if diff := cmp.Diff(want, got); diff != "" { require.Fail(t, "addStableSince() mismatch (-want +got):\n%s", diff) } + + // Test that it doesn't add twice. + got = addStableSince(date, got) + + if diff := cmp.Diff(want, got); diff != "" { + require.Fail(t, "addStableSince() mismatch (-want +got):\n%s", diff, "addStableSince() should not add twice") + } } func Test_release_autoversion(t *testing.T) { diff --git a/scripts/release_promote_stable.sh b/scripts/release_promote_stable.sh index 33b55d3855d41..1ac0f8318d749 100755 --- a/scripts/release_promote_stable.sh +++ b/scripts/release_promote_stable.sh @@ -4,6 +4,9 @@ set -euo pipefail # shellcheck source=scripts/lib.sh source "$(dirname "${BASH_SOURCE[0]}")/lib.sh" +# Make sure GITHUB_TOKEN is set for the release command. +gh_auth + # This script is a convenience wrapper around the release promote command. # # Sed hack to make help text look like this script. 
From f23d4802b5bbaaf70aef230d628dd3eac79af95b Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Fri, 17 May 2024 10:24:56 +0100 Subject: [PATCH 074/149] ci: fix test-migrations target when main branch is not present locally (#13306) --- Makefile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 874ae8ee56abc..7a24a293d16a8 100644 --- a/Makefile +++ b/Makefile @@ -797,8 +797,11 @@ test-postgres: test-postgres-docker test-migrations: test-postgres-docker echo "--- test migrations" set -euo pipefail - COMMIT_FROM=$(shell git rev-parse --short HEAD) - COMMIT_TO=$(shell git rev-parse --short main) + COMMIT_FROM=$(shell git log -1 --format='%h' HEAD) + echo "COMMIT_FROM=$${COMMIT_FROM}" + COMMIT_TO=$(shell git log -1 --format='%h' origin/main) + echo "COMMIT_TO=$${COMMIT_TO}" + if [[ "$${COMMIT_FROM}" == "$${COMMIT_TO}" ]]; then echo "Nothing to do!"; exit 0; fi echo "DROP DATABASE IF EXISTS migrate_test_$${COMMIT_FROM}; CREATE DATABASE migrate_test_$${COMMIT_FROM};" | psql 'postgresql://postgres:postgres@localhost:5432/postgres?sslmode=disable' go run ./scripts/migrate-test/main.go --from="$$COMMIT_FROM" --to="$$COMMIT_TO" --postgres-url="postgresql://postgres:postgres@localhost:5432/migrate_test_$${COMMIT_FROM}?sslmode=disable" From f176ff532f9e182fb65ea08a798f407d9e5ba8a9 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Fri, 17 May 2024 11:55:30 -0500 Subject: [PATCH 075/149] ci: re-pin `actions/dependency-review-action` back to a release (#13309) --- .github/workflows/ci.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 49ad712b7dee4..003319255580b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -916,8 +916,7 @@ jobs: uses: actions/checkout@v4 - name: "Dependency Review" id: review - # TODO: Replace this with the latest release once https://github.com/actions/dependency-review-action/pull/761 is merged. 
- uses: actions/dependency-review-action@82ab8f69c78827a746628706b5d2c3f87231fd4c + uses: actions/dependency-review-action@v4.3.2 with: allow-licenses: Apache-2.0, BSD-2-Clause, BSD-3-Clause, CC0-1.0, ISC, MIT, MIT-0, MPL-2.0 allow-dependencies-licenses: "pkg:golang/github.com/pelletier/go-toml/v2" From d8bb5a05db2f724ce9f274a88bc78e4a732c0d5b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 May 2024 12:03:32 -0500 Subject: [PATCH 076/149] chore: bump github.com/fergusstrange/embedded-postgres from 1.26.0 to 1.27.0 (#13255) Bumps [github.com/fergusstrange/embedded-postgres](https://github.com/fergusstrange/embedded-postgres) from 1.26.0 to 1.27.0. - [Release notes](https://github.com/fergusstrange/embedded-postgres/releases) - [Commits](https://github.com/fergusstrange/embedded-postgres/compare/v1.26.0...v1.27.0) --- updated-dependencies: - dependency-name: github.com/fergusstrange/embedded-postgres dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index b9037722ca368..d40572a4a2df3 100644 --- a/go.mod +++ b/go.mod @@ -115,7 +115,7 @@ require ( github.com/fatih/color v1.16.0 github.com/fatih/structs v1.1.0 github.com/fatih/structtag v1.2.0 - github.com/fergusstrange/embedded-postgres v1.26.0 + github.com/fergusstrange/embedded-postgres v1.27.0 github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa github.com/gen2brain/beeep v0.0.0-20220402123239-6a3042f4b71a github.com/gliderlabs/ssh v0.3.4 diff --git a/go.sum b/go.sum index ad15d5cc1ca36..98bc966fa40ff 100644 --- a/go.sum +++ b/go.sum @@ -298,8 +298,8 @@ github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4 github.com/felixge/httpsnoop v1.0.2/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/fergusstrange/embedded-postgres v1.26.0 h1:mTgUBNST+6zro0TkIb9Fuo9Qg8mSU0ILus9jZKmFmJg= -github.com/fergusstrange/embedded-postgres v1.26.0/go.mod h1:t/MLs0h9ukYM6FSt99R7InCHs1nW0ordoVCcnzmpTYw= +github.com/fergusstrange/embedded-postgres v1.27.0 h1:RAlpWL194IhEpPgeJceTM0ifMJKhiSVxBVIDYB1Jee8= +github.com/fergusstrange/embedded-postgres v1.27.0/go.mod h1:t/MLs0h9ukYM6FSt99R7InCHs1nW0ordoVCcnzmpTYw= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/foxcpp/go-mockdns v1.0.0 h1:7jBqxd3WDWwi/6WhDvacvH1XsN3rOLXyHM1uhvIx6FI= From 4af0f093eeec9b5bfaeff55c65c271844060834b Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Fri, 17 May 2024 15:26:00 -0300 Subject: [PATCH 077/149] fix(site): fix 
floating number on duration fields (#13209) --- .../DurationField/DurationField.stories.tsx | 86 ++++++++ .../DurationField/DurationField.tsx | 187 ++++++++++++++++++ .../TemplateSchedulePage/TTLHelperText.tsx | 13 +- .../TemplateScheduleForm.tsx | 67 +++---- .../TemplateSchedulePage.test.tsx | 2 +- .../TemplateSchedulePage/formHelpers.tsx | 4 +- .../useWorkspacesToBeDeleted.ts | 6 +- site/src/utils/time.ts | 31 +++ 8 files changed, 343 insertions(+), 53 deletions(-) create mode 100644 site/src/components/DurationField/DurationField.stories.tsx create mode 100644 site/src/components/DurationField/DurationField.tsx create mode 100644 site/src/utils/time.ts diff --git a/site/src/components/DurationField/DurationField.stories.tsx b/site/src/components/DurationField/DurationField.stories.tsx new file mode 100644 index 0000000000000..32e3953f9b5c6 --- /dev/null +++ b/site/src/components/DurationField/DurationField.stories.tsx @@ -0,0 +1,86 @@ +import type { Meta, StoryObj } from "@storybook/react"; +import { expect, within, userEvent } from "@storybook/test"; +import { useState } from "react"; +import { DurationField } from "./DurationField"; + +const meta: Meta = { + title: "components/DurationField", + component: DurationField, + args: { + label: "Duration", + }, + render: function RenderComponent(args) { + const [value, setValue] = useState(args.valueMs); + return ( + setValue(value)} + /> + ); + }, +}; + +export default meta; +type Story = StoryObj; + +export const Hours: Story = { + args: { + valueMs: hoursToMs(16), + }, +}; + +export const Days: Story = { + args: { + valueMs: daysToMs(2), + }, +}; + +export const TypeOnlyNumbers: Story = { + args: { + valueMs: 0, + }, + play: async ({ canvasElement }) => { + const canvas = within(canvasElement); + const input = canvas.getByLabelText("Duration"); + await userEvent.clear(input); + await userEvent.type(input, "abcd_.?/48.0"); + await expect(input).toHaveValue("480"); + }, +}; + +export const ChangeUnit: Story = { + 
args: { + valueMs: daysToMs(2), + }, + play: async ({ canvasElement }) => { + const canvas = within(canvasElement); + const input = canvas.getByLabelText("Duration"); + const unitDropdown = canvas.getByLabelText("Time unit"); + await userEvent.click(unitDropdown); + const hoursOption = within(document.body).getByText("Hours"); + await userEvent.click(hoursOption); + await expect(input).toHaveValue("48"); + }, +}; + +export const CantConvertToDays: Story = { + args: { + valueMs: hoursToMs(2), + }, + play: async ({ canvasElement }) => { + const canvas = within(canvasElement); + const unitDropdown = canvas.getByLabelText("Time unit"); + await userEvent.click(unitDropdown); + const daysOption = within(document.body).getByText("Days"); + await expect(daysOption).toHaveAttribute("aria-disabled", "true"); + }, +}; + +function hoursToMs(hours: number): number { + return hours * 60 * 60 * 1000; +} + +function daysToMs(days: number): number { + return days * 24 * 60 * 60 * 1000; +} diff --git a/site/src/components/DurationField/DurationField.tsx b/site/src/components/DurationField/DurationField.tsx new file mode 100644 index 0000000000000..8e2dc752ba410 --- /dev/null +++ b/site/src/components/DurationField/DurationField.tsx @@ -0,0 +1,187 @@ +import KeyboardArrowDown from "@mui/icons-material/KeyboardArrowDown"; +import FormHelperText from "@mui/material/FormHelperText"; +import MenuItem from "@mui/material/MenuItem"; +import Select from "@mui/material/Select"; +import TextField, { type TextFieldProps } from "@mui/material/TextField"; +import { type FC, useEffect, useReducer } from "react"; +import { + type TimeUnit, + durationInDays, + durationInHours, + suggestedTimeUnit, +} from "utils/time"; + +type DurationFieldProps = Omit & { + valueMs: number; + onChange: (value: number) => void; +}; + +type State = { + unit: TimeUnit; + // Handling empty values as strings in the input simplifies the process, + // especially when a user clears the input field. 
+ durationFieldValue: string; +}; + +type Action = + | { type: "SYNC_WITH_PARENT"; parentValueMs: number } + | { type: "CHANGE_DURATION_FIELD_VALUE"; fieldValue: string } + | { type: "CHANGE_TIME_UNIT"; unit: TimeUnit }; + +const reducer = (state: State, action: Action): State => { + switch (action.type) { + case "SYNC_WITH_PARENT": { + return initState(action.parentValueMs); + } + case "CHANGE_DURATION_FIELD_VALUE": { + return { + ...state, + durationFieldValue: action.fieldValue, + }; + } + case "CHANGE_TIME_UNIT": { + const currentDurationMs = durationInMs( + state.durationFieldValue, + state.unit, + ); + + if ( + action.unit === "days" && + !canConvertDurationToDays(currentDurationMs) + ) { + return state; + } + + return { + unit: action.unit, + durationFieldValue: + action.unit === "hours" + ? durationInHours(currentDurationMs).toString() + : durationInDays(currentDurationMs).toString(), + }; + } + default: { + return state; + } + } +}; + +export const DurationField: FC = (props) => { + const { + valueMs: parentValueMs, + onChange, + helperText, + ...textFieldProps + } = props; + const [state, dispatch] = useReducer(reducer, initState(parentValueMs)); + const currentDurationMs = durationInMs(state.durationFieldValue, state.unit); + + useEffect(() => { + if (parentValueMs !== currentDurationMs) { + dispatch({ type: "SYNC_WITH_PARENT", parentValueMs }); + } + }, [currentDurationMs, parentValueMs]); + + return ( +
+
+ { + const durationFieldValue = intMask(e.currentTarget.value); + + dispatch({ + type: "CHANGE_DURATION_FIELD_VALUE", + fieldValue: durationFieldValue, + }); + + const newDurationInMs = durationInMs( + durationFieldValue, + state.unit, + ); + if (newDurationInMs !== parentValueMs) { + onChange(newDurationInMs); + } + }} + inputProps={{ + step: 1, + }} + /> + +
+ + {helperText && ( + {helperText} + )} +
+ ); +}; + +function initState(value: number): State { + const unit = suggestedTimeUnit(value); + const durationFieldValue = + unit === "hours" + ? durationInHours(value).toString() + : durationInDays(value).toString(); + + return { + unit, + durationFieldValue, + }; +} + +function intMask(value: string): string { + return value.replace(/\D/g, ""); +} + +function durationInMs(durationFieldValue: string, unit: TimeUnit): number { + const durationInMs = parseInt(durationFieldValue, 10); + + if (Number.isNaN(durationInMs)) { + return 0; + } + + return unit === "hours" + ? hoursToDuration(durationInMs) + : daysToDuration(durationInMs); +} + +function hoursToDuration(hours: number): number { + return hours * 60 * 60 * 1000; +} + +function daysToDuration(days: number): number { + return days * 24 * hoursToDuration(1); +} + +function canConvertDurationToDays(duration: number): boolean { + return Number.isInteger(durationInDays(duration)); +} diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TTLHelperText.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TTLHelperText.tsx index 4114f4d37d5b9..11f83f1e21a9c 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TTLHelperText.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TTLHelperText.tsx @@ -1,5 +1,6 @@ +import { humanDuration } from "utils/time"; + const hours = (h: number) => (h === 1 ? "hour" : "hours"); -const days = (d: number) => (d === 1 ? "day" : "days"); export const DefaultTTLHelperText = (props: { ttl?: number }) => { const { ttl = 0 } = props; @@ -60,7 +61,7 @@ export const FailureTTLHelperText = (props: { ttl?: number }) => { return ( - Coder will attempt to stop failed workspaces after {ttl} {days(ttl)}. + Coder will attempt to stop failed workspaces after {humanDuration(ttl)}. 
); }; @@ -79,8 +80,8 @@ export const DormancyTTLHelperText = (props: { ttl?: number }) => { return ( - Coder will mark workspaces as dormant after {ttl} {days(ttl)} without user - connections. + Coder will mark workspaces as dormant after {humanDuration(ttl)} without + user connections. ); }; @@ -99,8 +100,8 @@ export const DormancyAutoDeletionTTLHelperText = (props: { ttl?: number }) => { return ( - Coder will automatically delete dormant workspaces after {ttl} {days(ttl)} - . + Coder will automatically delete dormant workspaces after{" "} + {humanDuration(ttl)}. ); }; diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateScheduleForm.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateScheduleForm.tsx index 47e31f05498a3..25986850a2335 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateScheduleForm.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateScheduleForm.tsx @@ -6,6 +6,7 @@ import TextField from "@mui/material/TextField"; import { type FormikTouched, useFormik } from "formik"; import { type ChangeEvent, type FC, useState, useEffect } from "react"; import type { Template, UpdateTemplateMeta } from "api/typesGenerated"; +import { DurationField } from "components/DurationField/DurationField"; import { FormSection, HorizontalForm, @@ -47,9 +48,9 @@ import { const MS_HOUR_CONVERSION = 3600000; const MS_DAY_CONVERSION = 86400000; -const FAILURE_CLEANUP_DEFAULT = 7; -const INACTIVITY_CLEANUP_DEFAULT = 180; -const DORMANT_AUTODELETION_DEFAULT = 30; +const FAILURE_CLEANUP_DEFAULT = 7 * MS_DAY_CONVERSION; +const INACTIVITY_CLEANUP_DEFAULT = 180 * MS_DAY_CONVERSION; +const DORMANT_AUTODELETION_DEFAULT = 30 * MS_DAY_CONVERSION; /** * The default form field space is 4 but since this form is quite heavy I think * increase the space can make it feels lighter. 
@@ -83,16 +84,9 @@ export const TemplateScheduleForm: FC = ({ // on display, convert from ms => hours default_ttl_ms: template.default_ttl_ms / MS_HOUR_CONVERSION, activity_bump_ms: template.activity_bump_ms / MS_HOUR_CONVERSION, - failure_ttl_ms: allowAdvancedScheduling - ? template.failure_ttl_ms / MS_DAY_CONVERSION - : 0, - time_til_dormant_ms: allowAdvancedScheduling - ? template.time_til_dormant_ms / MS_DAY_CONVERSION - : 0, - time_til_dormant_autodelete_ms: allowAdvancedScheduling - ? template.time_til_dormant_autodelete_ms / MS_DAY_CONVERSION - : 0, - + failure_ttl_ms: template.failure_ttl_ms, + time_til_dormant_ms: template.time_til_dormant_ms, + time_til_dormant_autodelete_ms: template.time_til_dormant_autodelete_ms, autostop_requirement_days_of_week: allowAdvancedScheduling ? convertAutostopRequirementDaysValue( template.autostop_requirement.days_of_week, @@ -210,16 +204,10 @@ export const TemplateScheduleForm: FC = ({ activity_bump_ms: form.values.activity_bump_ms ? form.values.activity_bump_ms * MS_HOUR_CONVERSION : undefined, - failure_ttl_ms: form.values.failure_ttl_ms - ? form.values.failure_ttl_ms * MS_DAY_CONVERSION - : undefined, - time_til_dormant_ms: form.values.time_til_dormant_ms - ? form.values.time_til_dormant_ms * MS_DAY_CONVERSION - : undefined, - time_til_dormant_autodelete_ms: form.values.time_til_dormant_autodelete_ms - ? 
form.values.time_til_dormant_autodelete_ms * MS_DAY_CONVERSION - : undefined, - + failure_ttl_ms: form.values.failure_ttl_ms, + time_til_dormant_ms: form.values.time_til_dormant_ms, + time_til_dormant_autodelete_ms: + form.values.time_til_dormant_autodelete_ms, autostop_requirement: { days_of_week: calculateAutostopRequirementDaysValue( form.values.autostop_requirement_days_of_week, @@ -229,7 +217,6 @@ export const TemplateScheduleForm: FC = ({ autostart_requirement: { days_of_week: form.values.autostart_requirement_days_of_week, }, - allow_user_autostart: form.values.allow_user_autostart, allow_user_autostop: form.values.allow_user_autostop, update_workspace_last_used_at: form.values.update_workspace_last_used_at, @@ -498,7 +485,8 @@ export const TemplateScheduleForm: FC = ({ } label={Enable Dormancy Threshold} /> - = ({ /> ), })} + label="Time until dormant" + valueMs={form.values.time_til_dormant_ms ?? 0} + onChange={(v) => form.setFieldValue("time_til_dormant_ms", v)} disabled={ isSubmitting || !form.values.inactivity_cleanup_enabled } - fullWidth - inputProps={{ min: 0, step: "any" }} - label="Time until dormant (days)" - type="number" /> @@ -539,7 +526,7 @@ export const TemplateScheduleForm: FC = ({ } /> - = ({ /> ), })} + label="Time until deletion" + valueMs={form.values.time_til_dormant_autodelete_ms ?? 0} + onChange={(v) => + form.setFieldValue("time_til_dormant_autodelete_ms", v) + } disabled={ isSubmitting || !form.values.dormant_autodeletion_cleanup_enabled } - fullWidth - inputProps={{ min: 0, step: "any" }} - label="Time until deletion (days)" - type="number" /> @@ -573,24 +561,23 @@ export const TemplateScheduleForm: FC = ({ Enable Failure Cleanup When enabled, Coder will attempt to stop workspaces that - are in a failed state after a specified number of days. + are in a failed state after a period of time. } /> - ), })} + label="Time until cleanup" + valueMs={form.values.failure_ttl_ms ?? 
0} + onChange={(v) => form.setFieldValue("failure_ttl_ms", v)} disabled={ isSubmitting || !form.values.failure_cleanup_enabled } - fullWidth - inputProps={{ min: 0, step: "any" }} - label="Time until cleanup (days)" - type="number" /> diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx index 48d9d8ef44e4f..f1e5c51c9b2ce 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/TemplateSchedulePage.test.tsx @@ -286,7 +286,7 @@ describe("TemplateSchedulePage", () => { }; const validate = () => getValidationSchema().validateSync(values); expect(validate).toThrowError( - "Dormancy threshold days must not be less than 0.", + "Dormancy threshold must not be less than 0.", ); }); diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/formHelpers.tsx b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/formHelpers.tsx index 77a2d6d8f1596..606c590744871 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/formHelpers.tsx +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/formHelpers.tsx @@ -57,10 +57,10 @@ export const getValidationSchema = (): Yup.AnyObjectSchema => time_til_dormant_ms: Yup.number() .integer() .required() - .min(0, "Dormancy threshold days must not be less than 0.") + .min(0, "Dormancy threshold must not be less than 0.") .test( "positive-if-enabled", - "Dormancy threshold days must be greater than zero when enabled.", + "Dormancy threshold must be greater than zero when enabled.", function (value) { const parent = this.parent as TemplateScheduleFormValues; if (parent.inactivity_cleanup_enabled) { diff --git a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/useWorkspacesToBeDeleted.ts 
b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/useWorkspacesToBeDeleted.ts index 978825dd00829..4e171f0978a8b 100644 --- a/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/useWorkspacesToBeDeleted.ts +++ b/site/src/pages/TemplateSettingsPage/TemplateSchedulePage/useWorkspacesToBeDeleted.ts @@ -25,7 +25,7 @@ export const useWorkspacesToGoDormant = ( const proposedLocking = new Date( new Date(workspace.last_used_at).getTime() + - formValues.time_til_dormant_ms * DayInMS, + formValues.time_til_dormant_ms, ); if (compareAsc(proposedLocking, fromDate) < 1) { @@ -34,8 +34,6 @@ export const useWorkspacesToGoDormant = ( }); }; -const DayInMS = 86400000; - export const useWorkspacesToBeDeleted = ( template: Template, formValues: TemplateScheduleFormValues, @@ -53,7 +51,7 @@ export const useWorkspacesToBeDeleted = ( const proposedLocking = new Date( new Date(workspace.dormant_at).getTime() + - formValues.time_til_dormant_autodelete_ms * DayInMS, + formValues.time_til_dormant_autodelete_ms, ); if (compareAsc(proposedLocking, fromDate) < 1) { diff --git a/site/src/utils/time.ts b/site/src/utils/time.ts new file mode 100644 index 0000000000000..67e3362bcbd69 --- /dev/null +++ b/site/src/utils/time.ts @@ -0,0 +1,31 @@ +export type TimeUnit = "days" | "hours"; + +export function humanDuration(durationInMs: number) { + if (durationInMs === 0) { + return "0 hours"; + } + + const timeUnit = suggestedTimeUnit(durationInMs); + const durationValue = + timeUnit === "days" + ? durationInDays(durationInMs) + : durationInHours(durationInMs); + + return `${durationValue} ${timeUnit}`; +} + +export function suggestedTimeUnit(duration: number): TimeUnit { + if (duration === 0) { + return "hours"; + } + + return Number.isInteger(durationInDays(duration)) ? 
"days" : "hours"; +} + +export function durationInHours(duration: number): number { + return duration / 1000 / 60 / 60; +} + +export function durationInDays(duration: number): number { + return duration / 1000 / 60 / 60 / 24; +} From a63d427efd87038d853de178bafcfe508fe99c85 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Fri, 17 May 2024 12:40:38 -0600 Subject: [PATCH 078/149] chore: add unique org name constraint to db (#13311) --- coderd/database/dump.sql | 3 +++ coderd/database/migrations/000210_unique_org_name.down.sql | 2 ++ coderd/database/migrations/000210_unique_org_name.up.sql | 2 ++ coderd/database/unique_constraint.go | 1 + 4 files changed, 8 insertions(+) create mode 100644 coderd/database/migrations/000210_unique_org_name.down.sql create mode 100644 coderd/database/migrations/000210_unique_org_name.up.sql diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 33a9ebbef8139..718c323fd7804 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -1479,6 +1479,9 @@ ALTER TABLE ONLY oauth2_provider_apps ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id); +ALTER TABLE ONLY organizations + ADD CONSTRAINT organizations_name UNIQUE (name); + ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_pkey PRIMARY KEY (id); diff --git a/coderd/database/migrations/000210_unique_org_name.down.sql b/coderd/database/migrations/000210_unique_org_name.down.sql new file mode 100644 index 0000000000000..d06cff629fda7 --- /dev/null +++ b/coderd/database/migrations/000210_unique_org_name.down.sql @@ -0,0 +1,2 @@ +ALTER TABLE ONLY organizations + DROP CONSTRAINT IF EXISTS organizations_name; diff --git a/coderd/database/migrations/000210_unique_org_name.up.sql b/coderd/database/migrations/000210_unique_org_name.up.sql new file mode 100644 index 0000000000000..44079422b4104 --- /dev/null +++ b/coderd/database/migrations/000210_unique_org_name.up.sql @@ -0,0 +1,2 @@ 
+ALTER TABLE ONLY organizations + ADD CONSTRAINT organizations_name UNIQUE (name); diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index 9dfc8c124aa75..8b61e06518f0b 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -32,6 +32,7 @@ const ( UniqueOauth2ProviderAppsNameKey UniqueConstraint = "oauth2_provider_apps_name_key" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_name_key UNIQUE (name); UniqueOauth2ProviderAppsPkey UniqueConstraint = "oauth2_provider_apps_pkey" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_pkey PRIMARY KEY (id); UniqueOrganizationMembersPkey UniqueConstraint = "organization_members_pkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id); + UniqueOrganizationsName UniqueConstraint = "organizations_name" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_name UNIQUE (name); UniqueOrganizationsPkey UniqueConstraint = "organizations_pkey" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_pkey PRIMARY KEY (id); UniqueParameterSchemasJobIDNameKey UniqueConstraint = "parameter_schemas_job_id_name_key" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_job_id_name_key UNIQUE (job_id, name); UniqueParameterSchemasPkey UniqueConstraint = "parameter_schemas_pkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_pkey PRIMARY KEY (id); From 45b45f1107614da9d89f53fc14b3430afeedfd55 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 20 May 2024 10:35:06 +0100 Subject: [PATCH 079/149] ci: re-enable test migrations in release workflow (#13307) --- .github/workflows/release.yaml | 6 +++--- scripts/migrate-test/main.go | 21 +++++++++++++++++---- 2 files changed, 20 insertions(+), 7 deletions(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 
9f23a5c488e80..faa6593452e25 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -178,9 +178,9 @@ jobs: env: EV_SIGNING_CERT: ${{ secrets.EV_SIGNING_CERT }} - # - name: Test migrations from current ref to main - # run: | - # make test-migrations + - name: Test migrations from current ref to main + run: | + make test-migrations # Setup GCloud for signing Windows binaries. - name: Authenticate to Google Cloud diff --git a/scripts/migrate-test/main.go b/scripts/migrate-test/main.go index deaa7a021b628..145ccb3e1a361 100644 --- a/scripts/migrate-test/main.go +++ b/scripts/migrate-test/main.go @@ -6,6 +6,7 @@ import ( "database/sql" "flag" "fmt" + "io" "io/fs" "os" "os/exec" @@ -80,27 +81,39 @@ func main() { _, _ = fmt.Fprintf(os.Stderr, "Init database at version %q\n", migrateFromVersion) if err := migrations.UpWithFS(conn, migrateFromFS); err != nil { - panic(err) + friendlyError(os.Stderr, err, migrateFromVersion, migrateToVersion) + os.Exit(1) } _, _ = fmt.Fprintf(os.Stderr, "Migrate to version %q\n", migrateToVersion) if err := migrations.UpWithFS(conn, migrateToFS); err != nil { - panic(err) + friendlyError(os.Stderr, err, migrateFromVersion, migrateToVersion) + os.Exit(1) } _, _ = fmt.Fprintf(os.Stderr, "Dump schema at version %q\n", migrateToVersion) dumpBytesAfter, err := dbtestutil.PGDumpSchemaOnly(postgresURL) if err != nil { - panic(err) + friendlyError(os.Stderr, err, migrateFromVersion, migrateToVersion) + os.Exit(1) } if diff := cmp.Diff(string(dumpBytesAfter), string(stripGenPreamble(expectedSchemaAfter))); diff != "" { - _, _ = fmt.Fprintf(os.Stderr, "Schema differs from expected after migration: %s\n", diff) + friendlyError(os.Stderr, xerrors.Errorf("Schema differs from expected after migration: %s", diff), migrateFromVersion, migrateToVersion) os.Exit(1) } _, _ = fmt.Fprintf(os.Stderr, "OK\n") } +func friendlyError(w io.Writer, err error, v1, v2 string) { + _, _ = fmt.Fprintf(w, "Migrating from version %q to %q 
failed:\n", v1, v2) + _, _ = fmt.Fprintf(w, "\t%s\n", err.Error()) + _, _ = fmt.Fprintf(w, "Check the following:\n") + _, _ = fmt.Fprintf(w, " - All migrations from version %q must exist in version %q with the same migration numbers.\n", v2, v1) + _, _ = fmt.Fprintf(w, " - Each migration must have the same effect.\n") + _, _ = fmt.Fprintf(w, " - There must be no gaps or duplicates in the migration numbers.\n") +} + func makeMigrateFS(version string) (fs.FS, error) { // Export the migrations from the requested version to a zip archive out, err := exec.Command("git", "archive", "--format=zip", version, "coderd/database/migrations").CombinedOutput() From b8b80fe6d21aa1d1fc3e0fd7f43371fa66182be6 Mon Sep 17 00:00:00 2001 From: Marcin Tojek Date: Mon, 20 May 2024 15:30:19 +0200 Subject: [PATCH 080/149] feat: store `coder_workspace_tags` in the database (#13294) --- coderd/database/dbauthz/dbauthz.go | 30 ++ coderd/database/dbauthz/dbauthz_test.go | 13 + coderd/database/dbgen/dbgen.go | 10 + coderd/database/dbmem/dbmem.go | 38 +++ coderd/database/dbmetrics/dbmetrics.go | 14 + coderd/database/dbmock/dbmock.go | 30 ++ coderd/database/dump.sql | 12 + coderd/database/foreign_key_constraint.go | 109 +++---- .../migrations/000211_workspace_tags.down.sql | 1 + .../migrations/000211_workspace_tags.up.sql | 6 + coderd/database/migrations/migrate_test.go | 1 + coderd/database/models.go | 6 + coderd/database/querier.go | 2 + coderd/database/queries.sql.go | 55 ++++ .../queries/templateversionworkspacetags.sql | 16 + coderd/database/unique_constraint.go | 169 +++++----- .../provisionerdserver/provisionerdserver.go | 19 ++ .../provisionerdserver_test.go | 32 ++ flake.nix | 2 +- go.mod | 2 +- go.sum | 4 +- provisioner/terraform/parse.go | 136 +++++++- provisioner/terraform/parse_test.go | 171 ++++++++++ provisionerd/proto/provisionerd.pb.go | 230 +++++++------- provisionerd/proto/provisionerd.proto | 1 + provisionerd/runner/runner.go | 16 +- provisionersdk/proto/provisioner.pb.go | 293 
++++++++++-------- provisionersdk/proto/provisioner.proto | 1 + site/e2e/helpers.ts | 1 + site/e2e/provisionerGenerated.ts | 27 ++ 30 files changed, 1051 insertions(+), 396 deletions(-) create mode 100644 coderd/database/migrations/000211_workspace_tags.down.sql create mode 100644 coderd/database/migrations/000211_workspace_tags.up.sql create mode 100644 coderd/database/queries/templateversionworkspacetags.sql diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index fe49de61e7e84..f3c1ee081eb83 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -1786,6 +1786,29 @@ func (q *querier) GetTemplateVersionVariables(ctx context.Context, templateVersi return q.db.GetTemplateVersionVariables(ctx, templateVersionID) } +func (q *querier) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + tv, err := q.db.GetTemplateVersionByID(ctx, templateVersionID) + if err != nil { + return nil, err + } + + var object rbac.Objecter + template, err := q.db.GetTemplateByID(ctx, tv.TemplateID.UUID) + if err != nil { + if !errors.Is(err, sql.ErrNoRows) { + return nil, err + } + object = rbac.ResourceTemplate.InOrg(tv.OrganizationID) + } else { + object = tv.RBACObject(template) + } + + if err := q.authorizeContext(ctx, policy.ActionRead, object); err != nil { + return nil, err + } + return q.db.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) +} + // GetTemplateVersionsByIDs is only used for workspace build data. // The workspace is already fetched. 
func (q *querier) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { @@ -2507,6 +2530,13 @@ func (q *querier) InsertTemplateVersionVariable(ctx context.Context, arg databas return q.db.InsertTemplateVersionVariable(ctx, arg) } +func (q *querier) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { + if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { + return database.TemplateVersionWorkspaceTag{}, err + } + return q.db.InsertTemplateVersionWorkspaceTag(ctx, arg) +} + func (q *querier) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { // Always check if the assigned roles can actually be assigned by this actor. impliedRoles := append([]string{rbac.RoleMember()}, arg.RBACRoles...) diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 7d04a0d20a52e..b6d911dc3849a 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -778,6 +778,16 @@ func (s *MethodTestSuite) TestTemplate() { }) check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionVariable{tvv1}) })) + s.Run("GetTemplateVersionWorkspaceTags", s.Subtest(func(db database.Store, check *expects) { + t1 := dbgen.Template(s.T(), db, database.Template{}) + tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: t1.ID, Valid: true}, + }) + wt1 := dbgen.TemplateVersionWorkspaceTag(s.T(), db, database.TemplateVersionWorkspaceTag{ + TemplateVersionID: tv.ID, + }) + check.Args(tv.ID).Asserts(t1, policy.ActionRead).Returns([]database.TemplateVersionWorkspaceTag{wt1}) + })) s.Run("GetTemplateGroupRoles", s.Subtest(func(db database.Store, check *expects) { t1 := dbgen.Template(s.T(), db, database.Template{}) check.Args(t1.ID).Asserts(t1, 
policy.ActionUpdate) @@ -2339,6 +2349,9 @@ func (s *MethodTestSuite) TestSystemFunctions() { s.Run("InsertTemplateVersionVariable", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertTemplateVersionVariableParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) })) + s.Run("InsertTemplateVersionWorkspaceTag", s.Subtest(func(db database.Store, check *expects) { + check.Args(database.InsertTemplateVersionWorkspaceTagParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate) + })) s.Run("UpdateInactiveUsersToDormant", s.Subtest(func(db database.Store, check *expects) { check.Args(database.UpdateInactiveUsersToDormantParams{}).Asserts(rbac.ResourceSystem, policy.ActionCreate).Errors(sql.ErrNoRows) })) diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index fe660e7e8fa93..93d629e71e49f 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -678,6 +678,16 @@ func TemplateVersionVariable(t testing.TB, db database.Store, orig database.Temp return version } +func TemplateVersionWorkspaceTag(t testing.TB, db database.Store, orig database.TemplateVersionWorkspaceTag) database.TemplateVersionWorkspaceTag { + workspaceTag, err := db.InsertTemplateVersionWorkspaceTag(genCtx, database.InsertTemplateVersionWorkspaceTagParams{ + TemplateVersionID: takeFirst(orig.TemplateVersionID, uuid.New()), + Key: takeFirst(orig.Key, namesgenerator.GetRandomName(1)), + Value: takeFirst(orig.Value, namesgenerator.GetRandomName(1)), + }) + require.NoError(t, err, "insert template version workspace tag") + return workspaceTag +} + func TemplateVersionParameter(t testing.TB, db database.Store, orig database.TemplateVersionParameter) database.TemplateVersionParameter { t.Helper() diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index ea896b28641f4..9c76d04b5a374 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -163,6 +163,7 @@ type data struct { 
templateVersions []database.TemplateVersionTable templateVersionParameters []database.TemplateVersionParameter templateVersionVariables []database.TemplateVersionVariable + templateVersionWorkspaceTags []database.TemplateVersionWorkspaceTag templates []database.TemplateTable templateUsageStats []database.TemplateUsageStat workspaceAgents []database.WorkspaceAgent @@ -4177,6 +4178,24 @@ func (q *FakeQuerier) GetTemplateVersionVariables(_ context.Context, templateVer return variables, nil } +func (q *FakeQuerier) GetTemplateVersionWorkspaceTags(_ context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + workspaceTags := make([]database.TemplateVersionWorkspaceTag, 0) + for _, workspaceTag := range q.templateVersionWorkspaceTags { + if workspaceTag.TemplateVersionID != templateVersionID { + continue + } + workspaceTags = append(workspaceTags, workspaceTag) + } + + sort.Slice(workspaceTags, func(i, j int) bool { + return workspaceTags[i].Key < workspaceTags[j].Key + }) + return workspaceTags, nil +} + func (q *FakeQuerier) GetTemplateVersionsByIDs(_ context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -6352,6 +6371,25 @@ func (q *FakeQuerier) InsertTemplateVersionVariable(_ context.Context, arg datab return variable, nil } +func (q *FakeQuerier) InsertTemplateVersionWorkspaceTag(_ context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { + err := validateDatabaseType(arg) + if err != nil { + return database.TemplateVersionWorkspaceTag{}, err + } + + q.mutex.Lock() + defer q.mutex.Unlock() + + //nolint:gosimple + workspaceTag := database.TemplateVersionWorkspaceTag{ + TemplateVersionID: arg.TemplateVersionID, + Key: arg.Key, + Value: arg.Value, + } + q.templateVersionWorkspaceTags = append(q.templateVersionWorkspaceTags, workspaceTag) + return workspaceTag, nil +} 
+ func (q *FakeQuerier) InsertUser(_ context.Context, arg database.InsertUserParams) (database.User, error) { if err := validateDatabaseType(arg); err != nil { return database.User{}, err diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 4e0c2b8fed158..f294b8266c75f 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -1012,6 +1012,13 @@ func (m metricsStore) GetTemplateVersionVariables(ctx context.Context, templateV return variables, err } +func (m metricsStore) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + start := time.Now() + r0, r1 := m.s.GetTemplateVersionWorkspaceTags(ctx, templateVersionID) + m.queryLatencies.WithLabelValues("GetTemplateVersionWorkspaceTags").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.TemplateVersion, error) { start := time.Now() versions, err := m.s.GetTemplateVersionsByIDs(ctx, ids) @@ -1600,6 +1607,13 @@ func (m metricsStore) InsertTemplateVersionVariable(ctx context.Context, arg dat return variable, err } +func (m metricsStore) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { + start := time.Now() + r0, r1 := m.s.InsertTemplateVersionWorkspaceTag(ctx, arg) + m.queryLatencies.WithLabelValues("InsertTemplateVersionWorkspaceTag").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) InsertUser(ctx context.Context, arg database.InsertUserParams) (database.User, error) { start := time.Now() user, err := m.s.InsertUser(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 69558e884c6a6..157118be65c3a 100644 --- a/coderd/database/dbmock/dbmock.go +++ 
b/coderd/database/dbmock/dbmock.go @@ -2085,6 +2085,21 @@ func (mr *MockStoreMockRecorder) GetTemplateVersionVariables(arg0, arg1 any) *go return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTemplateVersionVariables", reflect.TypeOf((*MockStore)(nil).GetTemplateVersionVariables), arg0, arg1) } +// GetTemplateVersionWorkspaceTags mocks base method. +func (m *MockStore) GetTemplateVersionWorkspaceTags(arg0 context.Context, arg1 uuid.UUID) ([]database.TemplateVersionWorkspaceTag, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetTemplateVersionWorkspaceTags", arg0, arg1) + ret0, _ := ret[0].([]database.TemplateVersionWorkspaceTag) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetTemplateVersionWorkspaceTags indicates an expected call of GetTemplateVersionWorkspaceTags. +func (mr *MockStoreMockRecorder) GetTemplateVersionWorkspaceTags(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetTemplateVersionWorkspaceTags", reflect.TypeOf((*MockStore)(nil).GetTemplateVersionWorkspaceTags), arg0, arg1) +} + // GetTemplateVersionsByIDs mocks base method. func (m *MockStore) GetTemplateVersionsByIDs(arg0 context.Context, arg1 []uuid.UUID) ([]database.TemplateVersion, error) { m.ctrl.T.Helper() @@ -3353,6 +3368,21 @@ func (mr *MockStoreMockRecorder) InsertTemplateVersionVariable(arg0, arg1 any) * return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTemplateVersionVariable", reflect.TypeOf((*MockStore)(nil).InsertTemplateVersionVariable), arg0, arg1) } +// InsertTemplateVersionWorkspaceTag mocks base method. 
+func (m *MockStore) InsertTemplateVersionWorkspaceTag(arg0 context.Context, arg1 database.InsertTemplateVersionWorkspaceTagParams) (database.TemplateVersionWorkspaceTag, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "InsertTemplateVersionWorkspaceTag", arg0, arg1) + ret0, _ := ret[0].(database.TemplateVersionWorkspaceTag) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// InsertTemplateVersionWorkspaceTag indicates an expected call of InsertTemplateVersionWorkspaceTag. +func (mr *MockStoreMockRecorder) InsertTemplateVersionWorkspaceTag(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "InsertTemplateVersionWorkspaceTag", reflect.TypeOf((*MockStore)(nil).InsertTemplateVersionWorkspaceTag), arg0, arg1) +} + // InsertUser mocks base method. func (m *MockStore) InsertUser(arg0 context.Context, arg1 database.InsertUserParams) (database.User, error) { m.ctrl.T.Helper() diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 718c323fd7804..097f56aff5915 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -943,6 +943,12 @@ CREATE VIEW template_version_with_user AS COMMENT ON VIEW template_version_with_user IS 'Joins in the username + avatar url of the created by user.'; +CREATE TABLE template_version_workspace_tags ( + template_version_id uuid NOT NULL, + key text NOT NULL, + value text NOT NULL +); + CREATE TABLE templates ( id uuid NOT NULL, created_at timestamp with time zone NOT NULL, @@ -1536,6 +1542,9 @@ ALTER TABLE ONLY template_version_parameters ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_name_key UNIQUE (template_version_id, name); +ALTER TABLE ONLY template_version_workspace_tags + ADD CONSTRAINT template_version_workspace_tags_template_version_id_key_key UNIQUE (template_version_id, key); + ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_pkey PRIMARY KEY (id); @@ -1797,6 
+1806,9 @@ ALTER TABLE ONLY template_version_parameters ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; +ALTER TABLE ONLY template_version_workspace_tags + ADD CONSTRAINT template_version_workspace_tags_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; + ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; diff --git a/coderd/database/foreign_key_constraint.go b/coderd/database/foreign_key_constraint.go index c808f4ecf6816..2a8f1738d3cb8 100644 --- a/coderd/database/foreign_key_constraint.go +++ b/coderd/database/foreign_key_constraint.go @@ -6,58 +6,59 @@ type ForeignKeyConstraint string // ForeignKeyConstraint enums. const ( - ForeignKeyAPIKeysUserIDUUID ForeignKeyConstraint = "api_keys_user_id_uuid_fkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; - ForeignKeyGitAuthLinksOauthAccessTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); - ForeignKeyGitAuthLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); - ForeignKeyGitSSHKeysUserID ForeignKeyConstraint = "gitsshkeys_user_id_fkey" // ALTER TABLE ONLY gitsshkeys ADD CONSTRAINT gitsshkeys_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); - ForeignKeyGroupMembersGroupID 
ForeignKeyConstraint = "group_members_group_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_group_id_fkey FOREIGN KEY (group_id) REFERENCES groups(id) ON DELETE CASCADE; - ForeignKeyGroupMembersUserID ForeignKeyConstraint = "group_members_user_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; - ForeignKeyGroupsOrganizationID ForeignKeyConstraint = "groups_organization_id_fkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyJfrogXrayScansAgentID ForeignKeyConstraint = "jfrog_xray_scans_agent_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyJfrogXrayScansWorkspaceID ForeignKeyConstraint = "jfrog_xray_scans_workspace_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; - ForeignKeyOauth2ProviderAppCodesAppID ForeignKeyConstraint = "oauth2_provider_app_codes_app_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_app_id_fkey FOREIGN KEY (app_id) REFERENCES oauth2_provider_apps(id) ON DELETE CASCADE; - ForeignKeyOauth2ProviderAppCodesUserID ForeignKeyConstraint = "oauth2_provider_app_codes_user_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; - ForeignKeyOauth2ProviderAppSecretsAppID ForeignKeyConstraint = "oauth2_provider_app_secrets_app_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_app_id_fkey FOREIGN KEY (app_id) REFERENCES oauth2_provider_apps(id) ON DELETE CASCADE; - 
ForeignKeyOauth2ProviderAppTokensAPIKeyID ForeignKeyConstraint = "oauth2_provider_app_tokens_api_key_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_api_key_id_fkey FOREIGN KEY (api_key_id) REFERENCES api_keys(id) ON DELETE CASCADE; - ForeignKeyOauth2ProviderAppTokensAppSecretID ForeignKeyConstraint = "oauth2_provider_app_tokens_app_secret_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_app_secret_id_fkey FOREIGN KEY (app_secret_id) REFERENCES oauth2_provider_app_secrets(id) ON DELETE CASCADE; - ForeignKeyOrganizationMembersOrganizationIDUUID ForeignKeyConstraint = "organization_members_organization_id_uuid_fkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_organization_id_uuid_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyOrganizationMembersUserIDUUID ForeignKeyConstraint = "organization_members_user_id_uuid_fkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; - ForeignKeyParameterSchemasJobID ForeignKeyConstraint = "parameter_schemas_job_id_fkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; - ForeignKeyProvisionerDaemonsOrganizationID ForeignKeyConstraint = "provisioner_daemons_organization_id_fkey" // ALTER TABLE ONLY provisioner_daemons ADD CONSTRAINT provisioner_daemons_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyProvisionerJobLogsJobID ForeignKeyConstraint = "provisioner_job_logs_job_id_fkey" // ALTER TABLE ONLY provisioner_job_logs ADD CONSTRAINT provisioner_job_logs_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; - ForeignKeyProvisionerJobsOrganizationID 
ForeignKeyConstraint = "provisioner_jobs_organization_id_fkey" // ALTER TABLE ONLY provisioner_jobs ADD CONSTRAINT provisioner_jobs_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyTailnetAgentsCoordinatorID ForeignKeyConstraint = "tailnet_agents_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_agents ADD CONSTRAINT tailnet_agents_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; - ForeignKeyTailnetClientSubscriptionsCoordinatorID ForeignKeyConstraint = "tailnet_client_subscriptions_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_client_subscriptions ADD CONSTRAINT tailnet_client_subscriptions_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; - ForeignKeyTailnetClientsCoordinatorID ForeignKeyConstraint = "tailnet_clients_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_clients ADD CONSTRAINT tailnet_clients_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; - ForeignKeyTailnetPeersCoordinatorID ForeignKeyConstraint = "tailnet_peers_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_peers ADD CONSTRAINT tailnet_peers_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; - ForeignKeyTailnetTunnelsCoordinatorID ForeignKeyConstraint = "tailnet_tunnels_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; - ForeignKeyTemplateVersionParametersTemplateVersionID ForeignKeyConstraint = "template_version_parameters_template_version_id_fkey" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT template_version_parameters_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; - 
ForeignKeyTemplateVersionVariablesTemplateVersionID ForeignKeyConstraint = "template_version_variables_template_version_id_fkey" // ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; - ForeignKeyTemplateVersionsCreatedBy ForeignKeyConstraint = "template_versions_created_by_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; - ForeignKeyTemplateVersionsOrganizationID ForeignKeyConstraint = "template_versions_organization_id_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyTemplateVersionsTemplateID ForeignKeyConstraint = "template_versions_template_id_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_fkey FOREIGN KEY (template_id) REFERENCES templates(id) ON DELETE CASCADE; - ForeignKeyTemplatesCreatedBy ForeignKeyConstraint = "templates_created_by_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; - ForeignKeyTemplatesOrganizationID ForeignKeyConstraint = "templates_organization_id_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; - ForeignKeyUserLinksOauthAccessTokenKeyID ForeignKeyConstraint = "user_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); - ForeignKeyUserLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "user_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY 
user_links ADD CONSTRAINT user_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); - ForeignKeyUserLinksUserID ForeignKeyConstraint = "user_links_user_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentLogSourcesWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_log_sources_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentMetadataWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_metadata_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentPortShareWorkspaceID ForeignKeyConstraint = "workspace_agent_port_share_workspace_id_fkey" // ALTER TABLE ONLY workspace_agent_port_share ADD CONSTRAINT workspace_agent_port_share_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentScriptsWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_scripts_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_scripts ADD CONSTRAINT workspace_agent_scripts_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentStartupLogsAgentID ForeignKeyConstraint = "workspace_agent_startup_logs_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_logs ADD CONSTRAINT workspace_agent_startup_logs_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAgentsResourceID ForeignKeyConstraint = 
"workspace_agents_resource_id_fkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_resource_id_fkey FOREIGN KEY (resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE; - ForeignKeyWorkspaceAppStatsAgentID ForeignKeyConstraint = "workspace_app_stats_agent_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id); - ForeignKeyWorkspaceAppStatsUserID ForeignKeyConstraint = "workspace_app_stats_user_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); - ForeignKeyWorkspaceAppStatsWorkspaceID ForeignKeyConstraint = "workspace_app_stats_workspace_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id); - ForeignKeyWorkspaceAppsAgentID ForeignKeyConstraint = "workspace_apps_agent_id_fkey" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; - ForeignKeyWorkspaceBuildParametersWorkspaceBuildID ForeignKeyConstraint = "workspace_build_parameters_workspace_build_id_fkey" // ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; - ForeignKeyWorkspaceBuildsJobID ForeignKeyConstraint = "workspace_builds_job_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; - ForeignKeyWorkspaceBuildsTemplateVersionID ForeignKeyConstraint = "workspace_builds_template_version_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) 
ON DELETE CASCADE; - ForeignKeyWorkspaceBuildsWorkspaceID ForeignKeyConstraint = "workspace_builds_workspace_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; - ForeignKeyWorkspaceResourceMetadataWorkspaceResourceID ForeignKeyConstraint = "workspace_resource_metadata_workspace_resource_id_fkey" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_workspace_resource_id_fkey FOREIGN KEY (workspace_resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE; - ForeignKeyWorkspaceResourcesJobID ForeignKeyConstraint = "workspace_resources_job_id_fkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; - ForeignKeyWorkspacesOrganizationID ForeignKeyConstraint = "workspaces_organization_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE RESTRICT; - ForeignKeyWorkspacesOwnerID ForeignKeyConstraint = "workspaces_owner_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE RESTRICT; - ForeignKeyWorkspacesTemplateID ForeignKeyConstraint = "workspaces_template_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_template_id_fkey FOREIGN KEY (template_id) REFERENCES templates(id) ON DELETE RESTRICT; + ForeignKeyAPIKeysUserIDUUID ForeignKeyConstraint = "api_keys_user_id_uuid_fkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyGitAuthLinksOauthAccessTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT 
git_auth_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); + ForeignKeyGitAuthLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "git_auth_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); + ForeignKeyGitSSHKeysUserID ForeignKeyConstraint = "gitsshkeys_user_id_fkey" // ALTER TABLE ONLY gitsshkeys ADD CONSTRAINT gitsshkeys_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); + ForeignKeyGroupMembersGroupID ForeignKeyConstraint = "group_members_group_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_group_id_fkey FOREIGN KEY (group_id) REFERENCES groups(id) ON DELETE CASCADE; + ForeignKeyGroupMembersUserID ForeignKeyConstraint = "group_members_user_id_fkey" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyGroupsOrganizationID ForeignKeyConstraint = "groups_organization_id_fkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyJfrogXrayScansAgentID ForeignKeyConstraint = "jfrog_xray_scans_agent_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; + ForeignKeyJfrogXrayScansWorkspaceID ForeignKeyConstraint = "jfrog_xray_scans_workspace_id_fkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; + ForeignKeyOauth2ProviderAppCodesAppID ForeignKeyConstraint = "oauth2_provider_app_codes_app_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT 
oauth2_provider_app_codes_app_id_fkey FOREIGN KEY (app_id) REFERENCES oauth2_provider_apps(id) ON DELETE CASCADE; + ForeignKeyOauth2ProviderAppCodesUserID ForeignKeyConstraint = "oauth2_provider_app_codes_user_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyOauth2ProviderAppSecretsAppID ForeignKeyConstraint = "oauth2_provider_app_secrets_app_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_app_id_fkey FOREIGN KEY (app_id) REFERENCES oauth2_provider_apps(id) ON DELETE CASCADE; + ForeignKeyOauth2ProviderAppTokensAPIKeyID ForeignKeyConstraint = "oauth2_provider_app_tokens_api_key_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_api_key_id_fkey FOREIGN KEY (api_key_id) REFERENCES api_keys(id) ON DELETE CASCADE; + ForeignKeyOauth2ProviderAppTokensAppSecretID ForeignKeyConstraint = "oauth2_provider_app_tokens_app_secret_id_fkey" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_app_secret_id_fkey FOREIGN KEY (app_secret_id) REFERENCES oauth2_provider_app_secrets(id) ON DELETE CASCADE; + ForeignKeyOrganizationMembersOrganizationIDUUID ForeignKeyConstraint = "organization_members_organization_id_uuid_fkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_organization_id_uuid_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyOrganizationMembersUserIDUUID ForeignKeyConstraint = "organization_members_user_id_uuid_fkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_user_id_uuid_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyParameterSchemasJobID ForeignKeyConstraint = "parameter_schemas_job_id_fkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT 
parameter_schemas_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; + ForeignKeyProvisionerDaemonsOrganizationID ForeignKeyConstraint = "provisioner_daemons_organization_id_fkey" // ALTER TABLE ONLY provisioner_daemons ADD CONSTRAINT provisioner_daemons_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyProvisionerJobLogsJobID ForeignKeyConstraint = "provisioner_job_logs_job_id_fkey" // ALTER TABLE ONLY provisioner_job_logs ADD CONSTRAINT provisioner_job_logs_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; + ForeignKeyProvisionerJobsOrganizationID ForeignKeyConstraint = "provisioner_jobs_organization_id_fkey" // ALTER TABLE ONLY provisioner_jobs ADD CONSTRAINT provisioner_jobs_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyTailnetAgentsCoordinatorID ForeignKeyConstraint = "tailnet_agents_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_agents ADD CONSTRAINT tailnet_agents_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; + ForeignKeyTailnetClientSubscriptionsCoordinatorID ForeignKeyConstraint = "tailnet_client_subscriptions_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_client_subscriptions ADD CONSTRAINT tailnet_client_subscriptions_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; + ForeignKeyTailnetClientsCoordinatorID ForeignKeyConstraint = "tailnet_clients_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_clients ADD CONSTRAINT tailnet_clients_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; + ForeignKeyTailnetPeersCoordinatorID ForeignKeyConstraint = "tailnet_peers_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_peers ADD CONSTRAINT tailnet_peers_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES 
tailnet_coordinators(id) ON DELETE CASCADE; + ForeignKeyTailnetTunnelsCoordinatorID ForeignKeyConstraint = "tailnet_tunnels_coordinator_id_fkey" // ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_coordinator_id_fkey FOREIGN KEY (coordinator_id) REFERENCES tailnet_coordinators(id) ON DELETE CASCADE; + ForeignKeyTemplateVersionParametersTemplateVersionID ForeignKeyConstraint = "template_version_parameters_template_version_id_fkey" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT template_version_parameters_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; + ForeignKeyTemplateVersionVariablesTemplateVersionID ForeignKeyConstraint = "template_version_variables_template_version_id_fkey" // ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; + ForeignKeyTemplateVersionWorkspaceTagsTemplateVersionID ForeignKeyConstraint = "template_version_workspace_tags_template_version_id_fkey" // ALTER TABLE ONLY template_version_workspace_tags ADD CONSTRAINT template_version_workspace_tags_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; + ForeignKeyTemplateVersionsCreatedBy ForeignKeyConstraint = "template_versions_created_by_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; + ForeignKeyTemplateVersionsOrganizationID ForeignKeyConstraint = "template_versions_organization_id_fkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyTemplateVersionsTemplateID ForeignKeyConstraint = "template_versions_template_id_fkey" // ALTER TABLE ONLY template_versions ADD 
CONSTRAINT template_versions_template_id_fkey FOREIGN KEY (template_id) REFERENCES templates(id) ON DELETE CASCADE; + ForeignKeyTemplatesCreatedBy ForeignKeyConstraint = "templates_created_by_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_created_by_fkey FOREIGN KEY (created_by) REFERENCES users(id) ON DELETE RESTRICT; + ForeignKeyTemplatesOrganizationID ForeignKeyConstraint = "templates_organization_id_fkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE CASCADE; + ForeignKeyUserLinksOauthAccessTokenKeyID ForeignKeyConstraint = "user_links_oauth_access_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_access_token_key_id_fkey FOREIGN KEY (oauth_access_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); + ForeignKeyUserLinksOauthRefreshTokenKeyID ForeignKeyConstraint = "user_links_oauth_refresh_token_key_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_oauth_refresh_token_key_id_fkey FOREIGN KEY (oauth_refresh_token_key_id) REFERENCES dbcrypt_keys(active_key_digest); + ForeignKeyUserLinksUserID ForeignKeyConstraint = "user_links_user_id_fkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAgentLogSourcesWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_log_sources_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAgentMetadataWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_metadata_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE 
CASCADE; + ForeignKeyWorkspaceAgentPortShareWorkspaceID ForeignKeyConstraint = "workspace_agent_port_share_workspace_id_fkey" // ALTER TABLE ONLY workspace_agent_port_share ADD CONSTRAINT workspace_agent_port_share_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAgentScriptsWorkspaceAgentID ForeignKeyConstraint = "workspace_agent_scripts_workspace_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_scripts ADD CONSTRAINT workspace_agent_scripts_workspace_agent_id_fkey FOREIGN KEY (workspace_agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAgentStartupLogsAgentID ForeignKeyConstraint = "workspace_agent_startup_logs_agent_id_fkey" // ALTER TABLE ONLY workspace_agent_logs ADD CONSTRAINT workspace_agent_startup_logs_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAgentsResourceID ForeignKeyConstraint = "workspace_agents_resource_id_fkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_resource_id_fkey FOREIGN KEY (resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE; + ForeignKeyWorkspaceAppStatsAgentID ForeignKeyConstraint = "workspace_app_stats_agent_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id); + ForeignKeyWorkspaceAppStatsUserID ForeignKeyConstraint = "workspace_app_stats_user_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_user_id_fkey FOREIGN KEY (user_id) REFERENCES users(id); + ForeignKeyWorkspaceAppStatsWorkspaceID ForeignKeyConstraint = "workspace_app_stats_workspace_id_fkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id); + ForeignKeyWorkspaceAppsAgentID ForeignKeyConstraint = "workspace_apps_agent_id_fkey" // ALTER TABLE ONLY 
workspace_apps ADD CONSTRAINT workspace_apps_agent_id_fkey FOREIGN KEY (agent_id) REFERENCES workspace_agents(id) ON DELETE CASCADE; + ForeignKeyWorkspaceBuildParametersWorkspaceBuildID ForeignKeyConstraint = "workspace_build_parameters_workspace_build_id_fkey" // ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_fkey FOREIGN KEY (workspace_build_id) REFERENCES workspace_builds(id) ON DELETE CASCADE; + ForeignKeyWorkspaceBuildsJobID ForeignKeyConstraint = "workspace_builds_job_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; + ForeignKeyWorkspaceBuildsTemplateVersionID ForeignKeyConstraint = "workspace_builds_template_version_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_template_version_id_fkey FOREIGN KEY (template_version_id) REFERENCES template_versions(id) ON DELETE CASCADE; + ForeignKeyWorkspaceBuildsWorkspaceID ForeignKeyConstraint = "workspace_builds_workspace_id_fkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspaces(id) ON DELETE CASCADE; + ForeignKeyWorkspaceResourceMetadataWorkspaceResourceID ForeignKeyConstraint = "workspace_resource_metadata_workspace_resource_id_fkey" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_workspace_resource_id_fkey FOREIGN KEY (workspace_resource_id) REFERENCES workspace_resources(id) ON DELETE CASCADE; + ForeignKeyWorkspaceResourcesJobID ForeignKeyConstraint = "workspace_resources_job_id_fkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_job_id_fkey FOREIGN KEY (job_id) REFERENCES provisioner_jobs(id) ON DELETE CASCADE; + ForeignKeyWorkspacesOrganizationID ForeignKeyConstraint = "workspaces_organization_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT 
workspaces_organization_id_fkey FOREIGN KEY (organization_id) REFERENCES organizations(id) ON DELETE RESTRICT; + ForeignKeyWorkspacesOwnerID ForeignKeyConstraint = "workspaces_owner_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES users(id) ON DELETE RESTRICT; + ForeignKeyWorkspacesTemplateID ForeignKeyConstraint = "workspaces_template_id_fkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_template_id_fkey FOREIGN KEY (template_id) REFERENCES templates(id) ON DELETE RESTRICT; ) diff --git a/coderd/database/migrations/000211_workspace_tags.down.sql b/coderd/database/migrations/000211_workspace_tags.down.sql new file mode 100644 index 0000000000000..71ae8dcd8327c --- /dev/null +++ b/coderd/database/migrations/000211_workspace_tags.down.sql @@ -0,0 +1 @@ +DROP TABLE IF EXISTS template_version_workspace_tags; diff --git a/coderd/database/migrations/000211_workspace_tags.up.sql b/coderd/database/migrations/000211_workspace_tags.up.sql new file mode 100644 index 0000000000000..10942ba5c0607 --- /dev/null +++ b/coderd/database/migrations/000211_workspace_tags.up.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS template_version_workspace_tags ( + template_version_id uuid not null references template_versions (id) on delete cascade, + key text not null, + value text not null, + unique (template_version_id, key) +); diff --git a/coderd/database/migrations/migrate_test.go b/coderd/database/migrations/migrate_test.go index f7c505f077b82..f7e284621edea 100644 --- a/coderd/database/migrations/migrate_test.go +++ b/coderd/database/migrations/migrate_test.go @@ -267,6 +267,7 @@ func TestMigrateUpWithFixtures(t *testing.T) { "workspace_build_parameters", "template_version_variables", "dbcrypt_keys", // having zero rows is a valid state for this table + "template_version_workspace_tags", } s := &tableStats{s: make(map[string]int)} diff --git a/coderd/database/models.go b/coderd/database/models.go index 
33cf1c607939c..3636f04fd05c5 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -2267,6 +2267,12 @@ type TemplateVersionVariable struct { Sensitive bool `db:"sensitive" json:"sensitive"` } +type TemplateVersionWorkspaceTag struct { + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + Key string `db:"key" json:"key"` + Value string `db:"value" json:"value"` +} + type User struct { ID uuid.UUID `db:"id" json:"id"` Email string `db:"email" json:"email"` diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 01615a58e06bd..cbc76dee5f602 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -211,6 +211,7 @@ type sqlcQuerier interface { GetTemplateVersionByTemplateIDAndName(ctx context.Context, arg GetTemplateVersionByTemplateIDAndNameParams) (TemplateVersion, error) GetTemplateVersionParameters(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionParameter, error) GetTemplateVersionVariables(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionVariable, error) + GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionWorkspaceTag, error) GetTemplateVersionsByIDs(ctx context.Context, ids []uuid.UUID) ([]TemplateVersion, error) GetTemplateVersionsByTemplateID(ctx context.Context, arg GetTemplateVersionsByTemplateIDParams) ([]TemplateVersion, error) GetTemplateVersionsCreatedAfter(ctx context.Context, createdAt time.Time) ([]TemplateVersion, error) @@ -327,6 +328,7 @@ type sqlcQuerier interface { InsertTemplateVersion(ctx context.Context, arg InsertTemplateVersionParams) error InsertTemplateVersionParameter(ctx context.Context, arg InsertTemplateVersionParameterParams) (TemplateVersionParameter, error) InsertTemplateVersionVariable(ctx context.Context, arg InsertTemplateVersionVariableParams) (TemplateVersionVariable, error) + InsertTemplateVersionWorkspaceTag(ctx context.Context, arg 
InsertTemplateVersionWorkspaceTagParams) (TemplateVersionWorkspaceTag, error) InsertUser(ctx context.Context, arg InsertUserParams) (User, error) // InsertUserGroupsByName adds a user to all provided groups, if they exist. InsertUserGroupsByName(ctx context.Context, arg InsertUserGroupsByNameParams) error diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 7a0b60478f79f..baf124dce9b48 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -8011,6 +8011,61 @@ func (q *sqlQuerier) InsertTemplateVersionVariable(ctx context.Context, arg Inse return i, err } +const getTemplateVersionWorkspaceTags = `-- name: GetTemplateVersionWorkspaceTags :many +SELECT template_version_id, key, value FROM template_version_workspace_tags WHERE template_version_id = $1 ORDER BY LOWER(key) ASC +` + +func (q *sqlQuerier) GetTemplateVersionWorkspaceTags(ctx context.Context, templateVersionID uuid.UUID) ([]TemplateVersionWorkspaceTag, error) { + rows, err := q.db.QueryContext(ctx, getTemplateVersionWorkspaceTags, templateVersionID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []TemplateVersionWorkspaceTag + for rows.Next() { + var i TemplateVersionWorkspaceTag + if err := rows.Scan(&i.TemplateVersionID, &i.Key, &i.Value); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const insertTemplateVersionWorkspaceTag = `-- name: InsertTemplateVersionWorkspaceTag :one +INSERT INTO + template_version_workspace_tags ( + template_version_id, + key, + value + ) +VALUES + ( + $1, + $2, + $3 + ) RETURNING template_version_id, key, value +` + +type InsertTemplateVersionWorkspaceTagParams struct { + TemplateVersionID uuid.UUID `db:"template_version_id" json:"template_version_id"` + Key string `db:"key" json:"key"` + Value string `db:"value" json:"value"` +} + 
+func (q *sqlQuerier) InsertTemplateVersionWorkspaceTag(ctx context.Context, arg InsertTemplateVersionWorkspaceTagParams) (TemplateVersionWorkspaceTag, error) { + row := q.db.QueryRowContext(ctx, insertTemplateVersionWorkspaceTag, arg.TemplateVersionID, arg.Key, arg.Value) + var i TemplateVersionWorkspaceTag + err := row.Scan(&i.TemplateVersionID, &i.Key, &i.Value) + return i, err +} + const getUserLinkByLinkedID = `-- name: GetUserLinkByLinkedID :one SELECT user_links.user_id, user_links.login_type, user_links.linked_id, user_links.oauth_access_token, user_links.oauth_refresh_token, user_links.oauth_expiry, user_links.oauth_access_token_key_id, user_links.oauth_refresh_token_key_id, user_links.debug_context diff --git a/coderd/database/queries/templateversionworkspacetags.sql b/coderd/database/queries/templateversionworkspacetags.sql new file mode 100644 index 0000000000000..8e74ed1aa1732 --- /dev/null +++ b/coderd/database/queries/templateversionworkspacetags.sql @@ -0,0 +1,16 @@ +-- name: InsertTemplateVersionWorkspaceTag :one +INSERT INTO + template_version_workspace_tags ( + template_version_id, + key, + value + ) +VALUES + ( + $1, + $2, + $3 + ) RETURNING *; + +-- name: GetTemplateVersionWorkspaceTags :many +SELECT * FROM template_version_workspace_tags WHERE template_version_id = $1 ORDER BY LOWER(key) ASC; diff --git a/coderd/database/unique_constraint.go b/coderd/database/unique_constraint.go index 8b61e06518f0b..cbae30279c5e9 100644 --- a/coderd/database/unique_constraint.go +++ b/coderd/database/unique_constraint.go @@ -6,88 +6,89 @@ type UniqueConstraint string // UniqueConstraint enums. 
const ( - UniqueAgentStatsPkey UniqueConstraint = "agent_stats_pkey" // ALTER TABLE ONLY workspace_agent_stats ADD CONSTRAINT agent_stats_pkey PRIMARY KEY (id); - UniqueAPIKeysPkey UniqueConstraint = "api_keys_pkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id); - UniqueAuditLogsPkey UniqueConstraint = "audit_logs_pkey" // ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id); - UniqueCustomRolesPkey UniqueConstraint = "custom_roles_pkey" // ALTER TABLE ONLY custom_roles ADD CONSTRAINT custom_roles_pkey PRIMARY KEY (name); - UniqueDbcryptKeysActiveKeyDigestKey UniqueConstraint = "dbcrypt_keys_active_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_active_key_digest_key UNIQUE (active_key_digest); - UniqueDbcryptKeysPkey UniqueConstraint = "dbcrypt_keys_pkey" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_pkey PRIMARY KEY (number); - UniqueDbcryptKeysRevokedKeyDigestKey UniqueConstraint = "dbcrypt_keys_revoked_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_revoked_key_digest_key UNIQUE (revoked_key_digest); - UniqueFilesHashCreatedByKey UniqueConstraint = "files_hash_created_by_key" // ALTER TABLE ONLY files ADD CONSTRAINT files_hash_created_by_key UNIQUE (hash, created_by); - UniqueFilesPkey UniqueConstraint = "files_pkey" // ALTER TABLE ONLY files ADD CONSTRAINT files_pkey PRIMARY KEY (id); - UniqueGitAuthLinksProviderIDUserIDKey UniqueConstraint = "git_auth_links_provider_id_user_id_key" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_provider_id_user_id_key UNIQUE (provider_id, user_id); - UniqueGitSSHKeysPkey UniqueConstraint = "gitsshkeys_pkey" // ALTER TABLE ONLY gitsshkeys ADD CONSTRAINT gitsshkeys_pkey PRIMARY KEY (user_id); - UniqueGroupMembersUserIDGroupIDKey UniqueConstraint = "group_members_user_id_group_id_key" // ALTER TABLE ONLY group_members ADD CONSTRAINT group_members_user_id_group_id_key UNIQUE (user_id, 
group_id); - UniqueGroupsNameOrganizationIDKey UniqueConstraint = "groups_name_organization_id_key" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_name_organization_id_key UNIQUE (name, organization_id); - UniqueGroupsPkey UniqueConstraint = "groups_pkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_pkey PRIMARY KEY (id); - UniqueJfrogXrayScansPkey UniqueConstraint = "jfrog_xray_scans_pkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_pkey PRIMARY KEY (agent_id, workspace_id); - UniqueLicensesJWTKey UniqueConstraint = "licenses_jwt_key" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_jwt_key UNIQUE (jwt); - UniqueLicensesPkey UniqueConstraint = "licenses_pkey" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_pkey PRIMARY KEY (id); - UniqueOauth2ProviderAppCodesPkey UniqueConstraint = "oauth2_provider_app_codes_pkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_pkey PRIMARY KEY (id); - UniqueOauth2ProviderAppCodesSecretPrefixKey UniqueConstraint = "oauth2_provider_app_codes_secret_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_secret_prefix_key UNIQUE (secret_prefix); - UniqueOauth2ProviderAppSecretsPkey UniqueConstraint = "oauth2_provider_app_secrets_pkey" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_pkey PRIMARY KEY (id); - UniqueOauth2ProviderAppSecretsSecretPrefixKey UniqueConstraint = "oauth2_provider_app_secrets_secret_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_secret_prefix_key UNIQUE (secret_prefix); - UniqueOauth2ProviderAppTokensHashPrefixKey UniqueConstraint = "oauth2_provider_app_tokens_hash_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_hash_prefix_key UNIQUE (hash_prefix); - UniqueOauth2ProviderAppTokensPkey UniqueConstraint = "oauth2_provider_app_tokens_pkey" // 
ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_pkey PRIMARY KEY (id); - UniqueOauth2ProviderAppsNameKey UniqueConstraint = "oauth2_provider_apps_name_key" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_name_key UNIQUE (name); - UniqueOauth2ProviderAppsPkey UniqueConstraint = "oauth2_provider_apps_pkey" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_pkey PRIMARY KEY (id); - UniqueOrganizationMembersPkey UniqueConstraint = "organization_members_pkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id); - UniqueOrganizationsName UniqueConstraint = "organizations_name" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_name UNIQUE (name); - UniqueOrganizationsPkey UniqueConstraint = "organizations_pkey" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_pkey PRIMARY KEY (id); - UniqueParameterSchemasJobIDNameKey UniqueConstraint = "parameter_schemas_job_id_name_key" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_job_id_name_key UNIQUE (job_id, name); - UniqueParameterSchemasPkey UniqueConstraint = "parameter_schemas_pkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_pkey PRIMARY KEY (id); - UniqueParameterValuesPkey UniqueConstraint = "parameter_values_pkey" // ALTER TABLE ONLY parameter_values ADD CONSTRAINT parameter_values_pkey PRIMARY KEY (id); - UniqueParameterValuesScopeIDNameKey UniqueConstraint = "parameter_values_scope_id_name_key" // ALTER TABLE ONLY parameter_values ADD CONSTRAINT parameter_values_scope_id_name_key UNIQUE (scope_id, name); - UniqueProvisionerDaemonsPkey UniqueConstraint = "provisioner_daemons_pkey" // ALTER TABLE ONLY provisioner_daemons ADD CONSTRAINT provisioner_daemons_pkey PRIMARY KEY (id); - UniqueProvisionerJobLogsPkey UniqueConstraint = "provisioner_job_logs_pkey" // ALTER TABLE ONLY 
provisioner_job_logs ADD CONSTRAINT provisioner_job_logs_pkey PRIMARY KEY (id); - UniqueProvisionerJobsPkey UniqueConstraint = "provisioner_jobs_pkey" // ALTER TABLE ONLY provisioner_jobs ADD CONSTRAINT provisioner_jobs_pkey PRIMARY KEY (id); - UniqueSiteConfigsKeyKey UniqueConstraint = "site_configs_key_key" // ALTER TABLE ONLY site_configs ADD CONSTRAINT site_configs_key_key UNIQUE (key); - UniqueTailnetAgentsPkey UniqueConstraint = "tailnet_agents_pkey" // ALTER TABLE ONLY tailnet_agents ADD CONSTRAINT tailnet_agents_pkey PRIMARY KEY (id, coordinator_id); - UniqueTailnetClientSubscriptionsPkey UniqueConstraint = "tailnet_client_subscriptions_pkey" // ALTER TABLE ONLY tailnet_client_subscriptions ADD CONSTRAINT tailnet_client_subscriptions_pkey PRIMARY KEY (client_id, coordinator_id, agent_id); - UniqueTailnetClientsPkey UniqueConstraint = "tailnet_clients_pkey" // ALTER TABLE ONLY tailnet_clients ADD CONSTRAINT tailnet_clients_pkey PRIMARY KEY (id, coordinator_id); - UniqueTailnetCoordinatorsPkey UniqueConstraint = "tailnet_coordinators_pkey" // ALTER TABLE ONLY tailnet_coordinators ADD CONSTRAINT tailnet_coordinators_pkey PRIMARY KEY (id); - UniqueTailnetPeersPkey UniqueConstraint = "tailnet_peers_pkey" // ALTER TABLE ONLY tailnet_peers ADD CONSTRAINT tailnet_peers_pkey PRIMARY KEY (id, coordinator_id); - UniqueTailnetTunnelsPkey UniqueConstraint = "tailnet_tunnels_pkey" // ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_pkey PRIMARY KEY (coordinator_id, src_id, dst_id); - UniqueTemplateUsageStatsPkey UniqueConstraint = "template_usage_stats_pkey" // ALTER TABLE ONLY template_usage_stats ADD CONSTRAINT template_usage_stats_pkey PRIMARY KEY (start_time, template_id, user_id); - UniqueTemplateVersionParametersTemplateVersionIDNameKey UniqueConstraint = "template_version_parameters_template_version_id_name_key" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT template_version_parameters_template_version_id_name_key UNIQUE 
(template_version_id, name); - UniqueTemplateVersionVariablesTemplateVersionIDNameKey UniqueConstraint = "template_version_variables_template_version_id_name_key" // ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_name_key UNIQUE (template_version_id, name); - UniqueTemplateVersionsPkey UniqueConstraint = "template_versions_pkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_pkey PRIMARY KEY (id); - UniqueTemplateVersionsTemplateIDNameKey UniqueConstraint = "template_versions_template_id_name_key" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_name_key UNIQUE (template_id, name); - UniqueTemplatesPkey UniqueConstraint = "templates_pkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); - UniqueUserLinksPkey UniqueConstraint = "user_links_pkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); - UniqueUsersPkey UniqueConstraint = "users_pkey" // ALTER TABLE ONLY users ADD CONSTRAINT users_pkey PRIMARY KEY (id); - UniqueWorkspaceAgentLogSourcesPkey UniqueConstraint = "workspace_agent_log_sources_pkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_pkey PRIMARY KEY (workspace_agent_id, id); - UniqueWorkspaceAgentMetadataPkey UniqueConstraint = "workspace_agent_metadata_pkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_pkey PRIMARY KEY (workspace_agent_id, key); - UniqueWorkspaceAgentPortSharePkey UniqueConstraint = "workspace_agent_port_share_pkey" // ALTER TABLE ONLY workspace_agent_port_share ADD CONSTRAINT workspace_agent_port_share_pkey PRIMARY KEY (workspace_id, agent_name, port); - UniqueWorkspaceAgentStartupLogsPkey UniqueConstraint = "workspace_agent_startup_logs_pkey" // ALTER TABLE ONLY workspace_agent_logs ADD CONSTRAINT workspace_agent_startup_logs_pkey PRIMARY KEY (id); - 
UniqueWorkspaceAgentsPkey UniqueConstraint = "workspace_agents_pkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_pkey PRIMARY KEY (id); - UniqueWorkspaceAppStatsPkey UniqueConstraint = "workspace_app_stats_pkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_pkey PRIMARY KEY (id); - UniqueWorkspaceAppStatsUserIDAgentIDSessionIDKey UniqueConstraint = "workspace_app_stats_user_id_agent_id_session_id_key" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_user_id_agent_id_session_id_key UNIQUE (user_id, agent_id, session_id); - UniqueWorkspaceAppsAgentIDSlugIndex UniqueConstraint = "workspace_apps_agent_id_slug_idx" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_agent_id_slug_idx UNIQUE (agent_id, slug); - UniqueWorkspaceAppsPkey UniqueConstraint = "workspace_apps_pkey" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_pkey PRIMARY KEY (id); - UniqueWorkspaceBuildParametersWorkspaceBuildIDNameKey UniqueConstraint = "workspace_build_parameters_workspace_build_id_name_key" // ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_name_key UNIQUE (workspace_build_id, name); - UniqueWorkspaceBuildsJobIDKey UniqueConstraint = "workspace_builds_job_id_key" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_key UNIQUE (job_id); - UniqueWorkspaceBuildsPkey UniqueConstraint = "workspace_builds_pkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_pkey PRIMARY KEY (id); - UniqueWorkspaceBuildsWorkspaceIDBuildNumberKey UniqueConstraint = "workspace_builds_workspace_id_build_number_key" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_workspace_id_build_number_key UNIQUE (workspace_id, build_number); - UniqueWorkspaceProxiesPkey UniqueConstraint = "workspace_proxies_pkey" // ALTER TABLE ONLY workspace_proxies ADD CONSTRAINT workspace_proxies_pkey PRIMARY KEY 
(id); - UniqueWorkspaceProxiesRegionIDUnique UniqueConstraint = "workspace_proxies_region_id_unique" // ALTER TABLE ONLY workspace_proxies ADD CONSTRAINT workspace_proxies_region_id_unique UNIQUE (region_id); - UniqueWorkspaceResourceMetadataName UniqueConstraint = "workspace_resource_metadata_name" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_name UNIQUE (workspace_resource_id, key); - UniqueWorkspaceResourceMetadataPkey UniqueConstraint = "workspace_resource_metadata_pkey" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_pkey PRIMARY KEY (id); - UniqueWorkspaceResourcesPkey UniqueConstraint = "workspace_resources_pkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_pkey PRIMARY KEY (id); - UniqueWorkspacesPkey UniqueConstraint = "workspaces_pkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id); - UniqueIndexAPIKeyName UniqueConstraint = "idx_api_key_name" // CREATE UNIQUE INDEX idx_api_key_name ON api_keys USING btree (user_id, token_name) WHERE (login_type = 'token'::login_type); - UniqueIndexCustomRolesNameLower UniqueConstraint = "idx_custom_roles_name_lower" // CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name)); - UniqueIndexOrganizationName UniqueConstraint = "idx_organization_name" // CREATE UNIQUE INDEX idx_organization_name ON organizations USING btree (name); - UniqueIndexOrganizationNameLower UniqueConstraint = "idx_organization_name_lower" // CREATE UNIQUE INDEX idx_organization_name_lower ON organizations USING btree (lower(name)); - UniqueIndexProvisionerDaemonsNameOwnerKey UniqueConstraint = "idx_provisioner_daemons_name_owner_key" // CREATE UNIQUE INDEX idx_provisioner_daemons_name_owner_key ON provisioner_daemons USING btree (name, lower(COALESCE((tags ->> 'owner'::text), ''::text))); - UniqueIndexUsersEmail UniqueConstraint = "idx_users_email" // CREATE UNIQUE 
INDEX idx_users_email ON users USING btree (email) WHERE (deleted = false); - UniqueIndexUsersUsername UniqueConstraint = "idx_users_username" // CREATE UNIQUE INDEX idx_users_username ON users USING btree (username) WHERE (deleted = false); - UniqueOrganizationsSingleDefaultOrg UniqueConstraint = "organizations_single_default_org" // CREATE UNIQUE INDEX organizations_single_default_org ON organizations USING btree (is_default) WHERE (is_default = true); - UniqueTemplateUsageStatsStartTimeTemplateIDUserIDIndex UniqueConstraint = "template_usage_stats_start_time_template_id_user_id_idx" // CREATE UNIQUE INDEX template_usage_stats_start_time_template_id_user_id_idx ON template_usage_stats USING btree (start_time, template_id, user_id); - UniqueTemplatesOrganizationIDNameIndex UniqueConstraint = "templates_organization_id_name_idx" // CREATE UNIQUE INDEX templates_organization_id_name_idx ON templates USING btree (organization_id, lower((name)::text)) WHERE (deleted = false); - UniqueUserLinksLinkedIDLoginTypeIndex UniqueConstraint = "user_links_linked_id_login_type_idx" // CREATE UNIQUE INDEX user_links_linked_id_login_type_idx ON user_links USING btree (linked_id, login_type) WHERE (linked_id <> ''::text); - UniqueUsersEmailLowerIndex UniqueConstraint = "users_email_lower_idx" // CREATE UNIQUE INDEX users_email_lower_idx ON users USING btree (lower(email)) WHERE (deleted = false); - UniqueUsersUsernameLowerIndex UniqueConstraint = "users_username_lower_idx" // CREATE UNIQUE INDEX users_username_lower_idx ON users USING btree (lower(username)) WHERE (deleted = false); - UniqueWorkspaceProxiesLowerNameIndex UniqueConstraint = "workspace_proxies_lower_name_idx" // CREATE UNIQUE INDEX workspace_proxies_lower_name_idx ON workspace_proxies USING btree (lower(name)) WHERE (deleted = false); - UniqueWorkspacesOwnerIDLowerIndex UniqueConstraint = "workspaces_owner_id_lower_idx" // CREATE UNIQUE INDEX workspaces_owner_id_lower_idx ON workspaces USING btree (owner_id, 
lower((name)::text)) WHERE (deleted = false); + UniqueAgentStatsPkey UniqueConstraint = "agent_stats_pkey" // ALTER TABLE ONLY workspace_agent_stats ADD CONSTRAINT agent_stats_pkey PRIMARY KEY (id); + UniqueAPIKeysPkey UniqueConstraint = "api_keys_pkey" // ALTER TABLE ONLY api_keys ADD CONSTRAINT api_keys_pkey PRIMARY KEY (id); + UniqueAuditLogsPkey UniqueConstraint = "audit_logs_pkey" // ALTER TABLE ONLY audit_logs ADD CONSTRAINT audit_logs_pkey PRIMARY KEY (id); + UniqueCustomRolesPkey UniqueConstraint = "custom_roles_pkey" // ALTER TABLE ONLY custom_roles ADD CONSTRAINT custom_roles_pkey PRIMARY KEY (name); + UniqueDbcryptKeysActiveKeyDigestKey UniqueConstraint = "dbcrypt_keys_active_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_active_key_digest_key UNIQUE (active_key_digest); + UniqueDbcryptKeysPkey UniqueConstraint = "dbcrypt_keys_pkey" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_pkey PRIMARY KEY (number); + UniqueDbcryptKeysRevokedKeyDigestKey UniqueConstraint = "dbcrypt_keys_revoked_key_digest_key" // ALTER TABLE ONLY dbcrypt_keys ADD CONSTRAINT dbcrypt_keys_revoked_key_digest_key UNIQUE (revoked_key_digest); + UniqueFilesHashCreatedByKey UniqueConstraint = "files_hash_created_by_key" // ALTER TABLE ONLY files ADD CONSTRAINT files_hash_created_by_key UNIQUE (hash, created_by); + UniqueFilesPkey UniqueConstraint = "files_pkey" // ALTER TABLE ONLY files ADD CONSTRAINT files_pkey PRIMARY KEY (id); + UniqueGitAuthLinksProviderIDUserIDKey UniqueConstraint = "git_auth_links_provider_id_user_id_key" // ALTER TABLE ONLY external_auth_links ADD CONSTRAINT git_auth_links_provider_id_user_id_key UNIQUE (provider_id, user_id); + UniqueGitSSHKeysPkey UniqueConstraint = "gitsshkeys_pkey" // ALTER TABLE ONLY gitsshkeys ADD CONSTRAINT gitsshkeys_pkey PRIMARY KEY (user_id); + UniqueGroupMembersUserIDGroupIDKey UniqueConstraint = "group_members_user_id_group_id_key" // ALTER TABLE ONLY group_members ADD CONSTRAINT 
group_members_user_id_group_id_key UNIQUE (user_id, group_id); + UniqueGroupsNameOrganizationIDKey UniqueConstraint = "groups_name_organization_id_key" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_name_organization_id_key UNIQUE (name, organization_id); + UniqueGroupsPkey UniqueConstraint = "groups_pkey" // ALTER TABLE ONLY groups ADD CONSTRAINT groups_pkey PRIMARY KEY (id); + UniqueJfrogXrayScansPkey UniqueConstraint = "jfrog_xray_scans_pkey" // ALTER TABLE ONLY jfrog_xray_scans ADD CONSTRAINT jfrog_xray_scans_pkey PRIMARY KEY (agent_id, workspace_id); + UniqueLicensesJWTKey UniqueConstraint = "licenses_jwt_key" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_jwt_key UNIQUE (jwt); + UniqueLicensesPkey UniqueConstraint = "licenses_pkey" // ALTER TABLE ONLY licenses ADD CONSTRAINT licenses_pkey PRIMARY KEY (id); + UniqueOauth2ProviderAppCodesPkey UniqueConstraint = "oauth2_provider_app_codes_pkey" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_pkey PRIMARY KEY (id); + UniqueOauth2ProviderAppCodesSecretPrefixKey UniqueConstraint = "oauth2_provider_app_codes_secret_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_codes ADD CONSTRAINT oauth2_provider_app_codes_secret_prefix_key UNIQUE (secret_prefix); + UniqueOauth2ProviderAppSecretsPkey UniqueConstraint = "oauth2_provider_app_secrets_pkey" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_pkey PRIMARY KEY (id); + UniqueOauth2ProviderAppSecretsSecretPrefixKey UniqueConstraint = "oauth2_provider_app_secrets_secret_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_secrets ADD CONSTRAINT oauth2_provider_app_secrets_secret_prefix_key UNIQUE (secret_prefix); + UniqueOauth2ProviderAppTokensHashPrefixKey UniqueConstraint = "oauth2_provider_app_tokens_hash_prefix_key" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_hash_prefix_key UNIQUE (hash_prefix); + UniqueOauth2ProviderAppTokensPkey 
UniqueConstraint = "oauth2_provider_app_tokens_pkey" // ALTER TABLE ONLY oauth2_provider_app_tokens ADD CONSTRAINT oauth2_provider_app_tokens_pkey PRIMARY KEY (id); + UniqueOauth2ProviderAppsNameKey UniqueConstraint = "oauth2_provider_apps_name_key" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_name_key UNIQUE (name); + UniqueOauth2ProviderAppsPkey UniqueConstraint = "oauth2_provider_apps_pkey" // ALTER TABLE ONLY oauth2_provider_apps ADD CONSTRAINT oauth2_provider_apps_pkey PRIMARY KEY (id); + UniqueOrganizationMembersPkey UniqueConstraint = "organization_members_pkey" // ALTER TABLE ONLY organization_members ADD CONSTRAINT organization_members_pkey PRIMARY KEY (organization_id, user_id); + UniqueOrganizationsName UniqueConstraint = "organizations_name" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_name UNIQUE (name); + UniqueOrganizationsPkey UniqueConstraint = "organizations_pkey" // ALTER TABLE ONLY organizations ADD CONSTRAINT organizations_pkey PRIMARY KEY (id); + UniqueParameterSchemasJobIDNameKey UniqueConstraint = "parameter_schemas_job_id_name_key" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_job_id_name_key UNIQUE (job_id, name); + UniqueParameterSchemasPkey UniqueConstraint = "parameter_schemas_pkey" // ALTER TABLE ONLY parameter_schemas ADD CONSTRAINT parameter_schemas_pkey PRIMARY KEY (id); + UniqueParameterValuesPkey UniqueConstraint = "parameter_values_pkey" // ALTER TABLE ONLY parameter_values ADD CONSTRAINT parameter_values_pkey PRIMARY KEY (id); + UniqueParameterValuesScopeIDNameKey UniqueConstraint = "parameter_values_scope_id_name_key" // ALTER TABLE ONLY parameter_values ADD CONSTRAINT parameter_values_scope_id_name_key UNIQUE (scope_id, name); + UniqueProvisionerDaemonsPkey UniqueConstraint = "provisioner_daemons_pkey" // ALTER TABLE ONLY provisioner_daemons ADD CONSTRAINT provisioner_daemons_pkey PRIMARY KEY (id); + UniqueProvisionerJobLogsPkey UniqueConstraint = 
"provisioner_job_logs_pkey" // ALTER TABLE ONLY provisioner_job_logs ADD CONSTRAINT provisioner_job_logs_pkey PRIMARY KEY (id); + UniqueProvisionerJobsPkey UniqueConstraint = "provisioner_jobs_pkey" // ALTER TABLE ONLY provisioner_jobs ADD CONSTRAINT provisioner_jobs_pkey PRIMARY KEY (id); + UniqueSiteConfigsKeyKey UniqueConstraint = "site_configs_key_key" // ALTER TABLE ONLY site_configs ADD CONSTRAINT site_configs_key_key UNIQUE (key); + UniqueTailnetAgentsPkey UniqueConstraint = "tailnet_agents_pkey" // ALTER TABLE ONLY tailnet_agents ADD CONSTRAINT tailnet_agents_pkey PRIMARY KEY (id, coordinator_id); + UniqueTailnetClientSubscriptionsPkey UniqueConstraint = "tailnet_client_subscriptions_pkey" // ALTER TABLE ONLY tailnet_client_subscriptions ADD CONSTRAINT tailnet_client_subscriptions_pkey PRIMARY KEY (client_id, coordinator_id, agent_id); + UniqueTailnetClientsPkey UniqueConstraint = "tailnet_clients_pkey" // ALTER TABLE ONLY tailnet_clients ADD CONSTRAINT tailnet_clients_pkey PRIMARY KEY (id, coordinator_id); + UniqueTailnetCoordinatorsPkey UniqueConstraint = "tailnet_coordinators_pkey" // ALTER TABLE ONLY tailnet_coordinators ADD CONSTRAINT tailnet_coordinators_pkey PRIMARY KEY (id); + UniqueTailnetPeersPkey UniqueConstraint = "tailnet_peers_pkey" // ALTER TABLE ONLY tailnet_peers ADD CONSTRAINT tailnet_peers_pkey PRIMARY KEY (id, coordinator_id); + UniqueTailnetTunnelsPkey UniqueConstraint = "tailnet_tunnels_pkey" // ALTER TABLE ONLY tailnet_tunnels ADD CONSTRAINT tailnet_tunnels_pkey PRIMARY KEY (coordinator_id, src_id, dst_id); + UniqueTemplateUsageStatsPkey UniqueConstraint = "template_usage_stats_pkey" // ALTER TABLE ONLY template_usage_stats ADD CONSTRAINT template_usage_stats_pkey PRIMARY KEY (start_time, template_id, user_id); + UniqueTemplateVersionParametersTemplateVersionIDNameKey UniqueConstraint = "template_version_parameters_template_version_id_name_key" // ALTER TABLE ONLY template_version_parameters ADD CONSTRAINT 
template_version_parameters_template_version_id_name_key UNIQUE (template_version_id, name); + UniqueTemplateVersionVariablesTemplateVersionIDNameKey UniqueConstraint = "template_version_variables_template_version_id_name_key" // ALTER TABLE ONLY template_version_variables ADD CONSTRAINT template_version_variables_template_version_id_name_key UNIQUE (template_version_id, name); + UniqueTemplateVersionWorkspaceTagsTemplateVersionIDKeyKey UniqueConstraint = "template_version_workspace_tags_template_version_id_key_key" // ALTER TABLE ONLY template_version_workspace_tags ADD CONSTRAINT template_version_workspace_tags_template_version_id_key_key UNIQUE (template_version_id, key); + UniqueTemplateVersionsPkey UniqueConstraint = "template_versions_pkey" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_pkey PRIMARY KEY (id); + UniqueTemplateVersionsTemplateIDNameKey UniqueConstraint = "template_versions_template_id_name_key" // ALTER TABLE ONLY template_versions ADD CONSTRAINT template_versions_template_id_name_key UNIQUE (template_id, name); + UniqueTemplatesPkey UniqueConstraint = "templates_pkey" // ALTER TABLE ONLY templates ADD CONSTRAINT templates_pkey PRIMARY KEY (id); + UniqueUserLinksPkey UniqueConstraint = "user_links_pkey" // ALTER TABLE ONLY user_links ADD CONSTRAINT user_links_pkey PRIMARY KEY (user_id, login_type); + UniqueUsersPkey UniqueConstraint = "users_pkey" // ALTER TABLE ONLY users ADD CONSTRAINT users_pkey PRIMARY KEY (id); + UniqueWorkspaceAgentLogSourcesPkey UniqueConstraint = "workspace_agent_log_sources_pkey" // ALTER TABLE ONLY workspace_agent_log_sources ADD CONSTRAINT workspace_agent_log_sources_pkey PRIMARY KEY (workspace_agent_id, id); + UniqueWorkspaceAgentMetadataPkey UniqueConstraint = "workspace_agent_metadata_pkey" // ALTER TABLE ONLY workspace_agent_metadata ADD CONSTRAINT workspace_agent_metadata_pkey PRIMARY KEY (workspace_agent_id, key); + UniqueWorkspaceAgentPortSharePkey UniqueConstraint = 
"workspace_agent_port_share_pkey" // ALTER TABLE ONLY workspace_agent_port_share ADD CONSTRAINT workspace_agent_port_share_pkey PRIMARY KEY (workspace_id, agent_name, port); + UniqueWorkspaceAgentStartupLogsPkey UniqueConstraint = "workspace_agent_startup_logs_pkey" // ALTER TABLE ONLY workspace_agent_logs ADD CONSTRAINT workspace_agent_startup_logs_pkey PRIMARY KEY (id); + UniqueWorkspaceAgentsPkey UniqueConstraint = "workspace_agents_pkey" // ALTER TABLE ONLY workspace_agents ADD CONSTRAINT workspace_agents_pkey PRIMARY KEY (id); + UniqueWorkspaceAppStatsPkey UniqueConstraint = "workspace_app_stats_pkey" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_pkey PRIMARY KEY (id); + UniqueWorkspaceAppStatsUserIDAgentIDSessionIDKey UniqueConstraint = "workspace_app_stats_user_id_agent_id_session_id_key" // ALTER TABLE ONLY workspace_app_stats ADD CONSTRAINT workspace_app_stats_user_id_agent_id_session_id_key UNIQUE (user_id, agent_id, session_id); + UniqueWorkspaceAppsAgentIDSlugIndex UniqueConstraint = "workspace_apps_agent_id_slug_idx" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_agent_id_slug_idx UNIQUE (agent_id, slug); + UniqueWorkspaceAppsPkey UniqueConstraint = "workspace_apps_pkey" // ALTER TABLE ONLY workspace_apps ADD CONSTRAINT workspace_apps_pkey PRIMARY KEY (id); + UniqueWorkspaceBuildParametersWorkspaceBuildIDNameKey UniqueConstraint = "workspace_build_parameters_workspace_build_id_name_key" // ALTER TABLE ONLY workspace_build_parameters ADD CONSTRAINT workspace_build_parameters_workspace_build_id_name_key UNIQUE (workspace_build_id, name); + UniqueWorkspaceBuildsJobIDKey UniqueConstraint = "workspace_builds_job_id_key" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_job_id_key UNIQUE (job_id); + UniqueWorkspaceBuildsPkey UniqueConstraint = "workspace_builds_pkey" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_pkey PRIMARY KEY (id); + 
UniqueWorkspaceBuildsWorkspaceIDBuildNumberKey UniqueConstraint = "workspace_builds_workspace_id_build_number_key" // ALTER TABLE ONLY workspace_builds ADD CONSTRAINT workspace_builds_workspace_id_build_number_key UNIQUE (workspace_id, build_number); + UniqueWorkspaceProxiesPkey UniqueConstraint = "workspace_proxies_pkey" // ALTER TABLE ONLY workspace_proxies ADD CONSTRAINT workspace_proxies_pkey PRIMARY KEY (id); + UniqueWorkspaceProxiesRegionIDUnique UniqueConstraint = "workspace_proxies_region_id_unique" // ALTER TABLE ONLY workspace_proxies ADD CONSTRAINT workspace_proxies_region_id_unique UNIQUE (region_id); + UniqueWorkspaceResourceMetadataName UniqueConstraint = "workspace_resource_metadata_name" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_name UNIQUE (workspace_resource_id, key); + UniqueWorkspaceResourceMetadataPkey UniqueConstraint = "workspace_resource_metadata_pkey" // ALTER TABLE ONLY workspace_resource_metadata ADD CONSTRAINT workspace_resource_metadata_pkey PRIMARY KEY (id); + UniqueWorkspaceResourcesPkey UniqueConstraint = "workspace_resources_pkey" // ALTER TABLE ONLY workspace_resources ADD CONSTRAINT workspace_resources_pkey PRIMARY KEY (id); + UniqueWorkspacesPkey UniqueConstraint = "workspaces_pkey" // ALTER TABLE ONLY workspaces ADD CONSTRAINT workspaces_pkey PRIMARY KEY (id); + UniqueIndexAPIKeyName UniqueConstraint = "idx_api_key_name" // CREATE UNIQUE INDEX idx_api_key_name ON api_keys USING btree (user_id, token_name) WHERE (login_type = 'token'::login_type); + UniqueIndexCustomRolesNameLower UniqueConstraint = "idx_custom_roles_name_lower" // CREATE UNIQUE INDEX idx_custom_roles_name_lower ON custom_roles USING btree (lower(name)); + UniqueIndexOrganizationName UniqueConstraint = "idx_organization_name" // CREATE UNIQUE INDEX idx_organization_name ON organizations USING btree (name); + UniqueIndexOrganizationNameLower UniqueConstraint = "idx_organization_name_lower" // CREATE UNIQUE INDEX 
idx_organization_name_lower ON organizations USING btree (lower(name)); + UniqueIndexProvisionerDaemonsNameOwnerKey UniqueConstraint = "idx_provisioner_daemons_name_owner_key" // CREATE UNIQUE INDEX idx_provisioner_daemons_name_owner_key ON provisioner_daemons USING btree (name, lower(COALESCE((tags ->> 'owner'::text), ''::text))); + UniqueIndexUsersEmail UniqueConstraint = "idx_users_email" // CREATE UNIQUE INDEX idx_users_email ON users USING btree (email) WHERE (deleted = false); + UniqueIndexUsersUsername UniqueConstraint = "idx_users_username" // CREATE UNIQUE INDEX idx_users_username ON users USING btree (username) WHERE (deleted = false); + UniqueOrganizationsSingleDefaultOrg UniqueConstraint = "organizations_single_default_org" // CREATE UNIQUE INDEX organizations_single_default_org ON organizations USING btree (is_default) WHERE (is_default = true); + UniqueTemplateUsageStatsStartTimeTemplateIDUserIDIndex UniqueConstraint = "template_usage_stats_start_time_template_id_user_id_idx" // CREATE UNIQUE INDEX template_usage_stats_start_time_template_id_user_id_idx ON template_usage_stats USING btree (start_time, template_id, user_id); + UniqueTemplatesOrganizationIDNameIndex UniqueConstraint = "templates_organization_id_name_idx" // CREATE UNIQUE INDEX templates_organization_id_name_idx ON templates USING btree (organization_id, lower((name)::text)) WHERE (deleted = false); + UniqueUserLinksLinkedIDLoginTypeIndex UniqueConstraint = "user_links_linked_id_login_type_idx" // CREATE UNIQUE INDEX user_links_linked_id_login_type_idx ON user_links USING btree (linked_id, login_type) WHERE (linked_id <> ''::text); + UniqueUsersEmailLowerIndex UniqueConstraint = "users_email_lower_idx" // CREATE UNIQUE INDEX users_email_lower_idx ON users USING btree (lower(email)) WHERE (deleted = false); + UniqueUsersUsernameLowerIndex UniqueConstraint = "users_username_lower_idx" // CREATE UNIQUE INDEX users_username_lower_idx ON users USING btree (lower(username)) WHERE (deleted = 
false); + UniqueWorkspaceProxiesLowerNameIndex UniqueConstraint = "workspace_proxies_lower_name_idx" // CREATE UNIQUE INDEX workspace_proxies_lower_name_idx ON workspace_proxies USING btree (lower(name)) WHERE (deleted = false); + UniqueWorkspacesOwnerIDLowerIndex UniqueConstraint = "workspaces_owner_id_lower_idx" // CREATE UNIQUE INDEX workspaces_owner_id_lower_idx ON workspaces USING btree (owner_id, lower((name)::text)) WHERE (deleted = false); ) diff --git a/coderd/provisionerdserver/provisionerdserver.go b/coderd/provisionerdserver/provisionerdserver.go index f6a8d6abe3a5e..e47aba8a56eab 100644 --- a/coderd/provisionerdserver/provisionerdserver.go +++ b/coderd/provisionerdserver/provisionerdserver.go @@ -817,6 +817,25 @@ func (s *server) UpdateJob(ctx context.Context, request *proto.UpdateJobRequest) s.Logger.Debug(ctx, "published job logs", slog.F("job_id", parsedID)) } + if len(request.WorkspaceTags) > 0 { + templateVersion, err := s.Database.GetTemplateVersionByJobID(ctx, job.ID) + if err != nil { + s.Logger.Error(ctx, "failed to get the template version", slog.F("job_id", parsedID), slog.Error(err)) + return nil, xerrors.Errorf("get template version by job id: %w", err) + } + + for key, value := range request.WorkspaceTags { + _, err := s.Database.InsertTemplateVersionWorkspaceTag(ctx, database.InsertTemplateVersionWorkspaceTagParams{ + TemplateVersionID: templateVersion.ID, + Key: key, + Value: value, + }) + if err != nil { + return nil, xerrors.Errorf("update template version workspace tags: %w", err) + } + } + } + if len(request.Readme) > 0 { err := s.Database.UpdateTemplateVersionDescriptionByJobID(ctx, database.UpdateTemplateVersionDescriptionByJobIDParams{ JobID: job.ID, diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go index 6757bd2c6396d..f7b5ef7e29625 100644 --- a/coderd/provisionerdserver/provisionerdserver_test.go +++ b/coderd/provisionerdserver/provisionerdserver_test.go @@ 
-750,6 +750,38 @@ func TestUpdateJob(t *testing.T) { require.Equal(t, templateVariables[1].Value, "") }) }) + + t.Run("WorkspaceTags", func(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + srv, db, _, pd := setup(t, false, &overrides{}) + job := setupJob(t, db, pd.ID) + versionID := uuid.New() + err := db.InsertTemplateVersion(ctx, database.InsertTemplateVersionParams{ + ID: versionID, + JobID: job, + }) + require.NoError(t, err) + _, err = srv.UpdateJob(ctx, &proto.UpdateJobRequest{ + JobId: job.String(), + WorkspaceTags: map[string]string{ + "bird": "tweety", + "cat": "jinx", + }, + }) + require.NoError(t, err) + + workspaceTags, err := db.GetTemplateVersionWorkspaceTags(ctx, versionID) + require.NoError(t, err) + require.Len(t, workspaceTags, 2) + require.Equal(t, workspaceTags[0].Key, "bird") + require.Equal(t, workspaceTags[0].Value, "tweety") + require.Equal(t, workspaceTags[1].Key, "cat") + require.Equal(t, workspaceTags[1].Value, "jinx") + }) } func TestFailJob(t *testing.T) { diff --git a/flake.nix b/flake.nix index 577a422cdbbf3..fb33091b2f60c 100644 --- a/flake.nix +++ b/flake.nix @@ -97,7 +97,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! 
- vendorHash = "sha256-YOqgW5v7qXfOYcCQECZyJfoewChtQDfRCrTcr7Ui37Y="; + vendorHash = "sha256-tBczWqmpIfr8zwftHl/W3nw3qiSei+aIw3fZmtl0SwI="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index d40572a4a2df3..e67821bcdfb52 100644 --- a/go.mod +++ b/go.mod @@ -104,7 +104,7 @@ require ( github.com/coder/flog v1.1.0 github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 github.com/coder/retry v1.5.1 - github.com/coder/terraform-provider-coder v0.21.0 + github.com/coder/terraform-provider-coder v0.22.0 github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a github.com/coreos/go-oidc/v3 v3.10.0 github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf diff --git a/go.sum b/go.sum index 98bc966fa40ff..8c5ef8240b246 100644 --- a/go.sum +++ b/go.sum @@ -219,8 +219,8 @@ github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuO github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= github.com/coder/tailscale v1.1.1-0.20240501025849-d8a4721c3162 h1:1uXQe8UAXMjvXYQ+XtpCT3LjbideQI3xpeejN9kh56A= github.com/coder/tailscale v1.1.1-0.20240501025849-d8a4721c3162/go.mod h1:L8tPrwSi31RAMEMV8rjb0vYTGs7rXt8rAHbqY/p41j4= -github.com/coder/terraform-provider-coder v0.21.0 h1:aoDmFJULYZpS66EIAZuNY4IxElaDkdRaWMWp9ScD2R8= -github.com/coder/terraform-provider-coder v0.21.0/go.mod h1:hqxd15PJeftFBOnGBBPN6WfNQutZtnahwwPeV8U6TyA= +github.com/coder/terraform-provider-coder v0.22.0 h1:L72WFa9/6sc/nnXENPS8LpWi/2NBV+DRUW0WT//pEaU= +github.com/coder/terraform-provider-coder v0.22.0/go.mod h1:wMun9UZ9HT2CzF6qPPBup1odzBpVUc0/xSFoXgdI3tk= github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a h1:KhR9LUVllMZ+e9lhubZ1HNrtJDgH5YLoTvpKwmrGag4= github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a/go.mod h1:QzfptVUdEO+XbkzMKx1kw13i9wwpJlfI1RrZ6SNZ0hA= github.com/coder/wireguard-go v0.0.0-20230807234434-d825b45ccbf5 
h1:eDk/42Kj4xN4yfE504LsvcFEo3dWUiCOaBiWJ2uIH2A= diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 10ab7b801b071..ad55321f2e99a 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -1,12 +1,18 @@ package terraform import ( + "context" "encoding/json" "fmt" + "os" "path/filepath" + "slices" "sort" "strings" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclparse" + "github.com/hashicorp/hcl/v2/hclsyntax" "github.com/hashicorp/terraform-config-inspect/tfconfig" "github.com/mitchellh/go-wordwrap" "golang.org/x/xerrors" @@ -28,6 +34,129 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- return provisionersdk.ParseErrorf("load module: %s", formatDiagnostics(sess.WorkDirectory, diags)) } + workspaceTags, err := s.loadWorkspaceTags(ctx, module) + if err != nil { + return provisionersdk.ParseErrorf("can't load workspace tags: %v", err) + } + + templateVariables, err := loadTerraformVariables(module) + if err != nil { + return provisionersdk.ParseErrorf("can't load template variables: %v", err) + } + + return &proto.ParseComplete{ + TemplateVariables: templateVariables, + WorkspaceTags: workspaceTags, + } +} + +var rootTemplateSchema = &hcl.BodySchema{ + Blocks: []hcl.BlockHeaderSchema{ + { + Type: "data", + LabelNames: []string{"type", "name"}, + }, + }, +} + +var coderWorkspaceTagsSchema = &hcl.BodySchema{ + Attributes: []hcl.AttributeSchema{ + { + Name: "tags", + }, + }, +} + +func (s *server) loadWorkspaceTags(ctx context.Context, module *tfconfig.Module) (map[string]string, error) { + workspaceTags := map[string]string{} + + for _, dataResource := range module.DataResources { + if dataResource.Type != "coder_workspace_tags" { + s.logger.Debug(ctx, "skip resource as it is not a coder_workspace_tags", "resource_name", dataResource.Name, "resource_type", dataResource.Type) + continue + } + + var file *hcl.File + var diags hcl.Diagnostics + parser := 
hclparse.NewParser() + + if !strings.HasSuffix(dataResource.Pos.Filename, ".tf") { + s.logger.Debug(ctx, "only .tf files can be parsed", "filename", dataResource.Pos.Filename) + continue + } + // We know in which HCL file is the data resource defined. + file, diags = parser.ParseHCLFile(dataResource.Pos.Filename) + + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Parse root to find "coder_workspace_tags". + content, _, diags := file.Body.PartialContent(rootTemplateSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf("can't parse the resource file: %s", diags.Error()) + } + + // Iterate over blocks to locate the exact "coder_workspace_tags" data resource. + for _, block := range content.Blocks { + if !slices.Equal(block.Labels, []string{"coder_workspace_tags", dataResource.Name}) { + continue + } + + // Parse "coder_workspace_tags" to find all key-value tags. + resContent, _, diags := block.Body.PartialContent(coderWorkspaceTagsSchema) + if diags.HasErrors() { + return nil, xerrors.Errorf(`can't parse the resource coder_workspace_tags: %s`, diags.Error()) + } + + if resContent == nil { + continue // workspace tags are not present + } + + if _, ok := resContent.Attributes["tags"]; !ok { + return nil, xerrors.Errorf(`"tags" attribute is required by coder_workspace_tags`) + } + + expr := resContent.Attributes["tags"].Expr + tagsExpr, ok := expr.(*hclsyntax.ObjectConsExpr) + if !ok { + return nil, xerrors.Errorf(`"tags" attribute is expected to be a key-value map`) + } + + // Parse key-value entries in "coder_workspace_tags" + for _, tagItem := range tagsExpr.Items { + key, err := previewFileContent(tagItem.KeyExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", err) + } + key = strings.Trim(key, `"`) + + value, err := previewFileContent(tagItem.ValueExpr.Range()) + if err != nil { + return nil, xerrors.Errorf("can't preview the resource file: %v", 
err) + } + + s.logger.Info(ctx, "workspace tag found", "key", key, "value", value) + + if _, ok := workspaceTags[key]; ok { + return nil, xerrors.Errorf(`workspace tag "%s" is defined multiple times`, key) + } + workspaceTags[key] = value + } + } + } + return workspaceTags, nil +} + +func previewFileContent(fileRange hcl.Range) (string, error) { + body, err := os.ReadFile(fileRange.Filename) + if err != nil { + return "", err + } + return string(fileRange.SliceBytes(body)), nil +} + +func loadTerraformVariables(module *tfconfig.Module) ([]*proto.TemplateVariable, error) { // Sort variables by (filename, line) to make the ordering consistent variables := make([]*tfconfig.Variable, 0, len(module.Variables)) for _, v := range module.Variables { @@ -38,17 +167,14 @@ func (s *server) Parse(sess *provisionersdk.Session, _ *proto.ParseRequest, _ <- }) var templateVariables []*proto.TemplateVariable - for _, v := range variables { mv, err := convertTerraformVariable(v) if err != nil { - return provisionersdk.ParseErrorf("can't convert the Terraform variable to a managed one: %s", err) + return nil, err } templateVariables = append(templateVariables, mv) } - return &proto.ParseComplete{ - TemplateVariables: templateVariables, - } + return templateVariables, nil } // Converts a Terraform variable to a template-wide variable, processed by Coder. 
diff --git a/provisioner/terraform/parse_test.go b/provisioner/terraform/parse_test.go index c28532af25831..3ff6181dc8624 100644 --- a/provisioner/terraform/parse_test.go +++ b/provisioner/terraform/parse_test.go @@ -201,6 +201,177 @@ func TestParse(t *testing.T) { }, }, }, + { + Name: "workspace-tags", + Files: map[string]string{ + "parameters.tf": `data "coder_parameter" "os_selector" { + name = "os_selector" + display_name = "Operating System" + mutable = false + + default = "osx" + + option { + icon = "/icons/linux.png" + name = "Linux" + value = "linux" + } + option { + icon = "/icons/osx.png" + name = "OSX" + value = "osx" + } + option { + icon = "/icons/windows.png" + name = "Windows" + value = "windows" + } + } + + data "coder_parameter" "feature_cache_enabled" { + name = "feature_cache_enabled" + display_name = "Enable cache?" + type = "bool" + + default = false + } + + data "coder_parameter" "feature_debug_enabled" { + name = "feature_debug_enabled" + display_name = "Enable debug?" + type = "bool" + + default = true + }`, + "tags.tf": `data "coder_workspace_tags" "custom_workspace_tags" { + tags = { + "cluster" = "developers" + "os" = data.coder_parameter.os_selector.value + "debug" = "${data.coder_parameter.feature_debug_enabled.value}+12345" + "cache" = data.coder_parameter.feature_cache_enabled.value == "true" ? "nix-with-cache" : "no-cache" + } + }`, + }, + Response: &proto.ParseComplete{ + WorkspaceTags: map[string]string{ + "cluster": `"developers"`, + "os": `data.coder_parameter.os_selector.value`, + "debug": `"${data.coder_parameter.feature_debug_enabled.value}+12345"`, + "cache": `data.coder_parameter.feature_cache_enabled.value == "true" ? 
"nix-with-cache" : "no-cache"`, + }, + }, + }, + { + Name: "workspace-tags-in-a-single-file", + Files: map[string]string{ + "main.tf": ` + + data "coder_parameter" "os_selector" { + name = "os_selector" + display_name = "Operating System" + mutable = false + + default = "osx" + + option { + icon = "/icons/linux.png" + name = "Linux" + value = "linux" + } + option { + icon = "/icons/osx.png" + name = "OSX" + value = "osx" + } + option { + icon = "/icons/windows.png" + name = "Windows" + value = "windows" + } + } + + data "coder_parameter" "feature_cache_enabled" { + name = "feature_cache_enabled" + display_name = "Enable cache?" + type = "bool" + + default = false + } + + data "coder_parameter" "feature_debug_enabled" { + name = "feature_debug_enabled" + display_name = "Enable debug?" + type = "bool" + + default = true + } + + data "coder_workspace_tags" "custom_workspace_tags" { + tags = { + "cluster" = "developers" + "os" = data.coder_parameter.os_selector.value + "debug" = "${data.coder_parameter.feature_debug_enabled.value}+12345" + "cache" = data.coder_parameter.feature_cache_enabled.value == "true" ? "nix-with-cache" : "no-cache" + } + } + `, + }, + Response: &proto.ParseComplete{ + WorkspaceTags: map[string]string{ + "cluster": `"developers"`, + "os": `data.coder_parameter.os_selector.value`, + "debug": `"${data.coder_parameter.feature_debug_enabled.value}+12345"`, + "cache": `data.coder_parameter.feature_cache_enabled.value == "true" ? 
"nix-with-cache" : "no-cache"`, + }, + }, + }, + { + Name: "workspace-tags-duplicate-tag", + Files: map[string]string{ + "main.tf": ` + + data "coder_workspace_tags" "custom_workspace_tags" { + tags = { + "cluster" = "developers" + "debug" = "yes" + "debug" = "no" + "cache" = "no-cache" + } + } + `, + }, + ErrorContains: `workspace tag "debug" is defined multiple times`, + }, + { + Name: "workspace-tags-wrong-tag-format", + Files: map[string]string{ + "main.tf": ` + + data "coder_workspace_tags" "custom_workspace_tags" { + tags { + cluster = "developers" + debug = "yes" + cache = "no-cache" + } + } + `, + }, + ErrorContains: `"tags" attribute is required by coder_workspace_tags`, + }, + { + Name: "empty-main", + Files: map[string]string{ + "main.tf": ``, + }, + Response: &proto.ParseComplete{}, + }, + { + Name: "non-tf-files", + Files: map[string]string{ + "any-file.txt": "Foobar", + }, + Response: &proto.ParseComplete{}, + }, } for _, testCase := range testCases { diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index b9b89d05dbbcf..f2a8123ff780d 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ b/provisionerd/proto/provisionerd.pb.go @@ -567,6 +567,7 @@ type UpdateJobRequest struct { TemplateVariables []*proto.TemplateVariable `protobuf:"bytes,4,rep,name=template_variables,json=templateVariables,proto3" json:"template_variables,omitempty"` UserVariableValues []*proto.VariableValue `protobuf:"bytes,5,rep,name=user_variable_values,json=userVariableValues,proto3" json:"user_variable_values,omitempty"` Readme []byte `protobuf:"bytes,6,opt,name=readme,proto3" json:"readme,omitempty"` + WorkspaceTags map[string]string `protobuf:"bytes,7,rep,name=workspace_tags,json=workspaceTags,proto3" json:"workspace_tags,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *UpdateJobRequest) Reset() { @@ -636,6 +637,13 @@ func (x *UpdateJobRequest) GetReadme() []byte { return nil 
} +func (x *UpdateJobRequest) GetWorkspaceTags() map[string]string { + if x != nil { + return x.WorkspaceTags + } + return nil +} + type UpdateJobResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -1562,7 +1570,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x67, 0x65, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6f, 0x75, 0x74, - 0x70, 0x75, 0x74, 0x22, 0x8a, 0x02, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, + 0x70, 0x75, 0x74, 0x22, 0xa6, 0x03, 0x0a, 0x10, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, @@ -1578,63 +1586,73 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x12, 0x75, 0x73, 0x65, 0x72, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, - 0x22, 0x7a, 0x0a, 0x11, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, - 0x64, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 
0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, - 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, 0x4a, 0x0a, 0x12, - 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x69, - 0x6c, 0x79, 0x5f, 0x63, 0x6f, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64, - 0x61, 0x69, 0x6c, 0x79, 0x43, 0x6f, 0x73, 0x74, 0x22, 0x68, 0x0a, 0x13, 0x43, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x0e, 0x0a, 0x02, 0x6f, 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, - 0x29, 0x0a, 0x10, 0x63, 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, - 0x6d, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x63, 0x72, 0x65, 0x64, 0x69, - 0x74, 0x73, 0x43, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x75, - 0x64, 0x67, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x62, 0x75, 0x64, 0x67, - 0x65, 0x74, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x63, 0x71, 0x75, - 0x69, 0x72, 0x65, 0x2a, 0x34, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, - 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, - 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x01, 0x32, 0xc5, 0x03, 0x0a, 0x11, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 
0x6e, 0x12, - 0x41, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, - 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x22, 0x03, 0x88, - 0x02, 0x01, 0x12, 0x52, 0x0a, 0x14, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, - 0x57, 0x69, 0x74, 0x68, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, - 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, - 0x6f, 0x62, 0x28, 0x01, 0x30, 0x01, 0x12, 0x52, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, - 0x51, 0x75, 0x6f, 0x74, 0x61, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, - 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x46, 0x61, 0x69, 0x6c, - 0x4a, 0x6f, 0x62, 0x12, 
0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x64, 0x2e, 0x46, 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, - 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, - 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, - 0x79, 0x42, 0x2e, 0x5a, 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, - 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x58, 0x0a, 0x0e, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x07, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, + 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 
0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x22, 0x7a, 0x0a, 0x11, + 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x08, 0x52, 0x08, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x65, 0x64, 0x12, 0x43, 0x0a, + 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x73, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x22, 0x4a, 0x0a, 0x12, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x15, + 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, + 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x64, 0x61, 0x69, 0x6c, 0x79, 0x5f, 0x63, + 0x6f, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x64, 0x61, 0x69, 0x6c, 0x79, + 0x43, 0x6f, 0x73, 0x74, 0x22, 0x68, 0x0a, 0x13, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, + 0x6f, 0x74, 0x61, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0e, 0x0a, 0x02, 0x6f, + 0x6b, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x02, 0x6f, 0x6b, 0x12, 0x29, 0x0a, 0x10, 0x63, + 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0f, 0x63, 0x72, 0x65, 0x64, 0x69, 0x74, 0x73, 0x43, 0x6f, + 0x6e, 0x73, 0x75, 0x6d, 0x65, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x62, 0x75, 0x64, 0x67, 0x65, 0x74, 0x22, 0x0f, + 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 
0x6c, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x2a, + 0x34, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, + 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, + 0x4f, 0x4e, 0x10, 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, + 0x4e, 0x45, 0x52, 0x10, 0x01, 0x32, 0xc5, 0x03, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, 0x12, 0x41, 0x0a, 0x0a, 0x41, + 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x22, 0x03, 0x88, 0x02, 0x01, 0x12, 0x52, + 0x0a, 0x14, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x57, 0x69, 0x74, 0x68, + 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x12, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x41, 0x63, 0x71, 0x75, + 0x69, 0x72, 0x65, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x28, 0x01, + 0x30, 0x01, 0x12, 0x52, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, + 0x61, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, + 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x51, 0x75, 0x6f, 0x74, 0x61, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x55, 0x70, 0x64, 
0x61, 0x74, 0x65, + 0x4a, 0x6f, 0x62, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x07, 0x46, 0x61, 0x69, 0x6c, 0x4a, 0x6f, 0x62, 0x12, + 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x46, + 0x61, 0x69, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, + 0x0b, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2e, 0x5a, + 0x2c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, + 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1650,7 +1668,7 @@ func file_provisionerd_proto_provisionerd_proto_rawDescGZIP() []byte { } var file_provisionerd_proto_provisionerd_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 20) +var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 21) var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (LogSource)(0), // 0: 
provisionerd.LogSource (*Empty)(nil), // 1: provisionerd.Empty @@ -1673,15 +1691,16 @@ var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (*CompletedJob_WorkspaceBuild)(nil), // 18: provisionerd.CompletedJob.WorkspaceBuild (*CompletedJob_TemplateImport)(nil), // 19: provisionerd.CompletedJob.TemplateImport (*CompletedJob_TemplateDryRun)(nil), // 20: provisionerd.CompletedJob.TemplateDryRun - (proto.LogLevel)(0), // 21: provisioner.LogLevel - (*proto.TemplateVariable)(nil), // 22: provisioner.TemplateVariable - (*proto.VariableValue)(nil), // 23: provisioner.VariableValue - (*proto.RichParameterValue)(nil), // 24: provisioner.RichParameterValue - (*proto.ExternalAuthProvider)(nil), // 25: provisioner.ExternalAuthProvider - (*proto.Metadata)(nil), // 26: provisioner.Metadata - (*proto.Resource)(nil), // 27: provisioner.Resource - (*proto.RichParameter)(nil), // 28: provisioner.RichParameter - (*proto.ExternalAuthProviderResource)(nil), // 29: provisioner.ExternalAuthProviderResource + nil, // 21: provisionerd.UpdateJobRequest.WorkspaceTagsEntry + (proto.LogLevel)(0), // 22: provisioner.LogLevel + (*proto.TemplateVariable)(nil), // 23: provisioner.TemplateVariable + (*proto.VariableValue)(nil), // 24: provisioner.VariableValue + (*proto.RichParameterValue)(nil), // 25: provisioner.RichParameterValue + (*proto.ExternalAuthProvider)(nil), // 26: provisioner.ExternalAuthProvider + (*proto.Metadata)(nil), // 27: provisioner.Metadata + (*proto.Resource)(nil), // 28: provisioner.Resource + (*proto.RichParameter)(nil), // 29: provisioner.RichParameter + (*proto.ExternalAuthProviderResource)(nil), // 30: provisioner.ExternalAuthProviderResource } var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ 11, // 0: provisionerd.AcquiredJob.workspace_build:type_name -> provisionerd.AcquiredJob.WorkspaceBuild @@ -1695,43 +1714,44 @@ var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ 19, // 8: 
provisionerd.CompletedJob.template_import:type_name -> provisionerd.CompletedJob.TemplateImport 20, // 9: provisionerd.CompletedJob.template_dry_run:type_name -> provisionerd.CompletedJob.TemplateDryRun 0, // 10: provisionerd.Log.source:type_name -> provisionerd.LogSource - 21, // 11: provisionerd.Log.level:type_name -> provisioner.LogLevel + 22, // 11: provisionerd.Log.level:type_name -> provisioner.LogLevel 5, // 12: provisionerd.UpdateJobRequest.logs:type_name -> provisionerd.Log - 22, // 13: provisionerd.UpdateJobRequest.template_variables:type_name -> provisioner.TemplateVariable - 23, // 14: provisionerd.UpdateJobRequest.user_variable_values:type_name -> provisioner.VariableValue - 23, // 15: provisionerd.UpdateJobResponse.variable_values:type_name -> provisioner.VariableValue - 24, // 16: provisionerd.AcquiredJob.WorkspaceBuild.rich_parameter_values:type_name -> provisioner.RichParameterValue - 23, // 17: provisionerd.AcquiredJob.WorkspaceBuild.variable_values:type_name -> provisioner.VariableValue - 25, // 18: provisionerd.AcquiredJob.WorkspaceBuild.external_auth_providers:type_name -> provisioner.ExternalAuthProvider - 26, // 19: provisionerd.AcquiredJob.WorkspaceBuild.metadata:type_name -> provisioner.Metadata - 26, // 20: provisionerd.AcquiredJob.TemplateImport.metadata:type_name -> provisioner.Metadata - 23, // 21: provisionerd.AcquiredJob.TemplateImport.user_variable_values:type_name -> provisioner.VariableValue - 24, // 22: provisionerd.AcquiredJob.TemplateDryRun.rich_parameter_values:type_name -> provisioner.RichParameterValue - 23, // 23: provisionerd.AcquiredJob.TemplateDryRun.variable_values:type_name -> provisioner.VariableValue - 26, // 24: provisionerd.AcquiredJob.TemplateDryRun.metadata:type_name -> provisioner.Metadata - 27, // 25: provisionerd.CompletedJob.WorkspaceBuild.resources:type_name -> provisioner.Resource - 27, // 26: provisionerd.CompletedJob.TemplateImport.start_resources:type_name -> provisioner.Resource - 27, // 27: 
provisionerd.CompletedJob.TemplateImport.stop_resources:type_name -> provisioner.Resource - 28, // 28: provisionerd.CompletedJob.TemplateImport.rich_parameters:type_name -> provisioner.RichParameter - 29, // 29: provisionerd.CompletedJob.TemplateImport.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource - 27, // 30: provisionerd.CompletedJob.TemplateDryRun.resources:type_name -> provisioner.Resource - 1, // 31: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty - 10, // 32: provisionerd.ProvisionerDaemon.AcquireJobWithCancel:input_type -> provisionerd.CancelAcquire - 8, // 33: provisionerd.ProvisionerDaemon.CommitQuota:input_type -> provisionerd.CommitQuotaRequest - 6, // 34: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.UpdateJobRequest - 3, // 35: provisionerd.ProvisionerDaemon.FailJob:input_type -> provisionerd.FailedJob - 4, // 36: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob - 2, // 37: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob - 2, // 38: provisionerd.ProvisionerDaemon.AcquireJobWithCancel:output_type -> provisionerd.AcquiredJob - 9, // 39: provisionerd.ProvisionerDaemon.CommitQuota:output_type -> provisionerd.CommitQuotaResponse - 7, // 40: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.UpdateJobResponse - 1, // 41: provisionerd.ProvisionerDaemon.FailJob:output_type -> provisionerd.Empty - 1, // 42: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty - 37, // [37:43] is the sub-list for method output_type - 31, // [31:37] is the sub-list for method input_type - 31, // [31:31] is the sub-list for extension type_name - 31, // [31:31] is the sub-list for extension extendee - 0, // [0:31] is the sub-list for field type_name + 23, // 13: provisionerd.UpdateJobRequest.template_variables:type_name -> provisioner.TemplateVariable + 24, // 14: 
provisionerd.UpdateJobRequest.user_variable_values:type_name -> provisioner.VariableValue + 21, // 15: provisionerd.UpdateJobRequest.workspace_tags:type_name -> provisionerd.UpdateJobRequest.WorkspaceTagsEntry + 24, // 16: provisionerd.UpdateJobResponse.variable_values:type_name -> provisioner.VariableValue + 25, // 17: provisionerd.AcquiredJob.WorkspaceBuild.rich_parameter_values:type_name -> provisioner.RichParameterValue + 24, // 18: provisionerd.AcquiredJob.WorkspaceBuild.variable_values:type_name -> provisioner.VariableValue + 26, // 19: provisionerd.AcquiredJob.WorkspaceBuild.external_auth_providers:type_name -> provisioner.ExternalAuthProvider + 27, // 20: provisionerd.AcquiredJob.WorkspaceBuild.metadata:type_name -> provisioner.Metadata + 27, // 21: provisionerd.AcquiredJob.TemplateImport.metadata:type_name -> provisioner.Metadata + 24, // 22: provisionerd.AcquiredJob.TemplateImport.user_variable_values:type_name -> provisioner.VariableValue + 25, // 23: provisionerd.AcquiredJob.TemplateDryRun.rich_parameter_values:type_name -> provisioner.RichParameterValue + 24, // 24: provisionerd.AcquiredJob.TemplateDryRun.variable_values:type_name -> provisioner.VariableValue + 27, // 25: provisionerd.AcquiredJob.TemplateDryRun.metadata:type_name -> provisioner.Metadata + 28, // 26: provisionerd.CompletedJob.WorkspaceBuild.resources:type_name -> provisioner.Resource + 28, // 27: provisionerd.CompletedJob.TemplateImport.start_resources:type_name -> provisioner.Resource + 28, // 28: provisionerd.CompletedJob.TemplateImport.stop_resources:type_name -> provisioner.Resource + 29, // 29: provisionerd.CompletedJob.TemplateImport.rich_parameters:type_name -> provisioner.RichParameter + 30, // 30: provisionerd.CompletedJob.TemplateImport.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource + 28, // 31: provisionerd.CompletedJob.TemplateDryRun.resources:type_name -> provisioner.Resource + 1, // 32: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> 
provisionerd.Empty + 10, // 33: provisionerd.ProvisionerDaemon.AcquireJobWithCancel:input_type -> provisionerd.CancelAcquire + 8, // 34: provisionerd.ProvisionerDaemon.CommitQuota:input_type -> provisionerd.CommitQuotaRequest + 6, // 35: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.UpdateJobRequest + 3, // 36: provisionerd.ProvisionerDaemon.FailJob:input_type -> provisionerd.FailedJob + 4, // 37: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob + 2, // 38: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob + 2, // 39: provisionerd.ProvisionerDaemon.AcquireJobWithCancel:output_type -> provisionerd.AcquiredJob + 9, // 40: provisionerd.ProvisionerDaemon.CommitQuota:output_type -> provisionerd.CommitQuotaResponse + 7, // 41: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.UpdateJobResponse + 1, // 42: provisionerd.ProvisionerDaemon.FailJob:output_type -> provisionerd.Empty + 1, // 43: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty + 38, // [38:44] is the sub-list for method output_type + 32, // [32:38] is the sub-list for method input_type + 32, // [32:32] is the sub-list for extension type_name + 32, // [32:32] is the sub-list for extension extendee + 0, // [0:32] is the sub-list for field type_name } func init() { file_provisionerd_proto_provisionerd_proto_init() } @@ -1990,7 +2010,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisionerd_proto_provisionerd_proto_rawDesc, NumEnums: 1, - NumMessages: 20, + NumMessages: 21, NumExtensions: 0, NumServices: 1, }, diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index 61b3f588b2843..426ba63e2f98e 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -116,6 +116,7 @@ message UpdateJobRequest { repeated 
provisioner.TemplateVariable template_variables = 4; repeated provisioner.VariableValue user_variable_values = 5; bytes readme = 6; + map workspace_tags = 7; } message UpdateJobResponse { diff --git a/provisionerd/runner/runner.go b/provisionerd/runner/runner.go index d68759f48102e..08230a80051d0 100644 --- a/provisionerd/runner/runner.go +++ b/provisionerd/runner/runner.go @@ -517,7 +517,7 @@ func (r *Runner) runTemplateImport(ctx context.Context) (*proto.CompletedJob, *p Stage: "Parsing template parameters", CreatedAt: time.Now().UnixMilli(), }) - templateVariables, readme, err := r.runTemplateImportParse(ctx) + workspaceTags, templateVariables, readme, err := r.runTemplateImportParse(ctx) // TODO workspace_tags if err != nil { return nil, r.failedJobf("run parse: %s", err) } @@ -529,6 +529,7 @@ func (r *Runner) runTemplateImport(ctx context.Context) (*proto.CompletedJob, *p TemplateVariables: templateVariables, UserVariableValues: r.job.GetTemplateImport().GetUserVariableValues(), Readme: readme, + WorkspaceTags: workspaceTags, }) if err != nil { return nil, r.failedJobf("update job: %s", err) @@ -586,19 +587,19 @@ func (r *Runner) runTemplateImport(ctx context.Context) (*proto.CompletedJob, *p // Parses template variables and README from source. 
func (r *Runner) runTemplateImportParse(ctx context.Context) ( - vars []*sdkproto.TemplateVariable, readme []byte, err error, + workspaceTags map[string]string, vars []*sdkproto.TemplateVariable, readme []byte, err error, ) { ctx, span := r.startTrace(ctx, tracing.FuncName()) defer span.End() err = r.session.Send(&sdkproto.Request{Type: &sdkproto.Request_Parse{Parse: &sdkproto.ParseRequest{}}}) if err != nil { - return nil, nil, xerrors.Errorf("parse source: %w", err) + return nil, nil, nil, xerrors.Errorf("parse source: %w", err) } for { msg, err := r.session.Recv() if err != nil { - return nil, nil, xerrors.Errorf("recv parse source: %w", err) + return nil, nil, nil, xerrors.Errorf("recv parse source: %w", err) } switch msgType := msg.Type.(type) { case *sdkproto.Response_Log: @@ -617,17 +618,18 @@ func (r *Runner) runTemplateImportParse(ctx context.Context) ( case *sdkproto.Response_Parse: pc := msgType.Parse r.logger.Debug(context.Background(), "parse complete", + slog.F("workspace_tags", pc.WorkspaceTags), slog.F("template_variables", pc.TemplateVariables), slog.F("readme_len", len(pc.Readme)), slog.F("error", pc.Error), ) if pc.Error != "" { - return nil, nil, xerrors.Errorf("parse error: %s", pc.Error) + return nil, nil, nil, xerrors.Errorf("parse error: %s", pc.Error) } - return msgType.Parse.TemplateVariables, msgType.Parse.Readme, nil + return msgType.Parse.WorkspaceTags, msgType.Parse.TemplateVariables, msgType.Parse.Readme, nil default: - return nil, nil, xerrors.Errorf("invalid message type %q received from provisioner", + return nil, nil, nil, xerrors.Errorf("invalid message type %q received from provisioner", reflect.TypeOf(msg.Type).String()) } } diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 99d7b2e26a695..f6ead5e28ba16 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -1884,6 +1884,7 @@ type ParseComplete struct { Error string 
`protobuf:"bytes,1,opt,name=error,proto3" json:"error,omitempty"` TemplateVariables []*TemplateVariable `protobuf:"bytes,2,rep,name=template_variables,json=templateVariables,proto3" json:"template_variables,omitempty"` Readme []byte `protobuf:"bytes,3,opt,name=readme,proto3" json:"readme,omitempty"` + WorkspaceTags map[string]string `protobuf:"bytes,4,rep,name=workspace_tags,json=workspaceTags,proto3" json:"workspace_tags,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *ParseComplete) Reset() { @@ -1939,6 +1940,13 @@ func (x *ParseComplete) GetReadme() []byte { return nil } +func (x *ParseComplete) GetWorkspaceTags() map[string]string { + if x != nil { + return x.WorkspaceTags + } + return nil +} + // PlanRequest asks the provisioner to plan what resources & parameters it will create type PlanRequest struct { state protoimpl.MessageState @@ -2929,7 +2937,7 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8b, 0x01, 0x0a, + 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, @@ -2938,115 +2946,124 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 
0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x22, 0xb5, 0x02, 0x0a, 0x0b, 0x50, - 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, - 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, - 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, - 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, - 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, - 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, - 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, - 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, + 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 
0x65, 0x2e, 0x57, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, + 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, + 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, + 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0xb5, 0x02, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, + 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, + 0x59, 0x0a, 0x17, 
0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, + 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, + 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0xf8, 0x01, 0x0a, 0x0c, 0x50, + 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, + 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, + 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 
0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x8f, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, + 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, + 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, + 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, - 
0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, - 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 0x72, 0x73, 0x22, 0xf8, 0x01, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, - 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, - 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, - 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, - 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0x41, 0x0a, - 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, - 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 
0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, - 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x22, 0x8f, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, - 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, - 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, - 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, - 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, - 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 0x72, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 
0x65, 0x73, 0x74, 0x12, - 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, - 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, - 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, - 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, - 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, - 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, - 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, - 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, - 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, - 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, - 0x20, 0x01, 
0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, - 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, - 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, - 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, - 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x42, 0x06, - 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, - 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, - 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, - 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, - 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, - 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, - 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, - 0x49, 0x43, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, - 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, - 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 
0x52, 0x4f, 0x59, 0x10, 0x02, 0x32, 0x49, 0x0a, - 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, - 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, - 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, - 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, + 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, + 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, + 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, + 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 
0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, + 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, + 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, + 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, + 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, + 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, + 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, + 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, + 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x41, 0x70, 
0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, + 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f, + 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, + 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, + 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, + 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, + 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, + 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, + 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, + 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, + 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, + 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, + 0x52, 0x4f, 0x59, 0x10, 0x02, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, + 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 
0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -3062,7 +3079,7 @@ func file_provisionersdk_proto_provisioner_proto_rawDescGZIP() []byte { } var file_provisionersdk_proto_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_provisionersdk_proto_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 31) +var file_provisionersdk_proto_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 32) var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{ (LogLevel)(0), // 0: provisioner.LogLevel (AppSharingLevel)(0), // 1: provisioner.AppSharingLevel @@ -3098,6 +3115,7 @@ var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{ (*Agent_Metadata)(nil), // 31: provisioner.Agent.Metadata nil, // 32: provisioner.Agent.EnvEntry (*Resource_Metadata)(nil), // 33: provisioner.Resource.Metadata + nil, // 34: provisioner.ParseComplete.WorkspaceTagsEntry } var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{ 5, // 0: provisioner.RichParameter.options:type_name -> provisioner.RichParameterOption @@ -3114,33 +3132,34 @@ var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{ 33, // 11: provisioner.Resource.metadata:type_name -> provisioner.Resource.Metadata 2, // 12: provisioner.Metadata.workspace_transition:type_name -> provisioner.WorkspaceTransition 4, // 13: provisioner.ParseComplete.template_variables:type_name -> provisioner.TemplateVariable - 20, // 14: provisioner.PlanRequest.metadata:type_name -> provisioner.Metadata - 7, // 15: provisioner.PlanRequest.rich_parameter_values:type_name -> provisioner.RichParameterValue - 8, // 16: provisioner.PlanRequest.variable_values:type_name -> provisioner.VariableValue - 12, // 17: provisioner.PlanRequest.external_auth_providers:type_name -> provisioner.ExternalAuthProvider - 19, // 18: provisioner.PlanComplete.resources:type_name -> provisioner.Resource - 6, // 19: 
provisioner.PlanComplete.parameters:type_name -> provisioner.RichParameter - 11, // 20: provisioner.PlanComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource - 20, // 21: provisioner.ApplyRequest.metadata:type_name -> provisioner.Metadata - 19, // 22: provisioner.ApplyComplete.resources:type_name -> provisioner.Resource - 6, // 23: provisioner.ApplyComplete.parameters:type_name -> provisioner.RichParameter - 11, // 24: provisioner.ApplyComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource - 21, // 25: provisioner.Request.config:type_name -> provisioner.Config - 22, // 26: provisioner.Request.parse:type_name -> provisioner.ParseRequest - 24, // 27: provisioner.Request.plan:type_name -> provisioner.PlanRequest - 26, // 28: provisioner.Request.apply:type_name -> provisioner.ApplyRequest - 28, // 29: provisioner.Request.cancel:type_name -> provisioner.CancelRequest - 9, // 30: provisioner.Response.log:type_name -> provisioner.Log - 23, // 31: provisioner.Response.parse:type_name -> provisioner.ParseComplete - 25, // 32: provisioner.Response.plan:type_name -> provisioner.PlanComplete - 27, // 33: provisioner.Response.apply:type_name -> provisioner.ApplyComplete - 29, // 34: provisioner.Provisioner.Session:input_type -> provisioner.Request - 30, // 35: provisioner.Provisioner.Session:output_type -> provisioner.Response - 35, // [35:36] is the sub-list for method output_type - 34, // [34:35] is the sub-list for method input_type - 34, // [34:34] is the sub-list for extension type_name - 34, // [34:34] is the sub-list for extension extendee - 0, // [0:34] is the sub-list for field type_name + 34, // 14: provisioner.ParseComplete.workspace_tags:type_name -> provisioner.ParseComplete.WorkspaceTagsEntry + 20, // 15: provisioner.PlanRequest.metadata:type_name -> provisioner.Metadata + 7, // 16: provisioner.PlanRequest.rich_parameter_values:type_name -> provisioner.RichParameterValue + 8, // 17: 
provisioner.PlanRequest.variable_values:type_name -> provisioner.VariableValue + 12, // 18: provisioner.PlanRequest.external_auth_providers:type_name -> provisioner.ExternalAuthProvider + 19, // 19: provisioner.PlanComplete.resources:type_name -> provisioner.Resource + 6, // 20: provisioner.PlanComplete.parameters:type_name -> provisioner.RichParameter + 11, // 21: provisioner.PlanComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource + 20, // 22: provisioner.ApplyRequest.metadata:type_name -> provisioner.Metadata + 19, // 23: provisioner.ApplyComplete.resources:type_name -> provisioner.Resource + 6, // 24: provisioner.ApplyComplete.parameters:type_name -> provisioner.RichParameter + 11, // 25: provisioner.ApplyComplete.external_auth_providers:type_name -> provisioner.ExternalAuthProviderResource + 21, // 26: provisioner.Request.config:type_name -> provisioner.Config + 22, // 27: provisioner.Request.parse:type_name -> provisioner.ParseRequest + 24, // 28: provisioner.Request.plan:type_name -> provisioner.PlanRequest + 26, // 29: provisioner.Request.apply:type_name -> provisioner.ApplyRequest + 28, // 30: provisioner.Request.cancel:type_name -> provisioner.CancelRequest + 9, // 31: provisioner.Response.log:type_name -> provisioner.Log + 23, // 32: provisioner.Response.parse:type_name -> provisioner.ParseComplete + 25, // 33: provisioner.Response.plan:type_name -> provisioner.PlanComplete + 27, // 34: provisioner.Response.apply:type_name -> provisioner.ApplyComplete + 29, // 35: provisioner.Provisioner.Session:input_type -> provisioner.Request + 30, // 36: provisioner.Provisioner.Session:output_type -> provisioner.Response + 36, // [36:37] is the sub-list for method output_type + 35, // [35:36] is the sub-list for method input_type + 35, // [35:35] is the sub-list for extension type_name + 35, // [35:35] is the sub-list for extension extendee + 0, // [0:35] is the sub-list for field type_name } func init() { 
file_provisionersdk_proto_provisioner_proto_init() } @@ -3534,7 +3553,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisionersdk_proto_provisioner_proto_rawDesc, NumEnums: 3, - NumMessages: 31, + NumMessages: 32, NumExtensions: 0, NumServices: 1, }, diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index 1ee779aa76eff..e378ff79dcd46 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -246,6 +246,7 @@ message ParseComplete { string error = 1; repeated TemplateVariable template_variables = 2; bytes readme = 3; + map workspace_tags = 4; } // PlanRequest asks the provisioner to plan what resources & parameters it will create diff --git a/site/e2e/helpers.ts b/site/e2e/helpers.ts index 2aa45193806d0..4d047b948e93b 100644 --- a/site/e2e/helpers.ts +++ b/site/e2e/helpers.ts @@ -501,6 +501,7 @@ const createTemplateVersionTar = async ( templateVariables: [], error: "", readme: new Uint8Array(), + workspaceTags: {}, ...response.parse, } as ParseComplete; tar.addFile( diff --git a/site/e2e/provisionerGenerated.ts b/site/e2e/provisionerGenerated.ts index 1ba6f6f64a9df..744b5c0918f32 100644 --- a/site/e2e/provisionerGenerated.ts +++ b/site/e2e/provisionerGenerated.ts @@ -250,6 +250,12 @@ export interface ParseComplete { error: string; templateVariables: TemplateVariable[]; readme: Uint8Array; + workspaceTags: { [key: string]: string }; +} + +export interface ParseComplete_WorkspaceTagsEntry { + key: string; + value: string; } /** PlanRequest asks the provisioner to plan what resources & parameters it will create */ @@ -881,6 +887,27 @@ export const ParseComplete = { if (message.readme.length !== 0) { writer.uint32(26).bytes(message.readme); } + Object.entries(message.workspaceTags).forEach(([key, value]) => { + ParseComplete_WorkspaceTagsEntry.encode( + { key: key as any, value }, + 
writer.uint32(34).fork(), + ).ldelim(); + }); + return writer; + }, +}; + +export const ParseComplete_WorkspaceTagsEntry = { + encode( + message: ParseComplete_WorkspaceTagsEntry, + writer: _m0.Writer = _m0.Writer.create(), + ): _m0.Writer { + if (message.key !== "") { + writer.uint32(10).string(message.key); + } + if (message.value !== "") { + writer.uint32(18).string(message.value); + } return writer; }, }; From 273209432dc1b039cc60a4cb61681086e5aaca33 Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Mon, 20 May 2024 19:57:39 -0700 Subject: [PATCH 081/149] chore: fix tailnet integration test flake (#13313) --- tailnet/test/integration/integration.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index db0f1500e491a..b26365ea3ee8b 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -449,6 +449,9 @@ func ExecBackground(t *testing.T, processName string, netNS *os.File, name strin waitErr := make(chan error, 1) go func() { err := cmd.Wait() + if err != nil && strings.Contains(err.Error(), "signal: terminated") { + err = nil + } waitErr <- err close(waitErr) }() From 8e78b9495d96387e68679acae6fe9511bcc756be Mon Sep 17 00:00:00 2001 From: Asher Date: Tue, 21 May 2024 17:19:59 +0000 Subject: [PATCH 082/149] feat: open most recent directory or workspace when launching VS Code (#13326) --- .../resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx | 1 + 1 file changed, 1 insertion(+) diff --git a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx index 73763439076bd..b1d714756eceb 100644 --- a/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx +++ b/site/src/modules/resources/VSCodeDesktopButton/VSCodeDesktopButton.tsx @@ -126,6 +126,7 @@ const VSCodeButton: FC = ({ workspace: workspaceName, url: location.origin, token: key, 
+ openRecent: "true", }); if (agentName) { query.set("agent", agentName); From c61b64be6145e354353b6c87a0a5cb24421a5184 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Tue, 21 May 2024 13:14:00 -0500 Subject: [PATCH 083/149] feat: add hidden enterprise cmd command to list roles (#13303) * feat: add hidden enterprise cmd command to list roles This includes custom roles, and has a json ouput option for more granular permissions --- coderd/apidoc/docs.go | 26 +++ coderd/apidoc/swagger.json | 26 +++ coderd/database/dbauthz/dbauthz.go | 5 +- coderd/database/dbauthz/dbauthz_test.go | 4 +- coderd/database/dbmem/dbmem.go | 20 +- coderd/database/dbmetrics/dbmetrics.go | 6 +- coderd/database/dbmock/dbmock.go | 12 +- coderd/database/dump.sql | 5 +- .../000212_custom_role_orgs.down.sql | 3 + .../migrations/000212_custom_role_orgs.up.sql | 5 + coderd/database/models.go | 2 + coderd/database/querier.go | 2 +- coderd/database/queries.sql.go | 27 ++- coderd/database/queries/roles.sql | 13 +- coderd/httpapi/name.go | 2 +- coderd/rbac/rolestore/rolestore.go | 7 +- coderd/roles.go | 42 +++- coderd/roles_test.go | 24 ++- codersdk/roles.go | 16 +- docs/api/members.md | 192 ++++++++++++++++-- docs/api/schemas.md | 48 ++++- enterprise/cli/rolescmd.go | 111 ++++++++++ enterprise/cli/rolescmd_test.go | 68 +++++++ enterprise/cli/root.go | 1 + enterprise/coderd/roles.go | 8 + enterprise/coderd/roles_test.go | 47 ++++- site/src/api/typesGenerated.ts | 3 +- site/src/testHelpers/entities.ts | 23 ++- 28 files changed, 662 insertions(+), 86 deletions(-) create mode 100644 coderd/database/migrations/000212_custom_role_orgs.down.sql create mode 100644 coderd/database/migrations/000212_custom_role_orgs.up.sql create mode 100644 enterprise/cli/rolescmd.go create mode 100644 enterprise/cli/rolescmd_test.go diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 5883fdb2f47c8..6dde991904811 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8335,11 +8335,37 @@ const 
docTemplate = `{ "assignable": { "type": "boolean" }, + "built_in": { + "description": "BuiltIn roles are immutable", + "type": "boolean" + }, "display_name": { "type": "string" }, "name": { "type": "string" + }, + "organization_permissions": { + "description": "map[\u003corg_id\u003e] -\u003e Permissions", + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + } + }, + "site_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, + "user_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } } } }, diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index d6684e7cf6c18..d52e3c515d7d2 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7400,11 +7400,37 @@ "assignable": { "type": "boolean" }, + "built_in": { + "description": "BuiltIn roles are immutable", + "type": "boolean" + }, "display_name": { "type": "string" }, "name": { "type": "string" + }, + "organization_permissions": { + "description": "map[\u003corg_id\u003e] -\u003e Permissions", + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + } + }, + "site_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } + }, + "user_permissions": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" + } } } }, diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index f3c1ee081eb83..bfb28ece948c3 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -835,11 +835,12 @@ func (q *querier) CleanTailnetTunnels(ctx context.Context) error { return q.db.CleanTailnetTunnels(ctx) } -func (q *querier) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]database.CustomRole, error) { +// TODO: Handle org 
scoped lookups +func (q *querier) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceAssignRole); err != nil { return nil, err } - return q.db.CustomRolesByName(ctx, lookupRoles) + return q.db.CustomRoles(ctx, arg) } func (q *querier) DeleteAPIKeyByID(ctx context.Context, id string) error { diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index b6d911dc3849a..e2b6171b587c3 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -1177,8 +1177,8 @@ func (s *MethodTestSuite) TestUser() { b := dbgen.User(s.T(), db, database.User{}) check.Args().Asserts(rbac.ResourceSystem, policy.ActionRead).Returns(slice.New(a.ID, b.ID)) })) - s.Run("CustomRolesByName", s.Subtest(func(db database.Store, check *expects) { - check.Args([]string{}).Asserts(rbac.ResourceAssignRole, policy.ActionRead).Returns([]database.CustomRole{}) + s.Run("CustomRoles", s.Subtest(func(db database.Store, check *expects) { + check.Args(database.CustomRolesParams{}).Asserts(rbac.ResourceAssignRole, policy.ActionRead).Returns([]database.CustomRole{}) })) s.Run("Blank/UpsertCustomRole", s.Subtest(func(db database.Store, check *expects) { // Blank is no perms in the role diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 9c76d04b5a374..0a8fe6e24a8a6 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -1175,18 +1175,26 @@ func (*FakeQuerier) CleanTailnetTunnels(context.Context) error { return ErrUnimplemented } -func (q *FakeQuerier) CustomRolesByName(_ context.Context, lookupRoles []string) ([]database.CustomRole, error) { +func (q *FakeQuerier) CustomRoles(_ context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { q.mutex.Lock() defer q.mutex.Unlock() found := make([]database.CustomRole, 0) for _, role := range 
q.data.customRoles { - if slices.ContainsFunc(lookupRoles, func(s string) bool { - return strings.EqualFold(s, role.Name) - }) { - role := role - found = append(found, role) + role := role + if len(arg.LookupRoles) > 0 { + if !slices.ContainsFunc(arg.LookupRoles, func(s string) bool { + return strings.EqualFold(s, role.Name) + }) { + continue + } } + + if arg.ExcludeOrgRoles && role.OrganizationID.Valid { + continue + } + + found = append(found, role) } return found, nil diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index f294b8266c75f..1b59724a6ea21 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -144,10 +144,10 @@ func (m metricsStore) CleanTailnetTunnels(ctx context.Context) error { return r0 } -func (m metricsStore) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]database.CustomRole, error) { +func (m metricsStore) CustomRoles(ctx context.Context, arg database.CustomRolesParams) ([]database.CustomRole, error) { start := time.Now() - r0, r1 := m.s.CustomRolesByName(ctx, lookupRoles) - m.queryLatencies.WithLabelValues("CustomRolesByName").Observe(time.Since(start).Seconds()) + r0, r1 := m.s.CustomRoles(ctx, arg) + m.queryLatencies.WithLabelValues("CustomRoles").Observe(time.Since(start).Seconds()) return r0, r1 } diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 157118be65c3a..128b76cfcd0c6 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -173,19 +173,19 @@ func (mr *MockStoreMockRecorder) CleanTailnetTunnels(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CleanTailnetTunnels", reflect.TypeOf((*MockStore)(nil).CleanTailnetTunnels), arg0) } -// CustomRolesByName mocks base method. -func (m *MockStore) CustomRolesByName(arg0 context.Context, arg1 []string) ([]database.CustomRole, error) { +// CustomRoles mocks base method. 
+func (m *MockStore) CustomRoles(arg0 context.Context, arg1 database.CustomRolesParams) ([]database.CustomRole, error) { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "CustomRolesByName", arg0, arg1) + ret := m.ctrl.Call(m, "CustomRoles", arg0, arg1) ret0, _ := ret[0].([]database.CustomRole) ret1, _ := ret[1].(error) return ret0, ret1 } -// CustomRolesByName indicates an expected call of CustomRolesByName. -func (mr *MockStoreMockRecorder) CustomRolesByName(arg0, arg1 any) *gomock.Call { +// CustomRoles indicates an expected call of CustomRoles. +func (mr *MockStoreMockRecorder) CustomRoles(arg0, arg1 any) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CustomRolesByName", reflect.TypeOf((*MockStore)(nil).CustomRolesByName), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CustomRoles", reflect.TypeOf((*MockStore)(nil).CustomRoles), arg0, arg1) } // DeleteAPIKeyByID mocks base method. diff --git a/coderd/database/dump.sql b/coderd/database/dump.sql index 097f56aff5915..fde9c9556ac84 100644 --- a/coderd/database/dump.sql +++ b/coderd/database/dump.sql @@ -411,11 +411,14 @@ CREATE TABLE custom_roles ( org_permissions jsonb DEFAULT '{}'::jsonb NOT NULL, user_permissions jsonb DEFAULT '[]'::jsonb NOT NULL, created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, - updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL + updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + organization_id uuid ); COMMENT ON TABLE custom_roles IS 'Custom roles allow dynamic roles expanded at runtime'; +COMMENT ON COLUMN custom_roles.organization_id IS 'Roles can optionally be scoped to an organization'; + CREATE TABLE dbcrypt_keys ( number integer NOT NULL, active_key_digest text, diff --git a/coderd/database/migrations/000212_custom_role_orgs.down.sql b/coderd/database/migrations/000212_custom_role_orgs.down.sql new file mode 100644 index 0000000000000..39b7b0cfed852 --- 
/dev/null +++ b/coderd/database/migrations/000212_custom_role_orgs.down.sql @@ -0,0 +1,3 @@ +ALTER TABLE custom_roles + -- This column is nullable, meaning no organization scope + DROP COLUMN organization_id; diff --git a/coderd/database/migrations/000212_custom_role_orgs.up.sql b/coderd/database/migrations/000212_custom_role_orgs.up.sql new file mode 100644 index 0000000000000..a4cf2bacff15b --- /dev/null +++ b/coderd/database/migrations/000212_custom_role_orgs.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE custom_roles + -- This column is nullable, meaning no organization scope + ADD COLUMN organization_id uuid; + +COMMENT ON COLUMN custom_roles.organization_id IS 'Roles can optionally be scoped to an organization' diff --git a/coderd/database/models.go b/coderd/database/models.go index 3636f04fd05c5..42c41c83bd5dc 100644 --- a/coderd/database/models.go +++ b/coderd/database/models.go @@ -1790,6 +1790,8 @@ type CustomRole struct { UserPermissions json.RawMessage `db:"user_permissions" json:"user_permissions"` CreatedAt time.Time `db:"created_at" json:"created_at"` UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + // Roles can optionally be scoped to an organization + OrganizationID uuid.NullUUID `db:"organization_id" json:"organization_id"` } // A table used to store the keys used to encrypt the database. 
diff --git a/coderd/database/querier.go b/coderd/database/querier.go index cbc76dee5f602..8c75b9dcb53a9 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -48,7 +48,7 @@ type sqlcQuerier interface { CleanTailnetCoordinators(ctx context.Context) error CleanTailnetLostPeers(ctx context.Context) error CleanTailnetTunnels(ctx context.Context) error - CustomRolesByName(ctx context.Context, lookupRoles []string) ([]CustomRole, error) + CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error) DeleteAPIKeyByID(ctx context.Context, id string) error DeleteAPIKeysByUserID(ctx context.Context, userID uuid.UUID) error DeleteAllTailnetClientSubscriptions(ctx context.Context, arg DeleteAllTailnetClientSubscriptionsParams) error diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index baf124dce9b48..c38de30b4cb84 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5553,18 +5553,33 @@ func (q *sqlQuerier) UpdateReplica(ctx context.Context, arg UpdateReplicaParams) return i, err } -const customRolesByName = `-- name: CustomRolesByName :many +const customRoles = `-- name: CustomRoles :many SELECT - name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at + name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at, organization_id FROM custom_roles WHERE + true + -- Lookup roles filter + AND CASE WHEN array_length($1 :: text[], 1) > 0 THEN -- Case insensitive name ILIKE ANY($1 :: text []) + ELSE true + END + -- Org scoping filter, to only fetch site wide roles + AND CASE WHEN $2 :: boolean THEN + organization_id IS null + ELSE true + END ` -func (q *sqlQuerier) CustomRolesByName(ctx context.Context, lookupRoles []string) ([]CustomRole, error) { - rows, err := q.db.QueryContext(ctx, customRolesByName, pq.Array(lookupRoles)) +type CustomRolesParams struct { + LookupRoles []string `db:"lookup_roles" 
json:"lookup_roles"` + ExcludeOrgRoles bool `db:"exclude_org_roles" json:"exclude_org_roles"` +} + +func (q *sqlQuerier) CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error) { + rows, err := q.db.QueryContext(ctx, customRoles, pq.Array(arg.LookupRoles), arg.ExcludeOrgRoles) if err != nil { return nil, err } @@ -5580,6 +5595,7 @@ func (q *sqlQuerier) CustomRolesByName(ctx context.Context, lookupRoles []string &i.UserPermissions, &i.CreatedAt, &i.UpdatedAt, + &i.OrganizationID, ); err != nil { return nil, err } @@ -5622,7 +5638,7 @@ ON CONFLICT (name) org_permissions = $4, user_permissions = $5, updated_at = now() -RETURNING name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at +RETURNING name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at, organization_id ` type UpsertCustomRoleParams struct { @@ -5650,6 +5666,7 @@ func (q *sqlQuerier) UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleP &i.UserPermissions, &i.CreatedAt, &i.UpdatedAt, + &i.OrganizationID, ) return i, err } diff --git a/coderd/database/queries/roles.sql b/coderd/database/queries/roles.sql index 30ec437e1814e..2137dea34b077 100644 --- a/coderd/database/queries/roles.sql +++ b/coderd/database/queries/roles.sql @@ -1,14 +1,23 @@ --- name: CustomRolesByName :many +-- name: CustomRoles :many SELECT * FROM custom_roles WHERE + true + -- Lookup roles filter + AND CASE WHEN array_length(@lookup_roles :: text[], 1) > 0 THEN -- Case insensitive name ILIKE ANY(@lookup_roles :: text []) + ELSE true + END + -- Org scoping filter, to only fetch site wide roles + AND CASE WHEN @exclude_org_roles :: boolean THEN + organization_id IS null + ELSE true + END ; - -- name: UpsertCustomRole :one INSERT INTO custom_roles ( diff --git a/coderd/httpapi/name.go b/coderd/httpapi/name.go index 0083927c85a08..d8b64a71bdc44 100644 --- a/coderd/httpapi/name.go +++ b/coderd/httpapi/name.go @@ -38,7 +38,7 @@ func 
UsernameFrom(str string) string { } // NameValid returns whether the input string is a valid name. -// It is a generic validator for any name (user, workspace, template, etc.). +// It is a generic validator for any name (user, workspace, template, role name, etc.). func NameValid(str string) error { if len(str) > 32 { return xerrors.New("must be <= 32 characters") diff --git a/coderd/rbac/rolestore/rolestore.go b/coderd/rbac/rolestore/rolestore.go index 0ed8b2f12fcdb..9881cde028826 100644 --- a/coderd/rbac/rolestore/rolestore.go +++ b/coderd/rbac/rolestore/rolestore.go @@ -72,7 +72,10 @@ func Expand(ctx context.Context, db database.Store, names []string) (rbac.Roles, // If some roles are missing from the database, they are omitted from // the expansion. These roles are no-ops. Should we raise some kind of // warning when this happens? - dbroles, err := db.CustomRolesByName(ctx, lookup) + dbroles, err := db.CustomRoles(ctx, database.CustomRolesParams{ + LookupRoles: lookup, + ExcludeOrgRoles: false, + }) if err != nil { return nil, xerrors.Errorf("fetch custom roles: %w", err) } @@ -81,7 +84,7 @@ func Expand(ctx context.Context, db database.Store, names []string) (rbac.Roles, for _, dbrole := range dbroles { converted, err := ConvertDBRole(dbrole) if err != nil { - return nil, xerrors.Errorf("convert db role %q: %w", dbrole, err) + return nil, xerrors.Errorf("convert db role %q: %w", dbrole.Name, err) } roles = append(roles, converted) cache.Store(dbrole.Name, converted) diff --git a/coderd/roles.go b/coderd/roles.go index f90f0e474dddf..3d6245f9d4594 100644 --- a/coderd/roles.go +++ b/coderd/roles.go @@ -3,8 +3,11 @@ package coderd import ( "net/http" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/httpmw" "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/rbac/rolestore" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/coderd/httpapi" 
@@ -28,8 +31,25 @@ func (api *API) AssignableSiteRoles(rw http.ResponseWriter, r *http.Request) { return } - roles := rbac.SiteRoles() - httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles)) + dbCustomRoles, err := api.Database.CustomRoles(ctx, database.CustomRolesParams{ + // Only site wide custom roles to be included + ExcludeOrgRoles: true, + LookupRoles: nil, + }) + if err != nil { + httpapi.InternalServerError(rw, err) + return + } + + customRoles := make([]rbac.Role, 0, len(dbCustomRoles)) + for _, customRole := range dbCustomRoles { + rbacRole, err := rolestore.ConvertDBRole(customRole) + if err == nil { + customRoles = append(customRoles, rbacRole) + } + } + + httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, rbac.SiteRoles(), customRoles)) } // assignableOrgRoles returns all org wide roles that can be assigned. @@ -53,10 +73,10 @@ func (api *API) assignableOrgRoles(rw http.ResponseWriter, r *http.Request) { } roles := rbac.OrganizationRoles(organization.ID) - httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles)) + httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles, []rbac.Role{})) } -func assignableRoles(actorRoles rbac.ExpandableRoles, roles []rbac.Role) []codersdk.AssignableRoles { +func assignableRoles(actorRoles rbac.ExpandableRoles, roles []rbac.Role, customRoles []rbac.Role) []codersdk.AssignableRoles { assignable := make([]codersdk.AssignableRoles, 0) for _, role := range roles { // The member role is implied, and not assignable. 
@@ -66,11 +86,17 @@ func assignableRoles(actorRoles rbac.ExpandableRoles, roles []rbac.Role) []coder continue } assignable = append(assignable, codersdk.AssignableRoles{ - SlimRole: codersdk.SlimRole{ - Name: role.Name, - DisplayName: role.DisplayName, - }, + Role: db2sdk.Role(role), + Assignable: rbac.CanAssignRole(actorRoles, role.Name), + BuiltIn: true, + }) + } + + for _, role := range customRoles { + assignable = append(assignable, codersdk.AssignableRoles{ + Role: db2sdk.Role(role), Assignable: rbac.CanAssignRole(actorRoles, role.Name), + BuiltIn: false, }) } return assignable diff --git a/coderd/roles_test.go b/coderd/roles_test.go index 6754ddc17c9c2..d82c03033cb54 100644 --- a/coderd/roles_test.go +++ b/coderd/roles_test.go @@ -8,6 +8,7 @@ import ( "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" @@ -137,18 +138,27 @@ func TestListRoles(t *testing.T) { require.Contains(t, apiErr.Message, c.AuthorizedError) } else { require.NoError(t, err) - require.ElementsMatch(t, c.ExpectedRoles, roles) + ignorePerms := func(f codersdk.AssignableRoles) codersdk.AssignableRoles { + return codersdk.AssignableRoles{ + Role: codersdk.Role{ + Name: f.Name, + DisplayName: f.DisplayName, + }, + Assignable: f.Assignable, + BuiltIn: true, + } + } + expected := db2sdk.List(c.ExpectedRoles, ignorePerms) + found := db2sdk.List(roles, ignorePerms) + require.ElementsMatch(t, expected, found) } }) } } -func convertRole(roleName string) codersdk.SlimRole { +func convertRole(roleName string) codersdk.Role { role, _ := rbac.RoleByName(roleName) - return codersdk.SlimRole{ - DisplayName: role.DisplayName, - Name: role.Name, - } + return db2sdk.Role(role) } func convertRoles(assignableRoles map[string]bool) []codersdk.AssignableRoles { @@ -156,7 +166,7 @@ func convertRoles(assignableRoles 
map[string]bool) []codersdk.AssignableRoles { for roleName, assignable := range assignableRoles { role := convertRole(roleName) converted = append(converted, codersdk.AssignableRoles{ - SlimRole: role, + Role: role, Assignable: assignable, }) } diff --git a/codersdk/roles.go b/codersdk/roles.go index 90112f7c6ef30..29b0174931fbe 100644 --- a/codersdk/roles.go +++ b/codersdk/roles.go @@ -19,8 +19,10 @@ type SlimRole struct { } type AssignableRoles struct { - SlimRole - Assignable bool `json:"assignable"` + Role `table:"r,recursive_inline"` + Assignable bool `json:"assignable" table:"assignable"` + // BuiltIn roles are immutable + BuiltIn bool `json:"built_in" table:"built_in"` } // Permission is the format passed into the rego. @@ -33,12 +35,12 @@ type Permission struct { // Role is a longer form of SlimRole used to edit custom roles. type Role struct { - Name string `json:"name"` - DisplayName string `json:"display_name"` - SitePermissions []Permission `json:"site_permissions"` + Name string `json:"name" table:"name,default_sort"` + DisplayName string `json:"display_name" table:"display_name"` + SitePermissions []Permission `json:"site_permissions" table:"site_permissions"` // map[] -> Permissions - OrganizationPermissions map[string][]Permission `json:"organization_permissions"` - UserPermissions []Permission `json:"user_permissions"` + OrganizationPermissions map[string][]Permission `json:"organization_permissions" table:"org_permissions"` + UserPermissions []Permission `json:"user_permissions" table:"user_permissions"` } // PatchRole will upsert a custom site wide role diff --git a/docs/api/members.md b/docs/api/members.md index 43ae4e8f23da1..8b34200e50e95 100644 --- a/docs/api/members.md +++ b/docs/api/members.md @@ -27,8 +27,39 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members [ { "assignable": true, + "built_in": true, "display_name": "string", - "name": "string" + "name": "string", + "organization_permissions": { + 
"property1": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "property2": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + }, + "site_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "user_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] } ] ``` @@ -43,12 +74,63 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members Status Code **200** -| Name | Type | Required | Restrictions | Description | -| ---------------- | ------- | -------- | ------------ | ----------- | -| `[array item]` | array | false | | | -| `» assignable` | boolean | false | | | -| `» display_name` | string | false | | | -| `» name` | string | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | +| `[array item]` | array | false | | | +| `» assignable` | boolean | false | | | +| `» built_in` | boolean | false | | Built in roles are immutable | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_permissions` | object | false | | map[] -> Permissions | +| `»» [any property]` | array | false | | | +| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»»» negate` | boolean | false | | Negate makes this a negative permission | +| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | + +#### Enumerated Values + +| Property | Value | +| --------------- | ----------------------- | +| `action` | `application_connect` | +| `action` | `assign` | +| `action` | `create` | +| `action` | `delete` | +| `action` 
| `read` | +| `action` | `read_personal` | +| `action` | `ssh` | +| `action` | `update` | +| `action` | `update_personal` | +| `action` | `use` | +| `action` | `view_insights` | +| `action` | `start` | +| `action` | `stop` | +| `resource_type` | `*` | +| `resource_type` | `api_key` | +| `resource_type` | `assign_org_role` | +| `resource_type` | `assign_role` | +| `resource_type` | `audit_log` | +| `resource_type` | `debug_info` | +| `resource_type` | `deployment_config` | +| `resource_type` | `deployment_stats` | +| `resource_type` | `file` | +| `resource_type` | `group` | +| `resource_type` | `license` | +| `resource_type` | `oauth2_app` | +| `resource_type` | `oauth2_app_code_token` | +| `resource_type` | `oauth2_app_secret` | +| `resource_type` | `organization` | +| `resource_type` | `organization_member` | +| `resource_type` | `provisioner_daemon` | +| `resource_type` | `replicas` | +| `resource_type` | `system` | +| `resource_type` | `tailnet_coordinator` | +| `resource_type` | `template` | +| `resource_type` | `user` | +| `resource_type` | `workspace` | +| `resource_type` | `workspace_dormant` | +| `resource_type` | `workspace_proxy` | To perform this operation, you must be authenticated. [Learn more](authentication.md). 
@@ -130,8 +212,39 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ [ { "assignable": true, + "built_in": true, "display_name": "string", - "name": "string" + "name": "string", + "organization_permissions": { + "property1": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "property2": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + }, + "site_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "user_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] } ] ``` @@ -146,12 +259,63 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ Status Code **200** -| Name | Type | Required | Restrictions | Description | -| ---------------- | ------- | -------- | ------------ | ----------- | -| `[array item]` | array | false | | | -| `» assignable` | boolean | false | | | -| `» display_name` | string | false | | | -| `» name` | string | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | +| `[array item]` | array | false | | | +| `» assignable` | boolean | false | | | +| `» built_in` | boolean | false | | Built in roles are immutable | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_permissions` | object | false | | map[] -> Permissions | +| `»» [any property]` | array | false | | | +| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»»» negate` | boolean | false | | Negate makes this a negative permission | +| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | + +#### Enumerated 
Values + +| Property | Value | +| --------------- | ----------------------- | +| `action` | `application_connect` | +| `action` | `assign` | +| `action` | `create` | +| `action` | `delete` | +| `action` | `read` | +| `action` | `read_personal` | +| `action` | `ssh` | +| `action` | `update` | +| `action` | `update_personal` | +| `action` | `use` | +| `action` | `view_insights` | +| `action` | `start` | +| `action` | `stop` | +| `resource_type` | `*` | +| `resource_type` | `api_key` | +| `resource_type` | `assign_org_role` | +| `resource_type` | `assign_role` | +| `resource_type` | `audit_log` | +| `resource_type` | `debug_info` | +| `resource_type` | `deployment_config` | +| `resource_type` | `deployment_stats` | +| `resource_type` | `file` | +| `resource_type` | `group` | +| `resource_type` | `license` | +| `resource_type` | `oauth2_app` | +| `resource_type` | `oauth2_app_code_token` | +| `resource_type` | `oauth2_app_secret` | +| `resource_type` | `organization` | +| `resource_type` | `organization_member` | +| `resource_type` | `provisioner_daemon` | +| `resource_type` | `replicas` | +| `resource_type` | `system` | +| `resource_type` | `tailnet_coordinator` | +| `resource_type` | `template` | +| `resource_type` | `user` | +| `resource_type` | `workspace` | +| `resource_type` | `workspace_dormant` | +| `resource_type` | `workspace_proxy` | To perform this operation, you must be authenticated. [Learn more](authentication.md). 
diff --git a/docs/api/schemas.md b/docs/api/schemas.md index ae35585e2fb12..d1b6c6a3d82e0 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -802,18 +802,54 @@ ```json { "assignable": true, + "built_in": true, "display_name": "string", - "name": "string" + "name": "string", + "organization_permissions": { + "property1": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "property2": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] + }, + "site_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], + "user_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ] } ``` ### Properties -| Name | Type | Required | Restrictions | Description | -| -------------- | ------- | -------- | ------------ | ----------- | -| `assignable` | boolean | false | | | -| `display_name` | string | false | | | -| `name` | string | false | | | +| Name | Type | Required | Restrictions | Description | +| -------------------------- | --------------------------------------------------- | -------- | ------------ | ---------------------------- | +| `assignable` | boolean | false | | | +| `built_in` | boolean | false | | Built in roles are immutable | +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_permissions` | object | false | | map[] -> Permissions | +| » `[any property]` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.AuditAction diff --git a/enterprise/cli/rolescmd.go b/enterprise/cli/rolescmd.go new file mode 100644 index 0000000000000..b0a9346697a01 --- /dev/null +++ b/enterprise/cli/rolescmd.go @@ -0,0 +1,111 @@ +package cli + +import ( + "fmt" + 
"slices" + "strings" + + "golang.org/x/xerrors" + + "github.com/coder/coder/v2/cli/cliui" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/serpent" +) + +// **NOTE** Only covers site wide roles at present. Org scoped roles maybe +// should be nested under some command that scopes to an org?? + +func (r *RootCmd) roles() *serpent.Command { + cmd := &serpent.Command{ + Use: "roles", + Short: "Manage site-wide roles.", + Aliases: []string{"role"}, + Handler: func(inv *serpent.Invocation) error { + return inv.Command.HelpHandler(inv) + }, + Hidden: true, + Children: []*serpent.Command{ + r.showRole(), + }, + } + return cmd +} + +func (r *RootCmd) showRole() *serpent.Command { + formatter := cliui.NewOutputFormatter( + cliui.ChangeFormatterData( + cliui.TableFormat([]assignableRolesTableRow{}, []string{"name", "display_name", "built_in", "site_permissions", "org_permissions", "user_permissions"}), + func(data any) (any, error) { + input, ok := data.([]codersdk.AssignableRoles) + if !ok { + return nil, xerrors.Errorf("expected []codersdk.AssignableRoles got %T", data) + } + rows := make([]assignableRolesTableRow, 0, len(input)) + for _, role := range input { + rows = append(rows, assignableRolesTableRow{ + Name: role.Name, + DisplayName: role.DisplayName, + SitePermissions: fmt.Sprintf("%d permissions", len(role.SitePermissions)), + OrganizationPermissions: fmt.Sprintf("%d organizations", len(role.OrganizationPermissions)), + UserPermissions: fmt.Sprintf("%d permissions", len(role.UserPermissions)), + Assignable: role.Assignable, + BuiltIn: role.BuiltIn, + }) + } + return rows, nil + }, + ), + cliui.JSONFormat(), + ) + + client := new(codersdk.Client) + cmd := &serpent.Command{ + Use: "show [role_names ...]", + Short: "Show role(s)", + Middleware: serpent.Chain( + r.InitClient(client), + ), + Handler: func(inv *serpent.Invocation) error { + ctx := inv.Context() + roles, err := client.ListSiteRoles(ctx) + if err != nil { + return xerrors.Errorf("listing roles: 
%w", err) + } + + if len(inv.Args) > 0 { + // filter roles + filtered := make([]codersdk.AssignableRoles, 0) + for _, role := range roles { + if slices.ContainsFunc(inv.Args, func(s string) bool { + return strings.EqualFold(s, role.Name) + }) { + filtered = append(filtered, role) + } + } + roles = filtered + } + + out, err := formatter.Format(inv.Context(), roles) + if err != nil { + return err + } + + _, err = fmt.Fprintln(inv.Stdout, out) + return err + }, + } + formatter.AttachOptions(&cmd.Options) + + return cmd +} + +type assignableRolesTableRow struct { + Name string `table:"name,default_sort"` + DisplayName string `table:"display_name"` + SitePermissions string ` table:"site_permissions"` + // map[] -> Permissions + OrganizationPermissions string `table:"org_permissions"` + UserPermissions string `table:"user_permissions"` + Assignable bool `table:"assignable"` + BuiltIn bool `table:"built_in"` +} diff --git a/enterprise/cli/rolescmd_test.go b/enterprise/cli/rolescmd_test.go new file mode 100644 index 0000000000000..df776603e0ac4 --- /dev/null +++ b/enterprise/cli/rolescmd_test.go @@ -0,0 +1,68 @@ +package cli_test + +import ( + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" + "github.com/coder/coder/v2/enterprise/coderd/license" + "github.com/coder/coder/v2/pty/ptytest" + "github.com/coder/coder/v2/testutil" +) + +func TestShowRoles(t *testing.T) { + t.Parallel() + + t.Run("OK", func(t *testing.T) { + t.Parallel() + + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, admin := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + 
codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + // Requires an owner + client, _ := coderdtest.CreateAnotherUser(t, owner, admin.OrganizationID, rbac.RoleOwner()) + + const expectedRole = "test-role" + ctx := testutil.Context(t, testutil.WaitMedium) + _, err := client.PatchRole(ctx, codersdk.Role{ + Name: expectedRole, + DisplayName: "Test Role", + SitePermissions: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ + codersdk.ResourceWorkspace: {codersdk.ActionRead, codersdk.ActionUpdate}, + }), + }) + require.NoError(t, err, "create role") + + inv, conf := newCLI(t, "roles", "show", "test-role") + + pty := ptytest.New(t) + inv.Stdout = pty.Output() + clitest.SetupConfig(t, client, conf) + + err = inv.Run() + require.NoError(t, err) + + matches := []string{ + "test-role", "2 permissions", + } + + for _, match := range matches { + pty.ExpectMatch(match) + } + }) +} diff --git a/enterprise/cli/root.go b/enterprise/cli/root.go index 74615ff0e9d2e..69b686c4174aa 100644 --- a/enterprise/cli/root.go +++ b/enterprise/cli/root.go @@ -17,6 +17,7 @@ func (r *RootCmd) enterpriseOnly() []*serpent.Command { r.licenses(), r.groups(), r.provisionerDaemons(), + r.roles(), } } diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go index 2224e7f25c0bf..552197f7c4401 100644 --- a/enterprise/coderd/roles.go +++ b/enterprise/coderd/roles.go @@ -27,6 +27,14 @@ func (api *API) patchRole(rw http.ResponseWriter, r *http.Request) { return } + if err := httpapi.NameValid(req.Name); err != nil { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid role name", + Detail: err.Error(), + }) + return + } + if len(req.OrganizationPermissions) > 0 { // Org perms should be assigned only in org specific roles. Otherwise, // it gets complicated to keep track of who can do what. 
diff --git a/enterprise/coderd/roles_test.go b/enterprise/coderd/roles_test.go index 450f80e0b7fe3..67b863e63bacd 100644 --- a/enterprise/coderd/roles_test.go +++ b/enterprise/coderd/roles_test.go @@ -2,6 +2,7 @@ package coderd_test import ( "bytes" + "slices" "testing" "github.com/stretchr/testify/require" @@ -63,13 +64,12 @@ func TestCustomRole(t *testing.T) { coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) // Verify the role exists in the list - // TODO: Turn this assertion back on when the cli api experience is created. - //allRoles, err := tmplAdmin.ListSiteRoles(ctx) - //require.NoError(t, err) - // - //require.True(t, slices.ContainsFunc(allRoles, func(selected codersdk.AssignableRoles) bool { - // return selected.Name == role.Name - //}), "role missing from site role list") + allRoles, err := tmplAdmin.ListSiteRoles(ctx) + require.NoError(t, err) + + require.True(t, slices.ContainsFunc(allRoles, func(selected codersdk.AssignableRoles) bool { + return selected.Name == role.Name + }), "role missing from site role list") }) // Revoked licenses cannot modify/create custom roles, but they can @@ -167,4 +167,37 @@ func TestCustomRole(t *testing.T) { }) require.ErrorContains(t, err, "forbidden") }) + + t.Run("InvalidName", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, _ := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + //nolint:gocritic // owner is required for this + _, err := owner.PatchRole(ctx, codersdk.Role{ + Name: "Bad_Name", // No underscores allowed + DisplayName: "Testing Purposes", + // Basically creating a template admin manually + SitePermissions: 
codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ + codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionViewInsights}, + codersdk.ResourceFile: {codersdk.ActionCreate, codersdk.ActionRead}, + codersdk.ResourceWorkspace: {codersdk.ActionRead}, + }), + OrganizationPermissions: nil, + UserPermissions: nil, + }) + require.ErrorContains(t, err, "Invalid role name") + }) } diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 08b1ac2732d82..a809b10220993 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -65,8 +65,9 @@ export interface ArchiveTemplateVersionsResponse { } // From codersdk/roles.go -export interface AssignableRoles extends SlimRole { +export interface AssignableRoles extends Role { readonly assignable: boolean; + readonly built_in: boolean; } // From codersdk/audit.go diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 5fe1e9cc7b0ff..22a4c5db6edd9 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -229,19 +229,28 @@ export const MockUpdateCheck: TypesGen.UpdateCheckResponse = { version: "v99.999.9999+c9cdf14", }; -export const MockOwnerRole: TypesGen.SlimRole = { +export const MockOwnerRole: TypesGen.Role = { name: "owner", display_name: "Owner", + site_permissions: [], + organization_permissions: {}, + user_permissions: [], }; -export const MockUserAdminRole: TypesGen.SlimRole = { +export const MockUserAdminRole: TypesGen.Role = { name: "user_admin", display_name: "User Admin", + site_permissions: [], + organization_permissions: {}, + user_permissions: [], }; -export const MockTemplateAdminRole: TypesGen.SlimRole = { +export const MockTemplateAdminRole: TypesGen.Role = { name: "template_admin", display_name: "Template Admin", + site_permissions: [], + organization_permissions: {}, + user_permissions: [], }; export const MockMemberRole: 
TypesGen.SlimRole = { @@ -249,20 +258,24 @@ export const MockMemberRole: TypesGen.SlimRole = { display_name: "Member", }; -export const MockAuditorRole: TypesGen.SlimRole = { +export const MockAuditorRole: TypesGen.Role = { name: "auditor", display_name: "Auditor", + site_permissions: [], + organization_permissions: {}, + user_permissions: [], }; // assignableRole takes a role and a boolean. The boolean implies if the // actor can assign (add/remove) the role from other users. export function assignableRole( - role: TypesGen.SlimRole, + role: TypesGen.Role, assignable: boolean, ): TypesGen.AssignableRoles { return { ...role, assignable: assignable, + built_in: true, }; } From 0a86d6d1767a4d790fe98680cf9eb8b8abb8f3d5 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Tue, 21 May 2024 13:26:34 -0500 Subject: [PATCH 084/149] chore: expose formatExamples enterprise commands (#13304) Exporting it allows enterprise functions to also use it. --- cli/configssh.go | 6 +++--- cli/create.go | 4 ++-- cli/dotfiles.go | 4 ++-- cli/externalauth.go | 6 +++--- cli/organization.go | 6 +++--- cli/portforward.go | 12 ++++++------ cli/root.go | 16 ++++++++-------- cli/root_internal_test.go | 8 ++++---- cli/schedule.go | 12 ++++++------ cli/templates.go | 6 +++--- cli/templateversions.go | 4 ++-- cli/tokens.go | 8 ++++---- cli/userlist.go | 4 ++-- cli/userstatus.go | 4 ++-- 14 files changed, 50 insertions(+), 50 deletions(-) diff --git a/cli/configssh.go b/cli/configssh.go index 87cfe88a64253..26465bf75fe83 100644 --- a/cli/configssh.go +++ b/cli/configssh.go @@ -230,12 +230,12 @@ func (r *RootCmd) configSSH() *serpent.Command { Annotations: workspaceCommand, Use: "config-ssh", Short: "Add an SSH Host entry for your workspaces \"ssh coder.workspace\"", - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Description: "You can use -o (or --ssh-option) so set SSH options to be used for all your workspaces", Command: "coder config-ssh -o ForwardAgent=yes", }, - example{ + 
Example{ Description: "You can use --dry-run (or -n) to see the changes that would be made", Command: "coder config-ssh --dry-run", }, diff --git a/cli/create.go b/cli/create.go index ab9a204fad3cf..46d67c22663d2 100644 --- a/cli/create.go +++ b/cli/create.go @@ -35,8 +35,8 @@ func (r *RootCmd) create() *serpent.Command { Annotations: workspaceCommand, Use: "create [name]", Short: "Create a workspace", - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Description: "Create a workspace for another user (if you have permission)", Command: "coder create /", }, diff --git a/cli/dotfiles.go b/cli/dotfiles.go index 3dd0b2847940f..03ac9f40dafd1 100644 --- a/cli/dotfiles.go +++ b/cli/dotfiles.go @@ -28,8 +28,8 @@ func (r *RootCmd) dotfiles() *serpent.Command { Use: "dotfiles ", Middleware: serpent.RequireNArgs(1), Short: "Personalize your workspace by applying a canonical dotfiles repository", - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Description: "Check out and install a dotfiles repository without prompts", Command: "coder dotfiles --yes git@github.com:example/dotfiles.git", }, diff --git a/cli/externalauth.go b/cli/externalauth.go index d3dd407ae8e14..61d2139eb349d 100644 --- a/cli/externalauth.go +++ b/cli/externalauth.go @@ -35,8 +35,8 @@ func (r *RootCmd) externalAuthAccessToken() *serpent.Command { Short: "Print auth for an external provider", Long: "Print an access-token for an external auth provider. " + "The access-token will be validated and sent to stdout with exit code 0. 
" + - "If a valid access-token cannot be obtained, the URL to authenticate will be sent to stdout with exit code 1\n" + formatExamples( - example{ + "If a valid access-token cannot be obtained, the URL to authenticate will be sent to stdout with exit code 1\n" + FormatExamples( + Example{ Description: "Ensure that the user is authenticated with GitHub before cloning.", Command: `#!/usr/bin/env sh @@ -49,7 +49,7 @@ else fi `, }, - example{ + Example{ Description: "Obtain an extra property of an access token for additional metadata.", Command: "coder external-auth access-token slack --extra \"authed_user.id\"", }, diff --git a/cli/organization.go b/cli/organization.go index a2942b0c642e0..d9ea5c7aaf4ac 100644 --- a/cli/organization.go +++ b/cli/organization.go @@ -43,12 +43,12 @@ func (r *RootCmd) switchOrganization() *serpent.Command { cmd := &serpent.Command{ Use: "set ", Short: "set the organization used by the CLI. Pass an empty string to reset to the default organization.", - Long: "set the organization used by the CLI. Pass an empty string to reset to the default organization.\n" + formatExamples( - example{ + Long: "set the organization used by the CLI. Pass an empty string to reset to the default organization.\n" + FormatExamples( + Example{ Description: "Remove the current organization and defer to the default.", Command: "coder organizations set ''", }, - example{ + Example{ Description: "Switch to a custom organization.", Command: "coder organizations set my-org", }, diff --git a/cli/portforward.go b/cli/portforward.go index 8de89bd39078d..2c027a217c5ba 100644 --- a/cli/portforward.go +++ b/cli/portforward.go @@ -35,24 +35,24 @@ func (r *RootCmd) portForward() *serpent.Command { Use: "port-forward ", Short: `Forward ports from a workspace to the local machine. 
For reverse port forwarding, use "coder ssh -R".`, Aliases: []string{"tunnel"}, - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Description: "Port forward a single TCP port from 1234 in the workspace to port 5678 on your local machine", Command: "coder port-forward --tcp 5678:1234", }, - example{ + Example{ Description: "Port forward a single UDP port from port 9000 to port 9000 on your local machine", Command: "coder port-forward --udp 9000", }, - example{ + Example{ Description: "Port forward multiple TCP ports and a UDP port", Command: "coder port-forward --tcp 8080:8080 --tcp 9000:3000 --udp 5353:53", }, - example{ + Example{ Description: "Port forward multiple ports (TCP or UDP) in condensed syntax", Command: "coder port-forward --tcp 8080,9000:3000,9090-9092,10000-10002:10010-10012", }, - example{ + Example{ Description: "Port forward specifying the local address to bind to", Command: "coder port-forward --tcp 1.2.3.4:8080:8080", }, diff --git a/cli/root.go b/cli/root.go index d9407cf21766c..2c7443cde5749 100644 --- a/cli/root.go +++ b/cli/root.go @@ -181,12 +181,12 @@ func (r *RootCmd) Command(subcommands []*serpent.Command) (*serpent.Command, err ` cmd := &serpent.Command{ Use: "coder [global-flags] ", - Long: fmt.Sprintf(fmtLong, buildinfo.Version()) + formatExamples( - example{ + Long: fmt.Sprintf(fmtLong, buildinfo.Version()) + FormatExamples( + Example{ Description: "Start a Coder server", Command: "coder server", }, - example{ + Example{ Description: "Get started by creating a template from an example", Command: "coder templates init", }, @@ -753,16 +753,16 @@ func isTTYWriter(inv *serpent.Invocation, writer io.Writer) bool { return isatty.IsTerminal(file.Fd()) } -// example represents a standard example for command usage, to be used -// with formatExamples. -type example struct { +// Example represents a standard example for command usage, to be used +// with FormatExamples. 
+type Example struct { Description string Command string } -// formatExamples formats the examples as width wrapped bulletpoint +// FormatExamples formats the examples as width wrapped bulletpoint // descriptions with the command underneath. -func formatExamples(examples ...example) string { +func FormatExamples(examples ...Example) string { var sb strings.Builder padStyle := cliui.DefaultStyles.Wrap.With(pretty.XPad(4, 0)) diff --git a/cli/root_internal_test.go b/cli/root_internal_test.go index 9bb05a33b1995..c10c853769900 100644 --- a/cli/root_internal_test.go +++ b/cli/root_internal_test.go @@ -45,7 +45,7 @@ func Test_formatExamples(t *testing.T) { tests := []struct { name string - examples []example + examples []Example wantMatches []string }{ { @@ -55,7 +55,7 @@ func Test_formatExamples(t *testing.T) { }, { name: "Output examples", - examples: []example{ + examples: []Example{ { Description: "Hello world.", Command: "echo hello", @@ -72,7 +72,7 @@ func Test_formatExamples(t *testing.T) { }, { name: "No description outputs commands", - examples: []example{ + examples: []Example{ { Command: "echo hello", }, @@ -87,7 +87,7 @@ func Test_formatExamples(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - got := formatExamples(tt.examples...) + got := FormatExamples(tt.examples...) 
if len(tt.wantMatches) == 0 { require.Empty(t, got) } else { diff --git a/cli/schedule.go b/cli/schedule.go index a3ba597df0937..80fdc873fb205 100644 --- a/cli/schedule.go +++ b/cli/schedule.go @@ -140,8 +140,8 @@ func (r *RootCmd) scheduleStart() *serpent.Command { client := new(codersdk.Client) cmd := &serpent.Command{ Use: "start { [day-of-week] [location] | manual }", - Long: scheduleStartDescriptionLong + "\n" + formatExamples( - example{ + Long: scheduleStartDescriptionLong + "\n" + FormatExamples( + Example{ Description: "Set the workspace to start at 9:30am (in Dublin) from Monday to Friday", Command: "coder schedule start my-workspace 9:30AM Mon-Fri Europe/Dublin", }, @@ -189,8 +189,8 @@ func (r *RootCmd) scheduleStop() *serpent.Command { client := new(codersdk.Client) return &serpent.Command{ Use: "stop { | manual }", - Long: scheduleStopDescriptionLong + "\n" + formatExamples( - example{ + Long: scheduleStopDescriptionLong + "\n" + FormatExamples( + Example{ Command: "coder schedule stop my-workspace 2h30m", }, ), @@ -234,8 +234,8 @@ func (r *RootCmd) scheduleOverride() *serpent.Command { overrideCmd := &serpent.Command{ Use: "override-stop ", Short: "Override the stop time of a currently running workspace instance.", - Long: scheduleOverrideDescriptionLong + "\n" + formatExamples( - example{ + Long: scheduleOverrideDescriptionLong + "\n" + FormatExamples( + Example{ Command: "coder schedule override-stop my-workspace 90m", }, ), diff --git a/cli/templates.go b/cli/templates.go index 4843ec440e8c3..cb5d47f901e07 100644 --- a/cli/templates.go +++ b/cli/templates.go @@ -16,12 +16,12 @@ func (r *RootCmd) templates() *serpent.Command { cmd := &serpent.Command{ Use: "templates", Short: "Manage templates", - Long: "Templates are written in standard Terraform and describe the infrastructure for workspaces\n" + formatExamples( - example{ + Long: "Templates are written in standard Terraform and describe the infrastructure for workspaces\n" + FormatExamples( + 
Example{ Description: "Make changes to your template, and plan the changes", Command: "coder templates plan my-template", }, - example{ + Example{ Description: "Create or push an update to the template. Your developers can update their workspaces", Command: "coder templates push my-template", }, diff --git a/cli/templateversions.go b/cli/templateversions.go index aa33d7d7d2ba4..4460c3b5bfee5 100644 --- a/cli/templateversions.go +++ b/cli/templateversions.go @@ -19,8 +19,8 @@ func (r *RootCmd) templateVersions() *serpent.Command { Use: "versions", Short: "Manage different versions of the specified template", Aliases: []string{"version"}, - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Description: "List versions of a specific template", Command: "coder templates versions list my-template", }, diff --git a/cli/tokens.go b/cli/tokens.go index eb9a57762e13d..4961ac7e3e9b5 100644 --- a/cli/tokens.go +++ b/cli/tokens.go @@ -17,16 +17,16 @@ func (r *RootCmd) tokens() *serpent.Command { cmd := &serpent.Command{ Use: "tokens", Short: "Manage personal access tokens", - Long: "Tokens are used to authenticate automated clients to Coder.\n" + formatExamples( - example{ + Long: "Tokens are used to authenticate automated clients to Coder.\n" + FormatExamples( + Example{ Description: "Create a token for automation", Command: "coder tokens create", }, - example{ + Example{ Description: "List your tokens", Command: "coder tokens ls", }, - example{ + Example{ Description: "Remove a token by ID", Command: "coder tokens rm WuoWs4ZsMX", }, diff --git a/cli/userlist.go b/cli/userlist.go index 5344522f89c4a..955154ce30f62 100644 --- a/cli/userlist.go +++ b/cli/userlist.go @@ -57,8 +57,8 @@ func (r *RootCmd) userSingle() *serpent.Command { cmd := &serpent.Command{ Use: "show ", Short: "Show a single user. 
Use 'me' to indicate the currently authenticated user.", - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Command: "coder users show me", }, ), diff --git a/cli/userstatus.go b/cli/userstatus.go index 923247e669186..fae2805de710d 100644 --- a/cli/userstatus.go +++ b/cli/userstatus.go @@ -40,8 +40,8 @@ func (r *RootCmd) createUserStatusCommand(sdkStatus codersdk.UserStatus) *serpen Use: fmt.Sprintf("%s ", verb), Short: short, Aliases: aliases, - Long: formatExamples( - example{ + Long: FormatExamples( + Example{ Command: fmt.Sprintf("coder users %s example_user", verb), }, ), From 3f1e9c038ace3a84ff1785e778537635989eed4d Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Tue, 21 May 2024 12:46:31 -0600 Subject: [PATCH 085/149] feat(coderd): add endpoints for editing and deleting organizations (#13287) --- .vscode/settings.json | 1 - coderd/apidoc/docs.go | 87 ++++++++++++ coderd/apidoc/swagger.json | 75 +++++++++++ coderd/coderd.go | 2 + coderd/database/dbauthz/dbauthz.go | 11 ++ coderd/database/dbauthz/dbauthz_test.go | 19 ++- coderd/database/dbmem/dbmem.go | 79 ++++++++--- coderd/database/dbmetrics/dbmetrics.go | 14 ++ coderd/database/dbmock/dbmock.go | 29 ++++ coderd/database/querier.go | 2 + coderd/database/queries.sql.go | 44 ++++++ coderd/database/queries/organizations.sql | 17 +++ coderd/organizations.go | 91 +++++++++++++ coderd/organizations_test.go | 156 +++++++++++++++++++--- codersdk/organizations.go | 57 ++++++++ codersdk/users.go | 20 --- docs/api/organizations.md | 95 +++++++++++++ docs/api/schemas.md | 14 ++ site/src/api/typesGenerated.ts | 7 +- 19 files changed, 757 insertions(+), 63 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index c95554245cab5..c824ea4edb783 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -195,7 +195,6 @@ "**.pb.go": true, "**/*.gen.json": true, "**/testdata/*": true, - "**Generated.ts": true, "coderd/apidoc/**": true, "docs/api/*.md": true, 
"docs/templates/*.md": true, diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 6dde991904811..34c4c6b529d19 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -1987,6 +1987,82 @@ const docTemplate = `{ } } } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Organizations" + ], + "summary": "Delete organization", + "operationId": "delete-organization", + "parameters": [ + { + "type": "string", + "description": "Organization ID or name", + "name": "organization", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Response" + } + } + } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Organizations" + ], + "summary": "Update organization", + "operationId": "update-organization", + "parameters": [ + { + "type": "string", + "description": "Organization ID or name", + "name": "organization", + "in": "path", + "required": true + }, + { + "description": "Patch organization request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateOrganizationRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Organization" + } + } + } } }, "/organizations/{organization}/groups": { @@ -12099,6 +12175,17 @@ const docTemplate = `{ } } }, + "codersdk.UpdateOrganizationRequest": { + "type": "object", + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string" + } + } + }, "codersdk.UpdateRoles": { "type": "object", "properties": { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index d52e3c515d7d2..43aacb5e0cc32 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -1732,6 +1732,72 @@ 
} } } + }, + "delete": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Organizations"], + "summary": "Delete organization", + "operationId": "delete-organization", + "parameters": [ + { + "type": "string", + "description": "Organization ID or name", + "name": "organization", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Response" + } + } + } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "consumes": ["application/json"], + "produces": ["application/json"], + "tags": ["Organizations"], + "summary": "Update organization", + "operationId": "update-organization", + "parameters": [ + { + "type": "string", + "description": "Organization ID or name", + "name": "organization", + "in": "path", + "required": true + }, + { + "description": "Patch organization request", + "name": "request", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/codersdk.UpdateOrganizationRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.Organization" + } + } + } } }, "/organizations/{organization}/groups": { @@ -10958,6 +11024,15 @@ } } }, + "codersdk.UpdateOrganizationRequest": { + "type": "object", + "required": ["name"], + "properties": { + "name": { + "type": "string" + } + } + }, "codersdk.UpdateRoles": { "type": "object", "properties": { diff --git a/coderd/coderd.go b/coderd/coderd.go index 80f77d92ee672..9ee21a23cf79f 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -812,6 +812,8 @@ func New(options *Options) *API { httpmw.ExtractOrganizationParam(options.Database), ) r.Get("/", api.organization) + r.Patch("/", api.patchOrganization) + r.Delete("/", api.deleteOrganization) r.Post("/templateversions", api.postTemplateVersionsByOrganization) r.Route("/templates", func(r chi.Router) { r.Post("/", 
api.postTemplateByOrganization) diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index bfb28ece948c3..0ab78e75fe196 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -984,6 +984,10 @@ func (q *querier) DeleteOldWorkspaceAgentStats(ctx context.Context) error { return q.db.DeleteOldWorkspaceAgentStats(ctx) } +func (q *querier) DeleteOrganization(ctx context.Context, id uuid.UUID) error { + return deleteQ(q.log, q.auth, q.db.GetOrganizationByID, q.db.DeleteOrganization)(ctx, id) +} + func (q *querier) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { if err := q.authorizeContext(ctx, policy.ActionDelete, rbac.ResourceSystem); err != nil { return err @@ -2853,6 +2857,13 @@ func (q *querier) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg dat return q.db.UpdateOAuth2ProviderAppSecretByID(ctx, arg) } +func (q *querier) UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { + fetch := func(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { + return q.db.GetOrganizationByID(ctx, arg.ID) + } + return updateWithReturn(q.log, q.auth, fetch, q.db.UpdateOrganization)(ctx, arg) +} + func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerDaemon); err != nil { return err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index e2b6171b587c3..8e84f4644b91e 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -624,7 +624,7 @@ func (s *MethodTestSuite) TestOrganization() { s.Run("InsertOrganization", s.Subtest(func(db database.Store, check *expects) { check.Args(database.InsertOrganizationParams{ ID: uuid.New(), - Name: 
"random", + Name: "new-org", }).Asserts(rbac.ResourceOrganization, policy.ActionCreate) })) s.Run("InsertOrganizationMember", s.Subtest(func(db database.Store, check *expects) { @@ -639,6 +639,23 @@ func (s *MethodTestSuite) TestOrganization() { rbac.ResourceAssignRole.InOrg(o.ID), policy.ActionAssign, rbac.ResourceOrganizationMember.InOrg(o.ID).WithID(u.ID), policy.ActionCreate) })) + s.Run("UpdateOrganization", s.Subtest(func(db database.Store, check *expects) { + o := dbgen.Organization(s.T(), db, database.Organization{ + Name: "something-unique", + }) + check.Args(database.UpdateOrganizationParams{ + ID: o.ID, + Name: "something-different", + }).Asserts(o, policy.ActionUpdate) + })) + s.Run("DeleteOrganization", s.Subtest(func(db database.Store, check *expects) { + o := dbgen.Organization(s.T(), db, database.Organization{ + Name: "doomed", + }) + check.Args( + o.ID, + ).Asserts(o, policy.ActionDelete) + })) s.Run("UpdateMemberRoles", s.Subtest(func(db database.Store, check *expects) { o := dbgen.Organization(s.T(), db, database.Organization{}) u := dbgen.User(s.T(), db, database.User{}) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 0a8fe6e24a8a6..5f2ebbff25003 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -33,15 +33,18 @@ import ( var validProxyByHostnameRegex = regexp.MustCompile(`^[a-zA-Z0-9._-]+$`) -var errForeignKeyConstraint = &pq.Error{ - Code: "23503", - Message: "update or delete on table violates foreign key constraint", -} - -var errDuplicateKey = &pq.Error{ - Code: "23505", - Message: "duplicate key value violates unique constraint", -} +// A full mapping of error codes from pq v1.10.9 can be found here: +// https://github.com/lib/pq/blob/2a217b94f5ccd3de31aec4152a541b9ff64bed05/error.go#L75 +var ( + errForeignKeyConstraint = &pq.Error{ + Code: "23503", // "foreign_key_violation" + Message: "update or delete on table violates foreign key constraint", + } + errUniqueConstraint 
= &pq.Error{ + Code: "23505", // "unique_violation" + Message: "duplicate key value violates unique constraint", + } +) // New returns an in-memory fake of the database. func New() database.Store { @@ -1601,6 +1604,19 @@ func (q *FakeQuerier) DeleteOldWorkspaceAgentStats(_ context.Context) error { return nil } +func (q *FakeQuerier) DeleteOrganization(_ context.Context, id uuid.UUID) error { + q.mutex.Lock() + defer q.mutex.Unlock() + + for i, org := range q.organizations { + if org.ID == id && !org.IsDefault { + q.organizations = append(q.organizations[:i], q.organizations[i+1:]...) + return nil + } + } + return sql.ErrNoRows +} + func (q *FakeQuerier) DeleteReplicasUpdatedBefore(_ context.Context, before time.Time) error { q.mutex.Lock() defer q.mutex.Unlock() @@ -5823,7 +5839,7 @@ func (q *FakeQuerier) InsertDBCryptKey(_ context.Context, arg database.InsertDBC for _, key := range q.dbcryptKeys { if key.Number == arg.Number { - return errDuplicateKey + return errUniqueConstraint } } @@ -5927,7 +5943,7 @@ func (q *FakeQuerier) InsertGroup(_ context.Context, arg database.InsertGroupPar for _, group := range q.groups { if group.OrganizationID == arg.OrganizationID && group.Name == arg.Name { - return database.Group{}, errDuplicateKey + return database.Group{}, errUniqueConstraint } } @@ -5958,7 +5974,7 @@ func (q *FakeQuerier) InsertGroupMember(_ context.Context, arg database.InsertGr for _, member := range q.groupMembers { if member.GroupID == arg.GroupID && member.UserID == arg.UserID { - return errDuplicateKey + return errUniqueConstraint } } @@ -6042,7 +6058,7 @@ func (q *FakeQuerier) InsertOAuth2ProviderApp(_ context.Context, arg database.In for _, app := range q.oauth2ProviderApps { if app.Name == arg.Name { - return database.OAuth2ProviderApp{}, errDuplicateKey + return database.OAuth2ProviderApp{}, errUniqueConstraint } } @@ -6423,7 +6439,7 @@ func (q *FakeQuerier) InsertUser(_ context.Context, arg database.InsertUserParam for _, user := range q.users { if 
user.Username == arg.Username && !user.Deleted { - return database.User{}, errDuplicateKey + return database.User{}, errUniqueConstraint } } @@ -6836,7 +6852,7 @@ func (q *FakeQuerier) InsertWorkspaceProxy(_ context.Context, arg database.Inser lastRegionID := int32(0) for _, p := range q.workspaceProxies { if !p.Deleted && p.Name == arg.Name { - return database.WorkspaceProxy{}, errDuplicateKey + return database.WorkspaceProxy{}, errUniqueConstraint } if p.RegionID > lastRegionID { lastRegionID = p.RegionID @@ -7230,7 +7246,7 @@ func (q *FakeQuerier) UpdateOAuth2ProviderAppByID(_ context.Context, arg databas for _, app := range q.oauth2ProviderApps { if app.Name == arg.Name && app.ID != arg.ID { - return database.OAuth2ProviderApp{}, errDuplicateKey + return database.OAuth2ProviderApp{}, errUniqueConstraint } } @@ -7278,6 +7294,33 @@ func (q *FakeQuerier) UpdateOAuth2ProviderAppSecretByID(_ context.Context, arg d return database.OAuth2ProviderAppSecret{}, sql.ErrNoRows } +func (q *FakeQuerier) UpdateOrganization(_ context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { + err := validateDatabaseType(arg) + if err != nil { + return database.Organization{}, err + } + + q.mutex.Lock() + defer q.mutex.Unlock() + + // Enforce the unique constraint, because the API endpoint relies on the database catching + // non-unique names during updates. 
+ for _, org := range q.organizations { + if org.Name == arg.Name && org.ID != arg.ID { + return database.Organization{}, errUniqueConstraint + } + } + + for i, org := range q.organizations { + if org.ID == arg.ID { + org.Name = arg.Name + q.organizations[i] = org + return org, nil + } + } + return database.Organization{}, sql.ErrNoRows +} + func (q *FakeQuerier) UpdateProvisionerDaemonLastSeenAt(_ context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { err := validateDatabaseType(arg) if err != nil { @@ -7875,7 +7918,7 @@ func (q *FakeQuerier) UpdateWorkspace(_ context.Context, arg database.UpdateWork continue } if other.Name == arg.Name { - return database.Workspace{}, errDuplicateKey + return database.Workspace{}, errUniqueConstraint } } @@ -8215,7 +8258,7 @@ func (q *FakeQuerier) UpdateWorkspaceProxy(_ context.Context, arg database.Updat for _, p := range q.workspaceProxies { if p.Name == arg.Name && p.ID != arg.ID { - return database.WorkspaceProxy{}, errDuplicateKey + return database.WorkspaceProxy{}, errUniqueConstraint } } diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 1b59724a6ea21..bb5a38ef82c61 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -284,6 +284,13 @@ func (m metricsStore) DeleteOldWorkspaceAgentStats(ctx context.Context) error { return err } +func (m metricsStore) DeleteOrganization(ctx context.Context, id uuid.UUID) error { + start := time.Now() + r0 := m.s.DeleteOrganization(ctx, id) + m.queryLatencies.WithLabelValues("DeleteOrganization").Observe(time.Since(start).Seconds()) + return r0 +} + func (m metricsStore) DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error { start := time.Now() err := m.s.DeleteReplicasUpdatedBefore(ctx, updatedAt) @@ -1845,6 +1852,13 @@ func (m metricsStore) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg return r0, r1 } +func (m metricsStore) 
UpdateOrganization(ctx context.Context, arg database.UpdateOrganizationParams) (database.Organization, error) { + start := time.Now() + r0, r1 := m.s.UpdateOrganization(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateOrganization").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg database.UpdateProvisionerDaemonLastSeenAtParams) error { start := time.Now() r0 := m.s.UpdateProvisionerDaemonLastSeenAt(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 128b76cfcd0c6..90d7a20eb6ff8 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -455,6 +455,20 @@ func (mr *MockStoreMockRecorder) DeleteOldWorkspaceAgentStats(arg0 any) *gomock. return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOldWorkspaceAgentStats", reflect.TypeOf((*MockStore)(nil).DeleteOldWorkspaceAgentStats), arg0) } +// DeleteOrganization mocks base method. +func (m *MockStore) DeleteOrganization(arg0 context.Context, arg1 uuid.UUID) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "DeleteOrganization", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// DeleteOrganization indicates an expected call of DeleteOrganization. +func (mr *MockStoreMockRecorder) DeleteOrganization(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "DeleteOrganization", reflect.TypeOf((*MockStore)(nil).DeleteOrganization), arg0, arg1) +} + // DeleteReplicasUpdatedBefore mocks base method. 
func (m *MockStore) DeleteReplicasUpdatedBefore(arg0 context.Context, arg1 time.Time) error { m.ctrl.T.Helper() @@ -3881,6 +3895,21 @@ func (mr *MockStoreMockRecorder) UpdateOAuth2ProviderAppSecretByID(arg0, arg1 an return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOAuth2ProviderAppSecretByID", reflect.TypeOf((*MockStore)(nil).UpdateOAuth2ProviderAppSecretByID), arg0, arg1) } +// UpdateOrganization mocks base method. +func (m *MockStore) UpdateOrganization(arg0 context.Context, arg1 database.UpdateOrganizationParams) (database.Organization, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateOrganization", arg0, arg1) + ret0, _ := ret[0].(database.Organization) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// UpdateOrganization indicates an expected call of UpdateOrganization. +func (mr *MockStoreMockRecorder) UpdateOrganization(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateOrganization", reflect.TypeOf((*MockStore)(nil).UpdateOrganization), arg0, arg1) +} + // UpdateProvisionerDaemonLastSeenAt mocks base method. func (m *MockStore) UpdateProvisionerDaemonLastSeenAt(arg0 context.Context, arg1 database.UpdateProvisionerDaemonLastSeenAtParams) error { m.ctrl.T.Helper() diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 8c75b9dcb53a9..a590ae87bc8fd 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -74,6 +74,7 @@ type sqlcQuerier interface { // Logs can take up a lot of space, so it's important we clean up frequently. 
DeleteOldWorkspaceAgentLogs(ctx context.Context) error DeleteOldWorkspaceAgentStats(ctx context.Context) error + DeleteOrganization(ctx context.Context, id uuid.UUID) error DeleteReplicasUpdatedBefore(ctx context.Context, updatedAt time.Time) error DeleteTailnetAgent(ctx context.Context, arg DeleteTailnetAgentParams) (DeleteTailnetAgentRow, error) DeleteTailnetClient(ctx context.Context, arg DeleteTailnetClientParams) (DeleteTailnetClientRow, error) @@ -368,6 +369,7 @@ type sqlcQuerier interface { UpdateMemberRoles(ctx context.Context, arg UpdateMemberRolesParams) (OrganizationMember, error) UpdateOAuth2ProviderAppByID(ctx context.Context, arg UpdateOAuth2ProviderAppByIDParams) (OAuth2ProviderApp, error) UpdateOAuth2ProviderAppSecretByID(ctx context.Context, arg UpdateOAuth2ProviderAppSecretByIDParams) (OAuth2ProviderAppSecret, error) + UpdateOrganization(ctx context.Context, arg UpdateOrganizationParams) (Organization, error) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg UpdateProvisionerDaemonLastSeenAtParams) error UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index c38de30b4cb84..8f5a879d75f5c 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -3934,6 +3934,19 @@ func (q *sqlQuerier) UpdateMemberRoles(ctx context.Context, arg UpdateMemberRole return i, err } +const deleteOrganization = `-- name: DeleteOrganization :exec +DELETE FROM + organizations +WHERE + id = $1 AND + is_default = false +` + +func (q *sqlQuerier) DeleteOrganization(ctx context.Context, id uuid.UUID) error { + _, err := q.db.ExecContext(ctx, deleteOrganization, id) + return err +} + const getDefaultOrganization = `-- name: GetDefaultOrganization :one SELECT id, name, description, created_at, updated_at, is_default @@ -4126,6 
+4139,37 @@ func (q *sqlQuerier) InsertOrganization(ctx context.Context, arg InsertOrganizat return i, err } +const updateOrganization = `-- name: UpdateOrganization :one +UPDATE + organizations +SET + updated_at = $1, + name = $2 +WHERE + id = $3 +RETURNING id, name, description, created_at, updated_at, is_default +` + +type UpdateOrganizationParams struct { + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + Name string `db:"name" json:"name"` + ID uuid.UUID `db:"id" json:"id"` +} + +func (q *sqlQuerier) UpdateOrganization(ctx context.Context, arg UpdateOrganizationParams) (Organization, error) { + row := q.db.QueryRowContext(ctx, updateOrganization, arg.UpdatedAt, arg.Name, arg.ID) + var i Organization + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.CreatedAt, + &i.UpdatedAt, + &i.IsDefault, + ) + return i, err +} + const getParameterSchemasByJobID = `-- name: GetParameterSchemasByJobID :many SELECT id, created_at, job_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type, index diff --git a/coderd/database/queries/organizations.sql b/coderd/database/queries/organizations.sql index e809b386926a3..9d5cec1324fe6 100644 --- a/coderd/database/queries/organizations.sql +++ b/coderd/database/queries/organizations.sql @@ -53,3 +53,20 @@ INSERT INTO VALUES -- If no organizations exist, and this is the first, make it the default. 
($1, $2, $3, $4, $5, (SELECT TRUE FROM organizations LIMIT 1) IS NULL) RETURNING *; + +-- name: UpdateOrganization :one +UPDATE + organizations +SET + updated_at = @updated_at, + name = @name +WHERE + id = @id +RETURNING *; + +-- name: DeleteOrganization :exec +DELETE FROM + organizations +WHERE + id = $1 AND + is_default = false; diff --git a/coderd/organizations.go b/coderd/organizations.go index e5098a9697caf..2a43ed2a7011a 100644 --- a/coderd/organizations.go +++ b/coderd/organizations.go @@ -118,6 +118,97 @@ func (api *API) postOrganizations(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusCreated, convertOrganization(organization)) } +// @Summary Update organization +// @ID update-organization +// @Security CoderSessionToken +// @Accept json +// @Produce json +// @Tags Organizations +// @Param organization path string true "Organization ID or name" +// @Param request body codersdk.UpdateOrganizationRequest true "Patch organization request" +// @Success 200 {object} codersdk.Organization +// @Router /organizations/{organization} [patch] +func (api *API) patchOrganization(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + organization := httpmw.OrganizationParam(r) + + var req codersdk.UpdateOrganizationRequest + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + // "default" is a reserved name that always refers to the default org (much like the way we + // use "me" for users). 
+ if req.Name == codersdk.DefaultOrganization { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Organization name %q is reserved.", codersdk.DefaultOrganization), + }) + return + } + + organization, err := api.Database.UpdateOrganization(ctx, database.UpdateOrganizationParams{ + ID: organization.ID, + UpdatedAt: dbtime.Now(), + Name: req.Name, + }) + if httpapi.Is404Error(err) { + httpapi.ResourceNotFound(rw) + return + } + if database.IsUniqueViolation(err) { + httpapi.Write(ctx, rw, http.StatusConflict, codersdk.Response{ + Message: fmt.Sprintf("Organization already exists with the name %q.", req.Name), + Validations: []codersdk.ValidationError{{ + Field: "name", + Detail: "This value is already in use and should be unique.", + }}, + }) + return + } + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error updating organization.", + Detail: fmt.Sprintf("update organization: %s", err.Error()), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, convertOrganization(organization)) +} + +// @Summary Delete organization +// @ID delete-organization +// @Security CoderSessionToken +// @Produce json +// @Tags Organizations +// @Param organization path string true "Organization ID or name" +// @Success 200 {object} codersdk.Response +// @Router /organizations/{organization} [delete] +func (api *API) deleteOrganization(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + organization := httpmw.OrganizationParam(r) + + if organization.IsDefault { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Default organization cannot be deleted.", + }) + return + } + + err := api.Database.DeleteOrganization(ctx, organization.ID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error deleting organization.", + Detail: fmt.Sprintf("delete organization: %s", 
err.Error()), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusOK, codersdk.Response{ + Message: "Organization has been deleted.", + }) +} + // convertOrganization consumes the database representation and outputs an API friendly representation. func convertOrganization(organization database.Organization) codersdk.Organization { return codersdk.Organization{ diff --git a/coderd/organizations_test.go b/coderd/organizations_test.go index e176c7a6d858c..8ce39c5593d90 100644 --- a/coderd/organizations_test.go +++ b/coderd/organizations_test.go @@ -1,7 +1,6 @@ package coderd_test import ( - "context" "net/http" "testing" @@ -16,9 +15,7 @@ func TestMultiOrgFetch(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) _ = coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) makeOrgs := []string{"foo", "bar", "baz"} for _, name := range makeOrgs { @@ -38,9 +35,7 @@ func TestOrganizationsByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) _ = coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) orgs, err := client.OrganizationsByUser(ctx, codersdk.Me) require.NoError(t, err) @@ -62,9 +57,7 @@ func TestOrganizationByUserAndName(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) _, err := client.OrganizationByUserAndName(ctx, codersdk.Me, "nothing") var apiErr *codersdk.Error @@ -77,9 +70,7 @@ func TestOrganizationByUserAndName(t *testing.T) { client := coderdtest.New(t, nil) first := coderdtest.CreateFirstUser(t, client) other, _ := coderdtest.CreateAnotherUser(t, client, first.OrganizationID) - - 
ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) org, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ Name: "another", @@ -95,9 +86,7 @@ func TestOrganizationByUserAndName(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) user := coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) org, err := client.Organization(ctx, user.OrganizationID) require.NoError(t, err) @@ -112,9 +101,7 @@ func TestPostOrganizationsByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) user := coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) org, err := client.Organization(ctx, user.OrganizationID) require.NoError(t, err) @@ -130,9 +117,7 @@ func TestPostOrganizationsByUser(t *testing.T) { t.Parallel() client := coderdtest.New(t, nil) _ = coderdtest.CreateFirstUser(t, client) - - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + ctx := testutil.Context(t, testutil.WaitLong) _, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ Name: "new", @@ -140,3 +125,130 @@ func TestPostOrganizationsByUser(t *testing.T) { require.NoError(t, err) }) } + +func TestPatchOrganizationsByUser(t *testing.T) { + t.Parallel() + t.Run("Conflict", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + originalOrg, err := client.Organization(ctx, user.OrganizationID) + require.NoError(t, err) + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "something-unique", + }) + require.NoError(t, err) + + 
_, err = client.UpdateOrganization(ctx, o.ID.String(), codersdk.UpdateOrganizationRequest{ + Name: originalOrg.Name, + }) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusConflict, apiErr.StatusCode()) + }) + + t.Run("ReservedName", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "something-unique", + }) + require.NoError(t, err) + + _, err = client.UpdateOrganization(ctx, o.ID.String(), codersdk.UpdateOrganizationRequest{ + Name: codersdk.DefaultOrganization, + }) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusBadRequest, apiErr.StatusCode()) + }) + + t.Run("UpdateById", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "new", + }) + require.NoError(t, err) + + o, err = client.UpdateOrganization(ctx, o.ID.String(), codersdk.UpdateOrganizationRequest{ + Name: "new-new", + }) + require.NoError(t, err) + require.Equal(t, "new-new", o.Name) + }) + + t.Run("UpdateByName", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "new", + }) + require.NoError(t, err) + + o, err = client.UpdateOrganization(ctx, o.Name, codersdk.UpdateOrganizationRequest{ + Name: "new-new", + }) + require.NoError(t, err) + require.Equal(t, "new-new", o.Name) + }) +} + +func TestDeleteOrganizationsByUser(t *testing.T) { + t.Parallel() + t.Run("Default", func(t *testing.T) { + t.Parallel() + client := 
coderdtest.New(t, nil) + user := coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.Organization(ctx, user.OrganizationID) + require.NoError(t, err) + + err = client.DeleteOrganization(ctx, o.ID.String()) + var apiErr *codersdk.Error + require.ErrorAs(t, err, &apiErr) + require.Equal(t, http.StatusBadRequest, apiErr.StatusCode()) + }) + + t.Run("DeleteById", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "doomed", + }) + require.NoError(t, err) + + err = client.DeleteOrganization(ctx, o.ID.String()) + require.NoError(t, err) + }) + + t.Run("DeleteByName", func(t *testing.T) { + t.Parallel() + client := coderdtest.New(t, nil) + _ = coderdtest.CreateFirstUser(t, client) + ctx := testutil.Context(t, testutil.WaitMedium) + + o, err := client.CreateOrganization(ctx, codersdk.CreateOrganizationRequest{ + Name: "doomed", + }) + require.NoError(t, err) + + err = client.DeleteOrganization(ctx, o.Name) + require.NoError(t, err) + }) +} diff --git a/codersdk/organizations.go b/codersdk/organizations.go index 4c9cf81c497d3..646eae71d2475 100644 --- a/codersdk/organizations.go +++ b/codersdk/organizations.go @@ -55,6 +55,14 @@ type OrganizationMember struct { Roles []SlimRole `db:"roles" json:"roles"` } +type CreateOrganizationRequest struct { + Name string `json:"name" validate:"required,username"` +} + +type UpdateOrganizationRequest struct { + Name string `json:"name" validate:"required,username"` +} + // CreateTemplateVersionRequest enables callers to create a new Template Version. 
type CreateTemplateVersionRequest struct { Name string `json:"name,omitempty" validate:"omitempty,template_version_name"` @@ -187,6 +195,55 @@ func (c *Client) Organization(ctx context.Context, id uuid.UUID) (Organization, return c.OrganizationByName(ctx, id.String()) } +// CreateOrganization creates an organization and adds the user making the request as an owner. +func (c *Client) CreateOrganization(ctx context.Context, req CreateOrganizationRequest) (Organization, error) { + res, err := c.Request(ctx, http.MethodPost, "/api/v2/organizations", req) + if err != nil { + return Organization{}, err + } + defer res.Body.Close() + + if res.StatusCode != http.StatusCreated { + return Organization{}, ReadBodyAsError(res) + } + + var org Organization + return org, json.NewDecoder(res.Body).Decode(&org) +} + +// UpdateOrganization will update information about the corresponding organization, based on +// the UUID/name provided as `orgID`. +func (c *Client) UpdateOrganization(ctx context.Context, orgID string, req UpdateOrganizationRequest) (Organization, error) { + res, err := c.Request(ctx, http.MethodPatch, fmt.Sprintf("/api/v2/organizations/%s", orgID), req) + if err != nil { + return Organization{}, xerrors.Errorf("execute request: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return Organization{}, ReadBodyAsError(res) + } + + var organization Organization + return organization, json.NewDecoder(res.Body).Decode(&organization) +} + +// DeleteOrganization will remove the corresponding organization from the deployment, based on +// the UUID/name provided as `orgID`. 
+func (c *Client) DeleteOrganization(ctx context.Context, orgID string) error { + res, err := c.Request(ctx, http.MethodDelete, fmt.Sprintf("/api/v2/organizations/%s", orgID), nil) + if err != nil { + return xerrors.Errorf("execute request: %w", err) + } + defer res.Body.Close() + + if res.StatusCode != http.StatusOK { + return ReadBodyAsError(res) + } + + return nil +} + // ProvisionerDaemons returns provisioner daemons available. func (c *Client) ProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, error) { res, err := c.Request(ctx, http.MethodGet, diff --git a/codersdk/users.go b/codersdk/users.go index 80ca583141c9b..003ede2f9bd60 100644 --- a/codersdk/users.go +++ b/codersdk/users.go @@ -203,10 +203,6 @@ type OAuthConversionResponse struct { UserID uuid.UUID `json:"user_id" format:"uuid"` } -type CreateOrganizationRequest struct { - Name string `json:"name" validate:"required,username"` -} - // AuthMethods contains authentication method information like whether they are enabled or not or custom text, etc. type AuthMethods struct { TermsOfServiceURL string `json:"terms_of_service_url,omitempty"` @@ -587,22 +583,6 @@ func (c *Client) OrganizationByUserAndName(ctx context.Context, user string, nam return org, json.NewDecoder(res.Body).Decode(&org) } -// CreateOrganization creates an organization and adds the provided user as an admin. -func (c *Client) CreateOrganization(ctx context.Context, req CreateOrganizationRequest) (Organization, error) { - res, err := c.Request(ctx, http.MethodPost, "/api/v2/organizations", req) - if err != nil { - return Organization{}, err - } - defer res.Body.Close() - - if res.StatusCode != http.StatusCreated { - return Organization{}, ReadBodyAsError(res) - } - - var org Organization - return org, json.NewDecoder(res.Body).Decode(&org) -} - // AuthMethods returns types of authentication available to the user. 
func (c *Client) AuthMethods(ctx context.Context) (AuthMethods, error) { res, err := c.Request(ctx, http.MethodGet, "/api/v2/users/authmethods", nil) diff --git a/docs/api/organizations.md b/docs/api/organizations.md index 478c8aba56648..c6f4514eb9bad 100644 --- a/docs/api/organizations.md +++ b/docs/api/organizations.md @@ -177,3 +177,98 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization} \ | 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Organization](schemas.md#codersdkorganization) | To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Delete organization + +### Code samples + +```shell +# Example request using curl +curl -X DELETE http://coder-server:8080/api/v2/organizations/{organization} \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`DELETE /organizations/{organization}` + +### Parameters + +| Name | In | Type | Required | Description | +| -------------- | ---- | ------ | -------- | ----------------------- | +| `organization` | path | string | true | Organization ID or name | + +### Example responses + +> 200 Response + +```json +{ + "detail": "string", + "message": "string", + "validations": [ + { + "detail": "string", + "field": "string" + } + ] +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ------------------------------------------------ | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Response](schemas.md#codersdkresponse) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ +## Update organization + +### Code samples + +```shell +# Example request using curl +curl -X PATCH http://coder-server:8080/api/v2/organizations/{organization} \ + -H 'Content-Type: application/json' \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PATCH /organizations/{organization}` + +> Body parameter + +```json +{ + "name": "string" +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +| -------------- | ---- | ---------------------------------------------------------------------------------- | -------- | -------------------------- | +| `organization` | path | string | true | Organization ID or name | +| `body` | body | [codersdk.UpdateOrganizationRequest](schemas.md#codersdkupdateorganizationrequest) | true | Patch organization request | + +### Example responses + +> 200 Response + +```json +{ + "created_at": "2019-08-24T14:15:22Z", + "id": "497f6eca-6276-4993-bfeb-53cbbbba6f08", + "is_default": true, + "name": "string", + "updated_at": "2019-08-24T14:15:22Z" +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------- | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.Organization](schemas.md#codersdkorganization) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). diff --git a/docs/api/schemas.md b/docs/api/schemas.md index d1b6c6a3d82e0..67fb461ee1b0b 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -5361,6 +5361,20 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | `url` | string | false | | URL to download the latest release of Coder. | | `version` | string | false | | Version is the semantic version for the latest release of Coder. 
| +## codersdk.UpdateOrganizationRequest + +```json +{ + "name": "string" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| ------ | ------ | -------- | ------------ | ----------- | +| `name` | string | true | | | + ## codersdk.UpdateRoles ```json diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index a809b10220993..db1b39fdbed26 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -223,7 +223,7 @@ export interface CreateGroupRequest { readonly quota_allowance: number; } -// From codersdk/users.go +// From codersdk/organizations.go export interface CreateOrganizationRequest { readonly name: string; } @@ -1318,6 +1318,11 @@ export interface UpdateCheckResponse { readonly url: string; } +// From codersdk/organizations.go +export interface UpdateOrganizationRequest { + readonly name: string; +} + // From codersdk/users.go export interface UpdateRoles { readonly roles: readonly string[]; From f27f5c0002a96f428167580714571316d5bd5077 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 21 May 2024 16:04:41 -0300 Subject: [PATCH 086/149] feat(site): show number of times coder_app is opened (#13335) --- .../TemplateInsightsPage.tsx | 55 ++++++++++++++----- 1 file changed, 41 insertions(+), 14 deletions(-) diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx index 5839fb044941e..7e824b140edd6 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx @@ -52,6 +52,7 @@ import { HelpTooltipTrigger, } from "components/HelpTooltip/HelpTooltip"; import { Loader } from "components/Loader/Loader"; +import { Stack } from "components/Stack/Stack"; import { UserAvatar } from "components/UserAvatar/UserAvatar"; import { useEmbeddedMetadata } from 
"hooks/useEmbeddedMetadata"; import { useTemplateLayoutContext } from "pages/TemplatePage/TemplateLayout"; @@ -451,7 +452,7 @@ const TemplateUsagePanel: FC = ({ return (
= ({ }, }} /> -
{formatTime(usage.seconds)} -
+ + Opened {usage.times_used.toLocaleString()}{" "} + {usage.times_used === 1 ? "time" : "times"} + +
); })} @@ -869,20 +881,35 @@ const TextValue: FC = ({ children }) => { }; function formatTime(seconds: number): string { - if (seconds < 60) { - return seconds + " seconds"; - } else if (seconds >= 60 && seconds < 3600) { - const minutes = Math.floor(seconds / 60); - return minutes + " minutes"; + let value: { + amount: number; + unit: "seconds" | "minutes" | "hours"; + } = { + amount: seconds, + unit: "seconds", + }; + + if (seconds >= 60 && seconds < 3600) { + value = { + amount: Math.floor(seconds / 60), + unit: "minutes", + }; } else { - const hours = seconds / 3600; - const minutes = Math.floor(seconds % 3600); - if (minutes === 0) { - return hours.toFixed(0) + " hours"; - } + value = { + amount: seconds / 3600, + unit: "hours", + }; + } - return hours.toFixed(1) + " hours"; + if (value.amount === 1) { + const singularUnit = value.unit.slice(0, -1); + return `${value.amount} ${singularUnit}`; } + + return `${value.amount.toLocaleString(undefined, { + maximumFractionDigits: 1, + minimumFractionDigits: 0, + })} ${value.unit}`; } function toISOLocal(d: Date, offset: number) { From 78deaba481c5c41ad6c82ce7f4d3b0aa4c9484f2 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Tue, 21 May 2024 16:29:54 -0300 Subject: [PATCH 087/149] feat(site): show "update and start" button when update is forced (#13334) --- .../WorkspaceActions/Buttons.tsx | 24 ++- .../WorkspaceActions/WorkspaceActions.tsx | 14 +- .../WorkspaceActions/constants.ts | 156 +++++++++++------- 3 files changed, 123 insertions(+), 71 deletions(-) diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/Buttons.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/Buttons.tsx index 2eb39b04d3f43..caa034d77c29f 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/Buttons.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/Buttons.tsx @@ -117,10 +117,8 @@ export const RestartButton: FC = ({ handleAction, loading, workspace, - disabled, - tooltipText, }) => { - const buttonContent = ( + return ( = ({ 
borderLeft: "1px solid #FFF", }, }} - disabled={disabled} > } onClick={() => handleAction()} data-testid="workspace-restart-button" - disabled={disabled || loading} + disabled={loading} > {loading ? <>Restarting… : <>Restart…} @@ -147,11 +144,20 @@ export const RestartButton: FC = ({ /> ); +}; - return tooltipText ? ( - {buttonContent} - ) : ( - buttonContent +export const UpdateAndStartButton: FC = ({ + handleAction, +}) => { + return ( + + } + onClick={() => handleAction()} + > + Update and start… + + ); }; diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx index e90d844dbda70..ad79ce1be9c95 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx @@ -24,6 +24,7 @@ import { UpdateButton, ActivateButton, FavoriteButton, + UpdateAndStartButton, } from "./Buttons"; import { type ActionType, abilitiesByWorkspaceStatus } from "./constants"; import { DebugButton } from "./DebugButton"; @@ -89,6 +90,7 @@ export const WorkspaceActions: FC = ({ // A mapping of button type to the corresponding React component const buttonMapping: Record = { update: , + updateAndStart: , updating: , start: ( = ({ data-testid="workspace-actions" > {canBeUpdated && ( - <>{isUpdating ? buttonMapping.updating : buttonMapping.update} + <> + {isUpdating + ? buttonMapping.updating + : workspace.template_require_active_version + ? buttonMapping.updateAndStart + : buttonMapping.update} + )} {isRestarting @@ -236,10 +244,6 @@ function getTooltipText( return "This template requires automatic updates on workspace startup, but template administrators can ignore this policy."; } - if (workspace.template_require_active_version) { - return "This template requires automatic updates on workspace startup. 
Contact your administrator if you want to preserve the template version."; - } - if (workspace.automatic_updates === "always") { return "Automatic updates are enabled for this workspace. Modify the update policy in workspace settings if you want to preserve the template version."; } diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/constants.ts b/site/src/pages/WorkspacePage/WorkspaceActions/constants.ts index 3c7347cc52864..c2a85da8cb121 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/constants.ts +++ b/site/src/pages/WorkspacePage/WorkspaceActions/constants.ts @@ -1,4 +1,4 @@ -import type { Workspace, WorkspaceStatus } from "api/typesGenerated"; +import type { Workspace } from "api/typesGenerated"; /** * An iterable of all action types supported by the workspace UI @@ -23,6 +23,10 @@ export const actionTypes = [ "retry", "debug", + // When a template requires updates, we aim to display a distinct update + // button that clearly indicates a mandatory update. + "updateAndStart", + // These are buttons that should be used with disabled UI elements "canceling", "deleted", @@ -52,67 +56,105 @@ export const abilitiesByWorkspaceStatus = ( const status = workspace.latest_build.status; if (status === "failed" && canDebug) { return { - ...statusToAbility.failed, actions: ["retry", "debug"], + canCancel: false, + canAcceptJobs: true, }; } - return statusToAbility[status]; -}; + switch (status) { + case "starting": { + return { + actions: ["starting"], + canCancel: true, + canAcceptJobs: false, + }; + } + case "running": { + const actions: ActionType[] = ["stop"]; -const statusToAbility: Record = { - starting: { - actions: ["starting"], - canCancel: true, - canAcceptJobs: false, - }, - running: { - actions: ["stop", "restart"], - canCancel: false, - canAcceptJobs: true, - }, - stopping: { - actions: ["stopping"], - canCancel: true, - canAcceptJobs: false, - }, - stopped: { - actions: ["start"], - canCancel: false, - canAcceptJobs: true, - }, - canceled: { 
- actions: ["start", "stop"], - canCancel: false, - canAcceptJobs: true, - }, + // If the template requires the latest version, we prevent the user from + // restarting the workspace without updating it first. In the Buttons + // component, we display an UpdateAndStart component to facilitate this. + if (!workspace.template_require_active_version) { + actions.push("restart"); + } - // in the case of an error - failed: { - actions: ["retry"], - canCancel: false, - canAcceptJobs: true, - }, + return { + actions, + canCancel: false, + canAcceptJobs: true, + }; + } + case "stopping": { + return { + actions: ["stopping"], + canCancel: true, + canAcceptJobs: false, + }; + } + case "stopped": { + const actions: ActionType[] = []; - // Disabled states - canceling: { - actions: ["canceling"], - canCancel: false, - canAcceptJobs: false, - }, - deleting: { - actions: ["deleting"], - canCancel: true, - canAcceptJobs: false, - }, - deleted: { - actions: ["deleted"], - canCancel: false, - canAcceptJobs: false, - }, - pending: { - actions: ["pending"], - canCancel: false, - canAcceptJobs: false, - }, + // If the template requires the latest version, we prevent the user from + // starting the workspace without updating it first. In the Buttons + // component, we display an UpdateAndStart component to facilitate this. 
+ if (!workspace.template_require_active_version) { + actions.push("start"); + } + + return { + actions, + canCancel: false, + canAcceptJobs: true, + }; + } + case "canceled": { + return { + actions: ["start", "stop"], + canCancel: false, + canAcceptJobs: true, + }; + } + case "failed": { + return { + actions: ["retry"], + canCancel: false, + canAcceptJobs: true, + }; + } + + // Disabled states + case "canceling": { + return { + actions: ["canceling"], + canCancel: false, + canAcceptJobs: false, + }; + } + case "deleting": { + return { + actions: ["deleting"], + canCancel: true, + canAcceptJobs: false, + }; + } + case "deleted": { + return { + actions: ["deleted"], + canCancel: false, + canAcceptJobs: false, + }; + } + case "pending": { + return { + actions: ["pending"], + canCancel: false, + canAcceptJobs: false, + }; + } + default: { + throw new Error(`Unknown workspace status: ${status}`); + } + } }; From 7ea4a89a205c6e29ba8148c7f2bdb46f0c481cd0 Mon Sep 17 00:00:00 2001 From: Justin Shoffstall Date: Wed, 22 May 2024 05:24:28 -0400 Subject: [PATCH 088/149] chore: update kubernetes.md, bumping stable from v2.9.4 to v2.10.2 (#13275) --- docs/install/kubernetes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index e8d781ee5dcb6..de8de4cf93b5a 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -145,7 +145,7 @@ locally in order to log in and manage templates. helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.9.4 + --version 2.10.2 ``` You can watch Coder start up by running `kubectl get pods -n coder`. 
Once From 390ff9ac0553c360568bf520de470baf322ee6f4 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Wed, 22 May 2024 10:26:59 -0300 Subject: [PATCH 089/149] refactor(site): hide unavailable usage information (#13341) --- .../TemplateInsightsPage.stories.tsx | 4 ++-- .../TemplateInsightsPage.tsx | 20 ++++++++++--------- 2 files changed, 13 insertions(+), 11 deletions(-) diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx index 3630a936929a3..59417c5c69e72 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.stories.tsx @@ -76,8 +76,8 @@ export const Loaded: Story = { display_name: "JetBrains", slug: "jetbrains", icon: "/icon/intellij.svg", - seconds: 0, - times_used: 0, + seconds: 2013400, + times_used: 20, }, { template_ids: ["0d286645-29aa-4eaf-9b52-cc5d2740c90b"], diff --git a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx index 7e824b140edd6..2e9ff245f0635 100644 --- a/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx +++ b/site/src/pages/TemplatePage/TemplateInsightsPage/TemplateInsightsPage.tsx @@ -504,15 +504,17 @@ const TemplateUsagePanel: FC = ({ }} > {formatTime(usage.seconds)} - - Opened {usage.times_used.toLocaleString()}{" "} - {usage.times_used === 1 ? "time" : "times"} - + {usage.times_used > 0 && ( + + Opened {usage.times_used.toLocaleString()}{" "} + {usage.times_used === 1 ? "time" : "times"} + + )}
); From ed6ee9aaa888774d3adc1ee220dd91941aa8c728 Mon Sep 17 00:00:00 2001 From: Ammar Bandukwala Date: Wed, 22 May 2024 12:01:29 -0500 Subject: [PATCH 090/149] chore(README): add hiring link (#13345) --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index c2fbef68026cf..a39b8219074b2 100644 --- a/README.md +++ b/README.md @@ -122,3 +122,7 @@ We are always working on new integrations. Feel free to open an issue to request We are always happy to see new contributors to Coder. If you are new to the Coder codebase, we have [a guide on how to get started](https://coder.com/docs/v2/latest/CONTRIBUTING). We'd love to see your contributions! + +## Hiring + +Apply [here](https://cdr.co/github-apply) if you're interested in joining our team. From 3364abecdd8bbbf89a82d073d2b405fc623bb174 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Wed, 22 May 2024 13:45:47 -0400 Subject: [PATCH 091/149] chore: generate terraform testdata with matching terraform version (#13343) Terraform changed the default output of the `terraform graph` command. You must put `-type=plan` to keep the prior behavior. 
Co-authored-by: Colin Adler --- .github/workflows/ci.yaml | 3 + Makefile | 7 + provisioner/terraform/executor.go | 14 +- .../testdata/calling-module/calling-module.tf | 2 +- .../calling-module/calling-module.tfplan.dot | 1 - .../calling-module/calling-module.tfplan.json | 32 ++++- .../calling-module/calling-module.tfstate.dot | 1 - .../calling-module.tfstate.json | 31 ++++- .../chaining-resources/chaining-resources.tf | 2 +- .../chaining-resources.tfplan.dot | 1 - .../chaining-resources.tfplan.json | 32 ++++- .../chaining-resources.tfstate.dot | 1 - .../chaining-resources.tfstate.json | 31 ++++- .../conflicting-resources.tf | 2 +- .../conflicting-resources.tfplan.dot | 1 - .../conflicting-resources.tfplan.json | 32 ++++- .../conflicting-resources.tfstate.dot | 1 - .../conflicting-resources.tfstate.json | 31 ++++- .../display-apps-disabled.tf | 2 +- .../display-apps-disabled.tfplan.dot | 1 - .../display-apps-disabled.tfplan.json | 9 +- .../display-apps-disabled.tfstate.dot | 1 - .../display-apps-disabled.tfstate.json | 9 +- .../testdata/display-apps/display-apps.tf | 2 +- .../display-apps/display-apps.tfplan.dot | 1 - .../display-apps/display-apps.tfplan.json | 9 +- .../display-apps/display-apps.tfstate.dot | 1 - .../display-apps/display-apps.tfstate.json | 9 +- .../external-auth-providers.tf | 2 +- .../external-auth-providers.tfplan.dot | 1 - .../external-auth-providers.tfplan.json | 10 +- .../external-auth-providers.tfstate.dot | 1 - .../external-auth-providers.tfstate.json | 9 +- provisioner/terraform/testdata/generate.sh | 6 +- .../git-auth-providers/git-auth-providers.tf | 2 +- .../git-auth-providers.tfplan.dot | 1 - .../git-auth-providers.tfplan.json | 24 +++- .../git-auth-providers.tfstate.dot | 1 - .../git-auth-providers.tfstate.json | 24 +++- .../testdata/instance-id/instance-id.tf | 2 +- .../instance-id/instance-id.tfplan.dot | 1 - .../instance-id/instance-id.tfplan.json | 32 ++++- .../instance-id/instance-id.tfstate.dot | 1 - 
.../instance-id/instance-id.tfstate.json | 33 ++++- .../kubernetes-metadata.tf | 2 +- .../testdata/mapped-apps/mapped-apps.tf | 2 +- .../mapped-apps/mapped-apps.tfplan.dot | 1 - .../mapped-apps/mapped-apps.tfplan.json | 40 +++++- .../mapped-apps/mapped-apps.tfstate.dot | 1 - .../mapped-apps/mapped-apps.tfstate.json | 41 ++++-- .../multiple-agents/multiple-agents.tf | 2 +- .../multiple-agents.tfplan.dot | 1 - .../multiple-agents.tfplan.json | 27 +++- .../multiple-agents.tfstate.dot | 1 - .../multiple-agents.tfstate.json | 72 ++++++++-- .../testdata/multiple-apps/multiple-apps.tf | 2 +- .../multiple-apps/multiple-apps.tfplan.dot | 1 - .../multiple-apps/multiple-apps.tfplan.json | 44 +++++- .../multiple-apps/multiple-apps.tfstate.dot | 1 - .../multiple-apps/multiple-apps.tfstate.json | 47 +++++-- .../resource-metadata-duplicate.tf | 2 +- .../resource-metadata-duplicate.tfplan.dot | 1 - .../resource-metadata-duplicate.tfplan.json | 14 +- .../resource-metadata-duplicate.tfstate.dot | 1 - .../resource-metadata-duplicate.tfstate.json | 30 ++-- .../resource-metadata/resource-metadata.tf | 2 +- .../resource-metadata.tfplan.dot | 26 ++-- .../resource-metadata.tfplan.json | 8 +- .../resource-metadata.tfstate.dot | 26 ++-- .../resource-metadata.tfstate.json | 13 +- .../rich-parameters-order.tf | 2 +- .../rich-parameters-order.tfplan.dot | 1 - .../rich-parameters-order.tfplan.json | 24 ++-- .../rich-parameters-order.tfstate.dot | 1 - .../rich-parameters-order.tfstate.json | 31 +++-- .../rich-parameters-validation.tf | 2 +- .../rich-parameters-validation.tfplan.dot | 1 - .../rich-parameters-validation.tfplan.json | 26 ++-- .../rich-parameters-validation.tfstate.dot | 1 - .../rich-parameters-validation.tfstate.json | 33 +++-- .../child-external-module/main.tf | 2 +- .../rich-parameters/external-module/main.tf | 2 +- .../rich-parameters/rich-parameters.tf | 2 +- .../rich-parameters.tfplan.dot | 1 - .../rich-parameters.tfplan.json | 123 ++++++++++------- 
.../rich-parameters.tfstate.dot | 1 - .../rich-parameters.tfstate.json | 129 +++++++++++------- provisioner/terraform/testdata/version.txt | 1 + 88 files changed, 878 insertions(+), 332 deletions(-) create mode 100644 provisioner/terraform/testdata/version.txt diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 003319255580b..8f89a4b3fa3ea 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -211,6 +211,9 @@ jobs: - name: Setup sqlc uses: ./.github/actions/setup-sqlc + - name: Setup Terraform + uses: ./.github/actions/setup-tf + - name: go install tools run: | go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.30 diff --git a/Makefile b/Makefile index 7a24a293d16a8..47cdea7cb653a 100644 --- a/Makefile +++ b/Makefile @@ -493,6 +493,7 @@ gen: \ coderd/apidoc/swagger.json \ .prettierignore.include \ .prettierignore \ + provisioner/terraform/testdata/version \ site/.prettierrc.yaml \ site/.prettierignore \ site/.eslintignore \ @@ -684,6 +685,12 @@ provisioner/terraform/testdata/.gen-golden: $(wildcard provisioner/terraform/tes go test ./provisioner/terraform -run="Test.*Golden$$" -update touch "$@" +provisioner/terraform/testdata/version: + if [[ "$(shell cat provisioner/terraform/testdata/version.txt)" != "$(shell terraform version -json | jq -r '.terraform_version')" ]]; then + ./provisioner/terraform/testdata/generate.sh + fi +.PHONY: provisioner/terraform/testdata/version + scripts/ci-report/testdata/.gen-golden: $(wildcard scripts/ci-report/testdata/*) $(wildcard scripts/ci-report/*.go) go test ./scripts/ci-report -run=TestOutputMatchesGoldenFile -update touch "$@" diff --git a/provisioner/terraform/executor.go b/provisioner/terraform/executor.go index 0a6c1df943595..b1a3ecadb5203 100644 --- a/provisioner/terraform/executor.go +++ b/provisioner/terraform/executor.go @@ -24,6 +24,8 @@ import ( "github.com/coder/coder/v2/provisionersdk/proto" ) +var version170 = version.Must(version.NewVersion("1.7.0")) + type 
executor struct { logger slog.Logger server *server @@ -346,8 +348,16 @@ func (e *executor) graph(ctx, killCtx context.Context) (string, error) { return "", ctx.Err() } + ver, err := e.version(ctx) + if err != nil { + return "", err + } + args := []string{"graph"} + if ver.GreaterThanOrEqual(version170) { + args = append(args, "-type=plan") + } var out strings.Builder - cmd := exec.CommandContext(killCtx, e.binaryPath, "graph") // #nosec + cmd := exec.CommandContext(killCtx, e.binaryPath, args...) // #nosec cmd.Stdout = &out cmd.Dir = e.workdir cmd.Env = e.basicEnv() @@ -356,7 +366,7 @@ func (e *executor) graph(ctx, killCtx context.Context) (string, error) { slog.F("binary_path", e.binaryPath), slog.F("args", "graph"), ) - err := cmd.Start() + err = cmd.Start() if err != nil { return "", err } diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tf b/provisioner/terraform/testdata/calling-module/calling-module.tf index c83c7dd2245b0..14777169d9994 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tf +++ b/provisioner/terraform/testdata/calling-module/calling-module.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.dot b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.dot index f3a28a65c5ecc..47f46d7ce79ba 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.dot +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json index e71a071e4fd9d..28a2b055ecf10 100644 --- 
a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } } ], "child_modules": [ @@ -78,17 +89,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -153,7 +176,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "module.module:null": { "name": "null", @@ -236,5 +259,6 @@ ] } ], - "timestamp": "2023-08-30T19:24:59Z" + "timestamp": "2024-05-22T17:02:40Z", + "errored": false } diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.dot b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.dot index f3a28a65c5ecc..47f46d7ce79ba 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.dot +++ 
b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json index dc3627f793ffc..5f8a795e2a894 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "c6fd4a45-dc64-4830-8ff1-9a6c8074fca8", + "id": "f26b1d53-799e-4fbb-9fd3-71e60b37eacd", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": "2559767b-afc6-4293-92cf-d57d98bda13a", + "startup_script_behavior": null, + "startup_script_timeout": 300, + "token": "ce663074-ebea-44cb-b6d1-321f590f7982", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } } @@ -48,7 +69,7 @@ "outputs": { "script": "" }, - "random": "5659889568915200015" + "random": "8031375470547649400" }, "sensitive_values": { "inputs": {}, @@ -63,7 +84,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4052095409343470524", + "id": "3370916843136140681", "triggers": null }, "sensitive_values": {}, diff --git 
a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tf b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tf index 302a34fb17c03..3f210452dfee0 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tf +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.dot b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.dot index 5ebd454aba477..47a4798719ca0 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.dot +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.dot @@ -17,4 +17,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json index c34eba1bf5e2c..9717ddd34b128 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { 
"address": "null_resource.a", @@ -68,17 +79,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -131,7 +154,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -181,5 +204,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:02Z" + "timestamp": "2024-05-22T17:02:43Z", + "errored": false } diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.dot b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.dot index 5ebd454aba477..47a4798719ca0 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.dot +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.dot @@ -17,4 +17,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json index 60821742c70b5..304e9703b9073 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 
@@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "9fb263ae-2d96-414f-abfa-7874e73695d2", + "id": "9d869fc3-c185-4278-a5d2-873f809a4449", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": "4f391c60-20f9-4d57-906e-92e2f3e1e3c1", + "startup_script_behavior": null, + "startup_script_timeout": 300, + "token": "418bb1d6-49d8-4340-ac84-ed6991457ff9", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -36,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2616597461049838347", + "id": "3681188688307687011", "triggers": null }, "sensitive_values": {}, @@ -53,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6759504907417146954", + "id": "6055360096088266226", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tf b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tf index e51020602ba31..8c7b200fca7b0 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tf +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.dot b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.dot index b1478de04e121..c887bda7e2672 100644 --- 
a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.dot +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.dot @@ -19,4 +19,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json index ec759bd57e6e6..a62fa814bea53 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { "address": "null_resource.first", @@ -68,17 +79,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -131,7 +154,7 @@ "coder": { "name": "coder", "full_name": 
"registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -181,5 +204,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:04Z" + "timestamp": "2024-05-22T17:02:45Z", + "errored": false } diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.dot b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.dot index b1478de04e121..c887bda7e2672 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.dot +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.dot @@ -19,4 +19,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json index cc9d6c4d07bed..4aa66de56d2c9 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "56d6f6e2-a7f8-4594-9bc3-044a4fd3b021", + "id": "d9c497fe-1dc4-4551-b46d-282f775e9509", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": "715216d1-fca1-4652-9032-d5367072706f", + "startup_script_behavior": null, 
+ "startup_script_timeout": 300, + "token": "6fa01f69-de93-4610-b942-b787118146f8", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -36,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7470209964325643389", + "id": "2012753940926517215", "triggers": null }, "sensitive_values": {}, @@ -52,7 +73,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "251158623761758523", + "id": "2163283012438694669", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tf b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tf index ab6c4cd551802..494e0acafb48f 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tf +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.11.2" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.dot b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.dot index a1dd4289708f0..0b8e5a1594998 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.dot +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.dot @@ -15,4 +15,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json index 07d7647d1ec07..de8d982bef577 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json 
+++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -29,6 +29,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -89,6 +90,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -143,7 +145,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.11.2" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -201,5 +203,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:10Z" + "timestamp": "2024-05-22T17:02:50Z", + "errored": false } diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.dot b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.dot index a1dd4289708f0..0b8e5a1594998 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.dot +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.dot @@ -15,4 +15,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json index dd0f7eed39ed9..3567c75133732 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { 
"root_module": { "resources": [ @@ -26,18 +26,19 @@ } ], "env": null, - "id": "ba0faeb0-5a14-4908-946e-360329a8c852", + "id": "c55cfcad-5422-46e5-a144-e933660bacd3", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "010c13b9-95aa-4b66-a2ad-5937e467134a", + "token": "e170615d-a3a2-4dc4-a65e-4990ceeb79e5", "troubleshooting_url": null }, "sensitive_values": { @@ -56,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7220106781059326067", + "id": "3512108359019802900", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tf b/provisioner/terraform/testdata/display-apps/display-apps.tf index f4398bcdf34c2..a36b68cd3b1cc 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tf +++ b/provisioner/terraform/testdata/display-apps/display-apps.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.11.2" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.dot b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.dot index a1dd4289708f0..0b8e5a1594998 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.dot +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.dot @@ -15,4 +15,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json index 135f576b99422..d41c6e03541d0 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json +++ 
b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -29,6 +29,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -89,6 +90,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -143,7 +145,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.11.2" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -201,5 +203,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:07Z" + "timestamp": "2024-05-22T17:02:48Z", + "errored": false } diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.dot b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.dot index a1dd4289708f0..0b8e5a1594998 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.dot +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.dot @@ -15,4 +15,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json index 6742240dd2800..79b2e6dd6490f 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -26,18 +26,19 @@ } ], "env": null, - "id": "a7b8ff17-66ba-47b4-a4b4-51da1ad835fc", + "id": "3fb63a4e-bb0e-4380-9ed9-8b1581943b1f", 
"init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "75fc044a-b120-4e86-be94-056cec981bd9", + "token": "eb5720a7-91fd-4e37-8085-af3c8205702c", "troubleshooting_url": null }, "sensitive_values": { @@ -56,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4184951391452107661", + "id": "2929624824161973000", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tf b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tf index 7afa655792cea..0b68bbe5710fe 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tf +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.16.0" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.dot b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.dot index a011d7c85e40e..06ec61c86c754 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.dot +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json index afd5d60812138..837d50255a3a1 100644 --- 
a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.6.6", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -20,6 +20,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -69,6 +70,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -116,7 +118,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.6.6", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -157,7 +159,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.16.0" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -225,6 +227,6 @@ ] } }, - "timestamp": "2024-02-12T23:11:52Z", + "timestamp": "2024-05-22T17:02:52Z", "errored": false } diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.dot b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.dot index a011d7c85e40e..06ec61c86c754 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.dot +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json index 21f66e44b607b..125cea74bcc3c 100644 --- 
a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.6.6", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -54,18 +54,19 @@ } ], "env": null, - "id": "d1f23602-ef8e-4ecf-aa5a-df8aa476344e", + "id": "923df4d0-cf96-4cf8-aaff-426e58927a81", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "90440015-11c8-442b-adba-9f2bd279b5c7", + "token": "f5328221-90c7-4056-83b4-7b76d6f46580", "troubleshooting_url": null }, "sensitive_values": { @@ -84,7 +85,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8183284779544326910", + "id": "4621387386750422041", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/generate.sh b/provisioner/terraform/testdata/generate.sh index 4ae1a87fb2504..04ac7bdef3c64 100755 --- a/provisioner/terraform/testdata/generate.sh +++ b/provisioner/terraform/testdata/generate.sh @@ -22,11 +22,13 @@ for d in */; do terraform init -upgrade terraform plan -out terraform.tfplan terraform show -json ./terraform.tfplan | jq >"$name".tfplan.json - terraform graph >"$name".tfplan.dot + terraform graph -type=plan >"$name".tfplan.dot rm terraform.tfplan terraform apply -auto-approve terraform show -json ./terraform.tfstate | jq >"$name".tfstate.json rm terraform.tfstate - terraform graph >"$name".tfstate.dot + terraform graph -type=plan >"$name".tfstate.dot popd done + +terraform version -json | jq -r '.terraform_version' >version.txt diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tf 
b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tf index e76479c459043..337699a36cccd 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tf +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.13" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.dot b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.dot index 3d0775104e9c8..119f00d4b3840 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.dot +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json index e5976f1d4341d..bd9286692d328 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -18,15 +18,21 @@ "dir": null, "env": null, "login_before_ready": true, + "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { "address": "null_resource.dev", @@ -62,21 +68,28 @@ "dir": null, "env": null, "login_before_ready": true, + "metadata": 
[], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -105,7 +118,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -144,7 +157,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.13" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -209,5 +222,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:13Z" + "timestamp": "2024-05-22T17:02:55Z", + "errored": false } diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.dot b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.dot index 3d0775104e9c8..119f00d4b3840 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.dot +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json index 0abc4e8a4cf32..509c6d5a9d7fc 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": 
[ @@ -42,20 +42,36 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "a8139f31-219b-4ee5-9e64-60d8dd94be27", + "id": "48a24332-1a90-48d9-9e03-b4e9f09c6eab", "init_script": "", "login_before_ready": true, + "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "20cdf0ee-2da9-432e-a3ad-674b900ed3c1", + "token": "6a2ae93f-3f25-423d-aa97-b2f1c5d9c20b", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -67,7 +83,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8246789295692160686", + "id": "8095584601893320918", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tf b/provisioner/terraform/testdata/instance-id/instance-id.tf index 328ac453c490f..1cd4ab828b4f0 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tf +++ b/provisioner/terraform/testdata/instance-id/instance-id.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.dot b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.dot index eff161be511b3..543bd3679ea9c 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.dot +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.dot @@ -17,4 +17,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git 
a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json index cd94915162d1c..fe875367359c0 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { "address": "coder_agent_instance.main", @@ -68,17 +79,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -132,7 +155,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -201,5 +224,6 @@ ] } ], - "timestamp": "2023-08-30T19:25:15Z" + "timestamp": "2024-05-22T17:02:57Z", + "errored": false } diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.dot 
b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.dot index eff161be511b3..543bd3679ea9c 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.dot +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.dot @@ -17,4 +17,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json index 6b91850750048..ef5346a2ac822 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 @@ "auth": "google-instance-identity", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "07c39e97-3461-4912-87c6-aab06714fb79", + "id": "3bc8e20f-2024-4014-ac11-806e7e1a1e24", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": "4d389c4e-479b-4004-8ad1-b10da989bbdb", + "startup_script_behavior": null, + "startup_script_timeout": 300, + "token": "6ef0492b-8dbe-4c61-8eb8-a37acb671278", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -36,8 +57,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "07c39e97-3461-4912-87c6-aab06714fb79", - "id": "13ac93bd-54bf-4e93-b2a1-35534139e255", + "agent_id": "3bc8e20f-2024-4014-ac11-806e7e1a1e24", + "id": 
"7ba714fa-f2b8-4d33-8987-f67466505033", "instance_id": "example" }, "sensitive_values": {}, @@ -53,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8984327635720248545", + "id": "4065206823139127011", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/kubernetes-metadata/kubernetes-metadata.tf b/provisioner/terraform/testdata/kubernetes-metadata/kubernetes-metadata.tf index e8d6b1d08b3dc..2ae1298904fbb 100644 --- a/provisioner/terraform/testdata/kubernetes-metadata/kubernetes-metadata.tf +++ b/provisioner/terraform/testdata/kubernetes-metadata/kubernetes-metadata.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.5" + version = "0.22.0" } kubernetes = { source = "hashicorp/kubernetes" diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tf b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tf index 6ed5f0d18276b..1e13495d6ebc7 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tf +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.dot b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.dot index a54bed2003cc0..963c7c228deda 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.dot +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.dot @@ -18,4 +18,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json index 600373f73aeb0..9fad4b322a02d 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json +++ 
b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { "address": "coder_app.apps[\"app1\"]", @@ -34,9 +45,11 @@ "values": { "command": null, "display_name": "app1", + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -58,9 +71,11 @@ "values": { "command": null, "display_name": "app2", + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -104,17 +119,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -134,9 +161,11 @@ "after": { "command": null, "display_name": "app1", + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -169,9 +198,11 @@ "after": { "command": null, 
"display_name": "app2", + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -216,7 +247,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -295,5 +326,6 @@ ] } ], - "timestamp": "2023-08-30T19:25:17Z" + "timestamp": "2024-05-22T17:02:59Z", + "errored": false } diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.dot b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.dot index a54bed2003cc0..963c7c228deda 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.dot +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.dot @@ -18,4 +18,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json index 99ab3f5adad8a..e19a8b484bf6a 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "9a8356cf-b5ef-4da0-9b4e-cfeaca1fbfcf", + "id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": 
"7116ebd2-5205-4427-8cdb-5f86ec819911", + "startup_script_behavior": null, + "startup_script_timeout": 300, + "token": "0555adfc-e969-4fd2-8cfd-47560bd1b5a3", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -37,13 +58,15 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "9a8356cf-b5ef-4da0-9b4e-cfeaca1fbfcf", + "agent_id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", "command": null, "display_name": "app1", + "external": false, "healthcheck": [], "icon": null, - "id": "8ad9b3c3-0951-4612-adea-5c89ac12642a", + "id": "11fa3ff2-d6ba-41ca-b1df-6c98d395c0b8", "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -66,13 +89,15 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "9a8356cf-b5ef-4da0-9b4e-cfeaca1fbfcf", + "agent_id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", "command": null, "display_name": "app2", + "external": false, "healthcheck": [], "icon": null, - "id": "b3cbb3eb-62d8-485f-8378-2d2ed751aa38", + "id": "cd1a2e37-adbc-49f0-bd99-033c62a1533e", "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -94,7 +119,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5757307222275435634", + "id": "4490911212417021152", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tf b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tf index 978bbb164d604..d44a981d168bb 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tf +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.8.3" + version = "0.22.0" } } } diff --git 
a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.dot b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.dot index 02839b24d696d..b988d02d15ef8 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.dot +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.dot @@ -27,4 +27,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json index 4cdf0a05ee33b..7f44aa45ca7d9 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -20,6 +20,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -29,6 +30,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -48,6 +50,7 @@ "login_before_ready": true, "metadata": [], "motd_file": "/etc/motd", + "order": null, "os": "darwin", "shutdown_script": "echo bye bye", "shutdown_script_timeout": 30, @@ -57,6 +60,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -76,6 +80,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -85,6 +90,7 @@ "troubleshooting_url": "https://coder.com/troubleshoot" }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -104,6 +110,7 @@ "login_before_ready": false, "metadata": [], "motd_file": null, + "order": null, 
"os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -113,6 +120,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -152,6 +160,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -161,6 +170,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -168,6 +178,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -193,6 +204,7 @@ "login_before_ready": true, "metadata": [], "motd_file": "/etc/motd", + "order": null, "os": "darwin", "shutdown_script": "echo bye bye", "shutdown_script_timeout": 30, @@ -202,6 +214,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -209,6 +222,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -234,6 +248,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -243,6 +258,7 @@ "troubleshooting_url": "https://coder.com/troubleshoot" }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -250,6 +266,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -275,6 +292,7 @@ "login_before_ready": false, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -284,6 +302,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -291,6 +310,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ 
-323,7 +343,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.8.3" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -440,5 +460,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:20Z" + "timestamp": "2024-05-22T17:03:01Z", + "errored": false } diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.dot b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.dot index 02839b24d696d..b988d02d15ef8 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.dot +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.dot @@ -27,4 +27,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json index d9bfc636cd442..0bbd45fa5a3df 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,22 +16,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "094d300c-f07a-4357-870f-6ca1fc9154a2", + "id": "0ffc6582-b017-404e-b83f-48e4a5ab38bc", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "27bd44bc-0126-4c8d-9b98-8f27619e3656", + "token": 
"b7f0a913-ecb1-4c80-8559-fbcb435d53d0", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -48,22 +61,35 @@ "auth": "token", "connection_timeout": 1, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "bb844516-2cdd-419c-87e1-d0d3ea69fe78", + "id": "1780ae95-844c-4d5c-94fb-6ccfe4a7656d", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": "/etc/motd", + "order": null, "os": "darwin", "shutdown_script": "echo bye bye", "shutdown_script_timeout": 30, "startup_script": null, "startup_script_behavior": "non-blocking", "startup_script_timeout": 30, - "token": "8a31b688-d3d2-4c22-b37e-c9810b9b329a", + "token": "695f8765-3d3d-4da0-9a5a-bb7b1f568bde", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -80,22 +106,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "c6123c01-0543-4102-bdcf-f0ee2a9c1269", + "id": "333b7856-24ac-46be-9ae3-e4981b25481d", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": "blocking", "startup_script_timeout": 300, - "token": "64185462-292f-4b75-b350-625326ba596e", + "token": "50ddfb93-264f-4f64-8c8d-db7d8d37c0a1", "troubleshooting_url": "https://coder.com/troubleshoot" }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -112,22 +151,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + 
"ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "85d0614c-3e44-4f20-b4bf-a015c8dfcaac", + "id": "90736626-71c9-4b76-bdfc-f6ce9b3dda05", "init_script": "", "login_before_ready": false, "metadata": [], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "021b1139-fa63-42ba-be1a-85f8456f3c28", + "token": "8c4ae7b9-12b7-4a9c-a55a-a98cfb049103", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -140,7 +192,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6753149467284740901", + "id": "6980014108785645805", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tf b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tf index 3a713df629218..c7c4f9968b5c3 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tf +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.6.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.dot b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.dot index b072ccafce750..d844163e70c1e 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.dot +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.dot @@ -23,4 +23,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json index 
27958fe02d975..eee1d09317ba1 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -17,11 +17,22 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, - "sensitive_values": {} + "sensitive_values": { + "display_apps": [], + "metadata": [] + } }, { "address": "coder_app.app1", @@ -33,9 +44,11 @@ "values": { "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -56,6 +69,7 @@ "values": { "command": null, "display_name": null, + "external": false, "healthcheck": [ { "interval": 5, @@ -65,6 +79,7 @@ ], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -87,9 +102,11 @@ "values": { "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app3", @@ -133,17 +150,29 @@ "connection_timeout": 120, "dir": null, "env": null, + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, + "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, + "metadata": [], "token": true }, "before_sensitive": 
false, "after_sensitive": { + "display_apps": [], + "metadata": [], "token": true } } @@ -162,9 +191,11 @@ "after": { "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -196,6 +227,7 @@ "after": { "command": null, "display_name": null, + "external": false, "healthcheck": [ { "interval": 5, @@ -205,6 +237,7 @@ ], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -240,9 +273,11 @@ "after": { "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app3", @@ -287,7 +322,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.6.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -410,5 +445,6 @@ ] } ], - "timestamp": "2023-08-30T19:25:22Z" + "timestamp": "2024-05-22T17:03:03Z", + "errored": false } diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.dot b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.dot index b072ccafce750..d844163e70c1e 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.dot +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.dot @@ -23,4 +23,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json index 92ede7e786e85..3ed04ae6ecab0 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + 
"terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,15 +16,36 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "c8dab94d-651c-4d9b-a19a-1c067a2976ea", + "id": "c950352c-7c4a-41cc-9049-ad07ded85c47", "init_script": "", + "login_before_ready": true, + "metadata": [], + "motd_file": null, + "order": null, "os": "linux", + "shutdown_script": null, + "shutdown_script_timeout": 300, "startup_script": null, - "token": "96745539-f607-45f5-aa71-4f70f593ca6a", + "startup_script_behavior": null, + "startup_script_timeout": 300, + "token": "143c3974-49f5-4898-815b-c4044283ebc8", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], + "metadata": [], "token": true } }, @@ -36,13 +57,15 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c8dab94d-651c-4d9b-a19a-1c067a2976ea", + "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, - "id": "de5959cb-248c-44a0-bd04-9d5f28dfb415", + "id": "23135384-0e9f-4efc-b74c-d3e5e878ed67", "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app1", @@ -64,9 +87,10 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c8dab94d-651c-4d9b-a19a-1c067a2976ea", + "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", "command": null, "display_name": null, + "external": false, "healthcheck": [ { "interval": 5, @@ -75,8 +99,9 @@ } ], "icon": null, - "id": "60aaa860-01d1-4d42-804b-2dc689676307", + "id": "01e73639-0fd1-4bcb-bd88-d22eb8244627", "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app2", @@ -100,13 +125,15 @@ "provider_name": 
"registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c8dab94d-651c-4d9b-a19a-1c067a2976ea", + "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", "command": null, "display_name": null, + "external": false, "healthcheck": [], "icon": null, - "id": "3455e899-9bf9-4c0e-ac5b-6f861d5541a0", + "id": "058c9054-9714-4a5f-9fde-8a451ab58620", "name": null, + "order": null, "relative_path": null, "share": "owner", "slug": "app3", @@ -128,7 +155,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7562947701260361048", + "id": "9051436019409847411", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tf b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tf index 21e6f4206499c..b316db7c3cdf1 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tf +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.9.0" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.dot b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.dot index 34f1ea8f3cb29..cbeae141ae3d0 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.dot +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.dot @@ -20,4 +20,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json 
b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json index 54a7edb51063b..6084ae4435990 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -23,11 +23,13 @@ "display_name": "Process Count", "interval": 5, "key": "process_count", + "order": null, "script": "ps -ef | wc -l", "timeout": 1 } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -37,6 +39,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [ {} ] @@ -137,11 +140,13 @@ "display_name": "Process Count", "interval": 5, "key": "process_count", + "order": null, "script": "ps -ef | wc -l", "timeout": 1 } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -151,6 +156,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [ @@ -160,6 +166,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [ {} ], @@ -283,7 +290,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.9.0" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -424,5 +431,6 @@ ] } ], - "timestamp": "2023-08-30T19:25:27Z" + "timestamp": "2024-05-22T17:03:06Z", + "errored": false } diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.dot b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.dot index 34f1ea8f3cb29..cbeae141ae3d0 100644 --- 
a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.dot +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.dot @@ -20,4 +20,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json index f09fea579e70f..e617f565156ab 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -16,8 +16,17 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "4d2791c5-e623-4c79-9c3a-81d70fde0f1d", + "id": "8352a117-1250-44ef-bba2-0abdb2a77665", "init_script": "", "login_before_ready": true, "metadata": [ @@ -25,21 +34,26 @@ "display_name": "Process Count", "interval": 5, "key": "process_count", + "order": 0, "script": "ps -ef | wc -l", "timeout": 1 } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "b068b430-4ecb-4116-a103-de3aaa1abd3e", + "token": "b46fd197-3be4-42f8-9c47-5a9e71a76ef6", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [ {} ], @@ -57,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": 
"0a46d060-c676-4324-a016-8dcdc7581d36", + "id": "1f7911d4-5b64-4e20-af9b-b6ee2aff602b", "item": [ { "is_null": false, @@ -72,7 +86,7 @@ "value": "" } ], - "resource_id": "6477445272839759515" + "resource_id": "7229373774865666851" }, "sensitive_values": { "item": [ @@ -96,7 +110,7 @@ "daily_cost": 20, "hide": true, "icon": "/icon/server.svg", - "id": "77a107bc-073e-4180-9f7f-0e60fc42b6c2", + "id": "34fe7a46-2a2f-4628-8946-ef80a7ffdb5e", "item": [ { "is_null": false, @@ -105,7 +119,7 @@ "value": "world" } ], - "resource_id": "6477445272839759515" + "resource_id": "7229373774865666851" }, "sensitive_values": { "item": [ @@ -125,7 +139,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6477445272839759515", + "id": "7229373774865666851", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tf b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tf index c8ae6ca470cbf..cd46057ce8526 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tf +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.14.1" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.dot b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.dot index ffe2fdf71254d..f3de2ca20df25 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.dot +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.dot @@ -1,9 +1,19 @@ -digraph G { - rankdir = "RL"; - node [shape = rect, fontname = "sans-serif"]; - "coder_agent.main" [label="coder_agent.main"]; - "coder_metadata.about_info" [label="coder_metadata.about_info"]; - "null_resource.about" [label="null_resource.about"]; - "coder_metadata.about_info" 
-> "null_resource.about"; - "null_resource.about" -> "coder_agent.main"; +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_agent.main (expand)" [label = "coder_agent.main", shape = "box"] + "[root] coder_metadata.about_info (expand)" [label = "coder_metadata.about_info", shape = "box"] + "[root] null_resource.about (expand)" [label = "null_resource.about", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] provider[\"registry.terraform.io/hashicorp/null\"]" [label = "provider[\"registry.terraform.io/hashicorp/null\"]", shape = "diamond"] + "[root] coder_agent.main (expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] coder_metadata.about_info (expand)" -> "[root] null_resource.about (expand)" + "[root] null_resource.about (expand)" -> "[root] coder_agent.main (expand)" + "[root] null_resource.about (expand)" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"]" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_metadata.about_info (expand)" + "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" -> "[root] null_resource.about (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" + } } diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json index 0d257f8115cd0..a03346a724115 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.1-dev", + "terraform_version": "1.7.5", "planned_values": { "root_module": { 
"resources": [ @@ -29,6 +29,7 @@ } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -132,6 +133,7 @@ } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -253,7 +255,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.14.1" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -381,6 +383,6 @@ ] } ], - "timestamp": "2024-02-08T11:45:52Z", + "timestamp": "2024-05-22T17:03:05Z", "errored": false } diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.dot b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.dot index ffe2fdf71254d..f3de2ca20df25 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.dot +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.dot @@ -1,9 +1,19 @@ -digraph G { - rankdir = "RL"; - node [shape = rect, fontname = "sans-serif"]; - "coder_agent.main" [label="coder_agent.main"]; - "coder_metadata.about_info" [label="coder_metadata.about_info"]; - "null_resource.about" [label="null_resource.about"]; - "coder_metadata.about_info" -> "null_resource.about"; - "null_resource.about" -> "coder_agent.main"; +digraph { + compound = "true" + newrank = "true" + subgraph "root" { + "[root] coder_agent.main (expand)" [label = "coder_agent.main", shape = "box"] + "[root] coder_metadata.about_info (expand)" [label = "coder_metadata.about_info", shape = "box"] + "[root] null_resource.about (expand)" [label = "null_resource.about", shape = "box"] + "[root] provider[\"registry.terraform.io/coder/coder\"]" [label = "provider[\"registry.terraform.io/coder/coder\"]", shape = "diamond"] + "[root] provider[\"registry.terraform.io/hashicorp/null\"]" [label = "provider[\"registry.terraform.io/hashicorp/null\"]", shape = "diamond"] + "[root] coder_agent.main 
(expand)" -> "[root] provider[\"registry.terraform.io/coder/coder\"]" + "[root] coder_metadata.about_info (expand)" -> "[root] null_resource.about (expand)" + "[root] null_resource.about (expand)" -> "[root] coder_agent.main (expand)" + "[root] null_resource.about (expand)" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"]" + "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" -> "[root] coder_metadata.about_info (expand)" + "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" -> "[root] null_resource.about (expand)" + "[root] root" -> "[root] provider[\"registry.terraform.io/coder/coder\"] (close)" + "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" + } } diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json index a33f027b67731..f8abe064ec94b 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.1", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "90e2f3da-90d8-4cfd-8cfd-6e9c9da29a37", + "id": "847150eb-c3b6-497d-9dad-8e62d478cfff", "init_script": "", "login_before_ready": true, "metadata": [ @@ -40,13 +40,14 @@ } ], "motd_file": null, + "order": null, "os": "linux", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "f6f9a6c5-d352-4029-b476-67edfe802806", + "token": "a0c4f2f5-cc40-4731-9028-636033229c9c", "troubleshooting_url": null }, "sensitive_values": { @@ -70,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "a485c7b2-2b6c-42ab-b6d5-f51b681a5a41", + "id": 
"3feec3a3-6f9e-4cfb-b122-2273e345def0", "item": [ { "is_null": false, @@ -97,7 +98,7 @@ "value": "squirrel" } ], - "resource_id": "5837178340504502573" + "resource_id": "160324296641913729" }, "sensitive_values": { "item": [ @@ -120,7 +121,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "5837178340504502573", + "id": "160324296641913729", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tf b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tf index 0f6fcdfa423e6..82e7a6f95694e 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tf +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.9.0" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.dot b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.dot index ba97f97407426..ef32a2ea2bc0a 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.dot +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json index 169a8883f2596..12a6aaccdd7b7 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": 
"1.7.5", "planned_values": { "root_module": { "resources": [ @@ -20,6 +20,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -29,6 +30,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -68,6 +70,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -77,6 +80,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -84,6 +88,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -113,7 +118,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -128,10 +133,9 @@ "default": null, "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "245304bd-d7c0-4dc0-b4b2-90a036245af0", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "7fb346d2-b8c2-4f2a-99d1-a8fd54cc479e", "mutable": false, "name": "Example", "option": null, @@ -156,10 +160,9 @@ "default": "ok", "description": "blah blah", "display_name": null, + "ephemeral": false, "icon": null, - "id": "bccaddc6-97f1-48aa-a1c0-3438cc96139d", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "0581cc2a-9e6d-4f04-93a6-88fcbd0757f0", "mutable": false, "name": "Sample", "option": null, @@ -182,7 +185,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.9.0" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -265,5 +268,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:32Z" + "timestamp": "2024-05-22T17:03:11Z", + "errored": false } diff --git 
a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.dot b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.dot index ba97f97407426..ef32a2ea2bc0a 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.dot +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.dot @@ -21,4 +21,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json index c46df3e313f47..ce08e87bce074 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -15,10 +15,9 @@ "default": null, "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "20e486cd-35aa-4916-8cbf-c8b6fd235cd1", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "5c9f037b-3cc1-4616-b4ba-9e7322856575", "mutable": false, "name": "Example", "option": null, @@ -43,10 +42,9 @@ "default": "ok", "description": "blah blah", "display_name": null, + "ephemeral": false, "icon": null, - "id": "6c077b3f-ba6c-482b-9232-12a3d4892700", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "71a4bcc8-bbcb-4619-9641-df3bc296f58e", "mutable": false, "name": "Sample", "option": null, @@ -72,22 +70,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - 
"id": "1414c0f9-be31-4efa-b1c9-57ab7c951b97", + "id": "327e8ab1-90be-4c87-ac7d-09630ae46827", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "712872cf-fde6-4683-91a3-9ad9fc759e14", + "token": "794a8a86-3bb9-4b3d-bbea-acff8b513964", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -100,7 +111,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "9132401905565595068", + "id": "3735840255017039964", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tf b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tf index d0c04b904d7e6..c05e8d5d4ae32 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tf +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.11.0" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.dot b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.dot index 5ed08dde2ae7e..04e1353360488 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.dot +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.dot @@ -33,4 +33,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git 
a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json index 7da089a43ea98..d4f402ce40102 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -20,6 +20,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -29,6 +30,7 @@ "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -68,6 +70,7 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, @@ -77,6 +80,7 @@ "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -84,6 +88,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -113,7 +118,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -130,7 +135,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "858cb978-eef0-47e6-b7b8-7f9093303ad9", + "id": "1e85f9f5-54c2-4a6b-ba7f-8627386b94b7", "mutable": true, "name": "number_example", "option": null, @@ -157,7 +162,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0add04ee-5c08-4702-b32e-727fc8c3fcd7", + "id": "9908f4c5-87f5-496c-9479-d0f7d49f0fdf", "mutable": false, "name": "number_example_max", "option": null, @@ -196,7 +201,7 @@ 
"display_name": null, "ephemeral": false, "icon": null, - "id": "90bc3085-b65d-496a-b52c-2a6bfda1c439", + "id": "3f2d0054-0440-4a00-98f6-befa9475a5f4", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -235,7 +240,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2499264c-7fa4-41da-9c78-6b5c86ddfd9c", + "id": "29abca17-5bd3-4ae3-9bd3-1e45301fc509", "mutable": false, "name": "number_example_min", "option": null, @@ -274,7 +279,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "dd6c2f30-6320-4e4a-ba82-deef628330f1", + "id": "95630cc0-8040-4126-92bb-967dbf8eb2ed", "mutable": false, "name": "number_example_min_max", "option": null, @@ -313,7 +318,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "8e04ddc9-c245-408d-92b0-dec669259b4a", + "id": "c256c60a-fdfe-42f1-bbaa-27880816a7bf", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -348,7 +353,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.11.0" + "version_constraint": "0.22.0" }, "null": { "name": "null", @@ -545,5 +550,6 @@ ] } }, - "timestamp": "2023-08-30T19:25:35Z" + "timestamp": "2024-05-22T17:03:12Z", + "errored": false } diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.dot b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.dot index 5ed08dde2ae7e..04e1353360488 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.dot +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.dot @@ -33,4 +33,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json 
b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json index d04c4ef4027ab..a09880e54e903 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "3eac44eb-b74f-471e-ae3a-783083f33b58", + "id": "f7cabe8c-f091-4ced-bc9b-873f54edf61b", "mutable": true, "name": "number_example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "b767a52b-0b1d-4bea-a1b1-23180308a25d", + "id": "13b33312-d49b-4df3-af89-5d6ec840a6e4", "mutable": false, "name": "number_example_max", "option": null, @@ -83,7 +83,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "f6857c45-04cf-47ae-85bc-caab3341ead5", + "id": "d5ff002b-d039-42e6-b638-6bc2e3d54c2b", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -122,7 +122,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "634a2e89-47c0-4d4b-aed6-b20177c959d5", + "id": "f382fcba-2634-44e7-ab26-866228d0679a", "mutable": false, "name": "number_example_min", "option": null, @@ -161,7 +161,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "9ae1f0ff-2fe9-460c-97b8-6bb0cb7fb2c7", + "id": "7f1c3032-1ed9-4602-80f8-cc84489bafc9", "mutable": false, "name": "number_example_min_max", "option": null, @@ -200,7 +200,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "e6951857-18a9-44b8-bc0d-d78375fdf92d", + "id": "c474219f-f1e7-4eca-921a-1ace9a8391ee", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -238,22 +238,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + 
"display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "870767c4-6479-414c-aa08-a3f659ea3ec2", + "id": "138f6db3-bd8d-4a9a-8e61-abc1fdf3c3af", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "59f08143-3fcb-48d6-a80d-3a87863cd865", + "token": "1ef5dec0-3339-4e24-b781-0166cc6a9820", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -266,7 +279,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "643597385910559727", + "id": "5975950266738511043", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters/external-module/child-external-module/main.tf b/provisioner/terraform/testdata/rich-parameters/external-module/child-external-module/main.tf index a9a604f71d5d6..ac6f4c621a9d0 100644 --- a/provisioner/terraform/testdata/rich-parameters/external-module/child-external-module/main.tf +++ b/provisioner/terraform/testdata/rich-parameters/external-module/child-external-module/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.7.0" + version = "0.22.0" } docker = { source = "kreuzwerker/docker" diff --git a/provisioner/terraform/testdata/rich-parameters/external-module/main.tf b/provisioner/terraform/testdata/rich-parameters/external-module/main.tf index 946e1343451a0..55e942ec24e1f 100644 --- a/provisioner/terraform/testdata/rich-parameters/external-module/main.tf +++ b/provisioner/terraform/testdata/rich-parameters/external-module/main.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" 
- version = "0.7.0" + version = "0.22.0" } docker = { source = "kreuzwerker/docker" diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tf b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tf index 15e8a03d759ec..fc85769c8e9cc 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tf +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tf @@ -2,7 +2,7 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "0.7.0" + version = "0.22.0" } } } diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.dot b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.dot index 2ecfcae1a2b5d..2deee6a1d36a2 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.dot +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.dot @@ -56,4 +56,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json index e3d5497b4d3e1..a881255a41e12 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "planned_values": { "root_module": { "resources": [ @@ -20,14 +20,17 @@ "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [], "metadata": [] } }, @@ -67,14 +70,17 @@ "login_before_ready": true, "metadata": [], "motd_file": null, 
+ "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, "troubleshooting_url": null }, "after_unknown": { + "display_apps": true, "id": true, "init_script": true, "metadata": [], @@ -82,6 +88,7 @@ }, "before_sensitive": false, "after_sensitive": { + "display_apps": [], "metadata": [], "token": true } @@ -111,7 +118,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -126,10 +133,9 @@ "default": null, "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "67c923e2-cb0c-4955-b7bb-cdb8b7fab8be", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "2be3cd75-c44b-482e-8f78-679067d8e0a4", "mutable": false, "name": "Example", "option": [ @@ -147,15 +153,17 @@ } ], "optional": false, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "" }, "sensitive_values": { "option": [ {}, {} - ] + ], + "validation": [] } }, { @@ -169,19 +177,21 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "69ab9bf0-dadf-47ed-8486-b38b8d521c67", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "5a2f0407-8f11-4ac8-980d-75f919959f08", "mutable": false, "name": "number_example", "option": null, "optional": true, + "order": null, "type": "number", - "validation": null, + "validation": [], "value": "4" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "data.coder_parameter.number_example_max_zero", @@ -194,20 +204,22 @@ "default": "-2", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "4dcc41e1-8d07-4018-98df-de5fadce5aa3", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "cf4b28cf-ec3c-4f53-ae27-4733a9f7d71a", "mutable": 
false, "name": "number_example_max_zero", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 0, + "max_disabled": false, "min": -3, + "min_disabled": false, "monotonic": "", "regex": "" } @@ -231,20 +243,22 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "c64e111c-496f-458d-924c-5ee13460f2ee", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "70d63380-2020-4377-ae05-cecb12c0d709", "mutable": false, "name": "number_example_min_max", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 6, + "max_disabled": false, "min": 3, + "min_disabled": false, "monotonic": "", "regex": "" } @@ -268,20 +282,22 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "ecc13f6b-a8bd-423a-8585-ac08882cd25c", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "ec5827c2-2511-4f16-bd85-6249517c9e5b", "mutable": false, "name": "number_example_min_zero", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 6, + "max_disabled": false, "min": 0, + "min_disabled": false, "monotonic": "", "regex": "" } @@ -305,19 +321,21 @@ "default": "ok", "description": "blah blah", "display_name": null, + "ephemeral": false, "icon": null, - "id": "ea6bbb0a-fdf5-46b4-8c68-22b59283fa6d", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "eec8845e-4316-450a-a5b7-eaa9567f469a", "mutable": false, "name": "Sample", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ok" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } } ], "child_modules": [ @@ -334,19 +352,21 @@ "default": "abcdef", "description": "First parameter from module", "display_name": null, + "ephemeral": false, "icon": null, - "id": 
"69e9bbe9-114a-43df-a050-a030efb3b89a", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "3b860d24-85ac-4540-b309-9321e732dfc4", "mutable": true, "name": "First parameter from module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "abcdef" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "module.this_is_external_module.data.coder_parameter.second_parameter_from_module", @@ -359,19 +379,21 @@ "default": "ghijkl", "description": "Second parameter from module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "607e122d-a7fd-4200-834f-c24e0e9a12c5", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "b36105e3-9bf1-43c7-a857-078ef1e8f95d", "mutable": true, "name": "Second parameter from module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ghijkl" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } } ], "address": "module.this_is_external_module", @@ -389,19 +411,21 @@ "default": "abcdef", "description": "First parameter from child module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "8cc5d1b7-391f-43ff-91e6-0293724a915b", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "a2bee9f2-8a3c-404c-839b-01b6cd840707", "mutable": true, "name": "First parameter from child module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "abcdef" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "module.this_is_external_module.module.this_is_external_child_module.data.coder_parameter.child_second_parameter_from_module", @@ -414,19 +438,21 @@ "default": "ghijkl", "description": "Second parameter from child module", "display_name": null, + "ephemeral": false, "icon": null, - "id": 
"dfb6a1c4-82fd-47d3-b58c-65beddcd8b0d", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "deb13c45-ed6d-45b6-b6eb-d319143fa8f2", "mutable": true, "name": "Second parameter from child module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ghijkl" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } } ], "address": "module.this_is_external_module.module.this_is_external_child_module" @@ -442,7 +468,7 @@ "coder": { "name": "coder", "full_name": "registry.terraform.io/coder/coder", - "version_constraint": "0.7.0" + "version_constraint": "0.22.0" }, "module.this_is_external_module:docker": { "name": "docker", @@ -767,5 +793,6 @@ } } }, - "timestamp": "2023-08-30T19:25:30Z" + "timestamp": "2024-05-22T17:03:08Z", + "errored": false } diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.dot b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.dot index 2ecfcae1a2b5d..2deee6a1d36a2 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.dot +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.dot @@ -56,4 +56,3 @@ digraph { "[root] root" -> "[root] provider[\"registry.terraform.io/hashicorp/null\"] (close)" } } - diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json index b53dcd8568cef..a82bb9ea1925c 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.5.5", + "terraform_version": "1.7.5", "values": { "root_module": { "resources": [ @@ -15,10 +15,9 @@ "default": null, "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": 
"3d3a933a-b52b-4b38-bf91-0937615b1b29", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "7fa1e2f7-36a4-49cd-b92a-b3fc8732d359", "mutable": false, "name": "Example", "option": [ @@ -36,15 +35,17 @@ } ], "optional": false, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "" }, "sensitive_values": { "option": [ {}, {} - ] + ], + "validation": [] } }, { @@ -58,19 +59,21 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "f8b06fc2-f0c6-4483-8d10-d4601dfdd787", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "86a60580-7221-4bab-b229-9cb61bdb56a0", "mutable": false, "name": "number_example", "option": null, "optional": true, + "order": null, "type": "number", - "validation": null, + "validation": [], "value": "4" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "data.coder_parameter.number_example_max_zero", @@ -83,20 +86,22 @@ "default": "-2", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "886575fc-1863-49be-9a7d-125077df0ca5", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "ed6bc6e5-b4ff-48b9-88b0-df5faa74ae66", "mutable": false, "name": "number_example_max_zero", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 0, + "max_disabled": false, "min": -3, + "min_disabled": false, "monotonic": "", "regex": "" } @@ -120,20 +125,22 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "feb32685-cfdc-4aed-b8bd-290d7e41822f", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "340b19e1-f651-4321-96b1-7908c2c66914", "mutable": false, "name": "number_example_min_max", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 6, + "max_disabled": false, "min": 3, + "min_disabled": 
false, "monotonic": "", "regex": "" } @@ -157,20 +164,22 @@ "default": "4", "description": null, "display_name": null, + "ephemeral": false, "icon": null, - "id": "a5e72ae7-67f8-442c-837e-cce15f49fff0", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "f19c6763-2e55-40dd-9b49-82e9181e5b1b", "mutable": false, "name": "number_example_min_zero", "option": null, "optional": true, + "order": null, "type": "number", "validation": [ { "error": "", "max": 6, + "max_disabled": false, "min": 0, + "min_disabled": false, "monotonic": "", "regex": "" } @@ -194,19 +203,21 @@ "default": "ok", "description": "blah blah", "display_name": null, + "ephemeral": false, "icon": null, - "id": "1dcf470f-25f5-4c1d-a68e-1833f8239591", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "02169810-8080-4dc6-a656-5fbda745659e", "mutable": false, "name": "Sample", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ok" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "coder_agent.dev", @@ -220,21 +231,35 @@ "auth": "token", "connection_timeout": 120, "dir": null, + "display_apps": [ + { + "port_forwarding_helper": true, + "ssh_helper": true, + "vscode": true, + "vscode_insiders": false, + "web_terminal": true + } + ], "env": null, - "id": "126f2d92-9556-4187-be69-5827ba3e7ddd", + "id": "42edc650-ddb6-4ed9-9624-7788d60d1507", "init_script": "", "login_before_ready": true, "metadata": [], "motd_file": null, + "order": null, "os": "windows", "shutdown_script": null, "shutdown_script_timeout": 300, "startup_script": null, + "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e2021a4f-4db5-4e26-8ecd-c4b6c6e79e92", + "token": "c767a648-e670-4c6b-a28b-8559033e92a7", "troubleshooting_url": null }, "sensitive_values": { + "display_apps": [ + {} + ], "metadata": [], "token": true } @@ -247,7 +272,7 @@ "provider_name": 
"registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3170372688900630060", + "id": "7506678111935039701", "triggers": null }, "sensitive_values": {}, @@ -270,19 +295,21 @@ "default": "abcdef", "description": "First parameter from module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "24874d90-5faf-4574-b54d-01a12e25159d", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "11b1ae03-cf81-4f60-9be1-bd4c0586516d", "mutable": true, "name": "First parameter from module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "abcdef" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } }, { "address": "module.this_is_external_module.data.coder_parameter.second_parameter_from_module", @@ -295,19 +322,21 @@ "default": "ghijkl", "description": "Second parameter from module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "015a8629-347a-43f9-ba79-33d895f3b5b7", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "79d87261-bfda-46ee-958d-7d62252101ad", "mutable": true, "name": "Second parameter from module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ghijkl" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } } ], "address": "module.this_is_external_module", @@ -325,19 +354,21 @@ "default": "abcdef", "description": "First parameter from child module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "85793115-42a5-4e52-be7b-77dcf337ffb6", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "30c4c518-116a-4591-a571-886101cfcdfa", "mutable": true, "name": "First parameter from child module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "abcdef" }, - "sensitive_values": {} + 
"sensitive_values": { + "validation": [] + } }, { "address": "module.this_is_external_module.module.this_is_external_child_module.data.coder_parameter.child_second_parameter_from_module", @@ -350,19 +381,21 @@ "default": "ghijkl", "description": "Second parameter from child module", "display_name": null, + "ephemeral": false, "icon": null, - "id": "7754596b-a8b1-4a64-9ff1-27dd9473924c", - "legacy_variable": null, - "legacy_variable_name": null, + "id": "4c7d9f15-da45-453e-85eb-1d22c9baa54c", "mutable": true, "name": "Second parameter from child module", "option": null, "optional": true, + "order": null, "type": "string", - "validation": null, + "validation": [], "value": "ghijkl" }, - "sensitive_values": {} + "sensitive_values": { + "validation": [] + } } ], "address": "module.this_is_external_module.module.this_is_external_child_module" diff --git a/provisioner/terraform/testdata/version.txt b/provisioner/terraform/testdata/version.txt new file mode 100644 index 0000000000000..6a126f402d53d --- /dev/null +++ b/provisioner/terraform/testdata/version.txt @@ -0,0 +1 @@ +1.7.5 From a40e954afce8e0c1bd8a2ad813171e2b039947cd Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Wed, 22 May 2024 14:01:11 -0500 Subject: [PATCH 092/149] chore(docs): update k8s mainline version (#13346) --- docs/install/kubernetes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/install/kubernetes.md b/docs/install/kubernetes.md index de8de4cf93b5a..0b6d01a150297 100644 --- a/docs/install/kubernetes.md +++ b/docs/install/kubernetes.md @@ -134,7 +134,7 @@ locally in order to log in and manage templates. 
helm install coder coder-v2/coder \ --namespace coder \ --values values.yaml \ - --version 2.11.1 + --version 2.11.2 ``` For the **stable** Coder release: From fa9edc1f427d6afd98af0abe1cac38e445708a5f Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Wed, 22 May 2024 14:28:21 -0500 Subject: [PATCH 093/149] chore(scripts): remove `gh_auth` from `release.sh` (#13347) It breaks the `gh` cli for creating workflows. --- scripts/release.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/release.sh b/scripts/release.sh index 01da3c7728ec5..c2500aee2ec7d 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -113,9 +113,6 @@ done # Check dependencies. dependencies gh jq sort -# Authenticate gh CLI -gh_auth - if [[ -z $increment ]]; then # Default to patch versions. increment="patch" From c2837a62e471f03650d1416194dfbcdc4fd4a730 Mon Sep 17 00:00:00 2001 From: Marcin Tojek Date: Thu, 23 May 2024 09:53:51 +0200 Subject: [PATCH 094/149] feat: evaluate provisioner tags (#13333) --- coderd/wsbuilder/wsbuilder.go | 139 +++++++++++++++++++++++++++-- coderd/wsbuilder/wsbuilder_test.go | 133 ++++++++++++++++++++++++--- 2 files changed, 252 insertions(+), 20 deletions(-) diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index b34eb9ce3c858..9e8de1d688768 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -10,6 +10,10 @@ import ( "net/http" "time" + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/zclconf/go-cty/cty" + "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/provisionersdk" @@ -55,14 +59,17 @@ type Builder struct { store database.Store // cache of objects, so we only fetch once - template *database.Template - templateVersion *database.TemplateVersion - templateVersionJob *database.ProvisionerJob - templateVersionParameters *[]database.TemplateVersionParameter - lastBuild *database.WorkspaceBuild - lastBuildErr *error - lastBuildParameters 
*[]database.WorkspaceBuildParameter - lastBuildJob *database.ProvisionerJob + template *database.Template + templateVersion *database.TemplateVersion + templateVersionJob *database.ProvisionerJob + templateVersionParameters *[]database.TemplateVersionParameter + templateVersionWorkspaceTags *[]database.TemplateVersionWorkspaceTag + lastBuild *database.WorkspaceBuild + lastBuildErr *error + lastBuildParameters *[]database.WorkspaceBuildParameter + lastBuildJob *database.ProvisionerJob + parameterNames *[]string + parameterValues *[]string verifyNoLegacyParametersOnce bool } @@ -297,7 +304,11 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object if err != nil { return nil, nil, BuildError{http.StatusInternalServerError, "marshal metadata", err} } - tags := provisionersdk.MutateTags(b.workspace.OwnerID, templateVersionJob.Tags) + + tags, err := b.getProvisionerTags() + if err != nil { + return nil, nil, err // already wrapped BuildError + } now := dbtime.Now() provisionerJob, err := b.store.InsertProvisionerJob(b.ctx, database.InsertProvisionerJobParams{ @@ -364,6 +375,7 @@ func (b *Builder) buildTx(authFunc func(action policy.Action, object rbac.Object // getParameters already wraps errors in BuildError return err } + err = store.InsertWorkspaceBuildParameters(b.ctx, database.InsertWorkspaceBuildParametersParams{ WorkspaceBuildID: workspaceBuildID, Name: names, @@ -502,6 +514,10 @@ func (b *Builder) getState() ([]byte, error) { } func (b *Builder) getParameters() (names, values []string, err error) { + if b.parameterNames != nil { + return *b.parameterNames, *b.parameterValues, nil + } + templateVersionParameters, err := b.getTemplateVersionParameters() if err != nil { return nil, nil, BuildError{http.StatusInternalServerError, "failed to fetch template version parameters", err} @@ -535,6 +551,9 @@ func (b *Builder) getParameters() (names, values []string, err error) { names = append(names, templateVersionParameter.Name) values = 
append(values, value) } + + b.parameterNames = &names + b.parameterValues = &values return names, values, nil } @@ -632,6 +651,108 @@ func (b *Builder) getLastBuildJob() (*database.ProvisionerJob, error) { return b.lastBuildJob, nil } +func (b *Builder) getProvisionerTags() (map[string]string, error) { + // Step 1: Mutate template version tags + templateVersionJob, err := b.getTemplateVersionJob() + if err != nil { + return nil, BuildError{http.StatusInternalServerError, "failed to fetch template version job", err} + } + annotationTags := provisionersdk.MutateTags(b.workspace.OwnerID, templateVersionJob.Tags) + + tags := map[string]string{} + for name, value := range annotationTags { + tags[name] = value + } + + // Step 2: Mutate workspace tags + workspaceTags, err := b.getTemplateVersionWorkspaceTags() + if err != nil { + return nil, BuildError{http.StatusInternalServerError, "failed to fetch template version workspace tags", err} + } + parameterNames, parameterValues, err := b.getParameters() + if err != nil { + return nil, err // already wrapped BuildError + } + + evalCtx := buildParametersEvalContext(parameterNames, parameterValues) + for _, workspaceTag := range workspaceTags { + expr, diags := hclsyntax.ParseExpression([]byte(workspaceTag.Value), "expression.hcl", hcl.InitialPos) + if diags.HasErrors() { + return nil, BuildError{http.StatusBadRequest, "failed to parse workspace tag value", xerrors.Errorf(diags.Error())} + } + + val, diags := expr.Value(evalCtx) + if diags.HasErrors() { + return nil, BuildError{http.StatusBadRequest, "failed to evaluate workspace tag value", xerrors.Errorf(diags.Error())} + } + + // Do not use "val.AsString()" as it can panic + str, err := ctyValueString(val) + if err != nil { + return nil, BuildError{http.StatusBadRequest, "failed to marshal cty.Value as string", err} + } + tags[workspaceTag.Key] = str + } + return tags, nil +} + +func buildParametersEvalContext(names, values []string) *hcl.EvalContext { + m := 
map[string]cty.Value{} + for i, name := range names { + m[name] = cty.MapVal(map[string]cty.Value{ + "value": cty.StringVal(values[i]), + }) + } + + if len(m) == 0 { + return nil // otherwise, panic: must not call MapVal with empty map + } + + return &hcl.EvalContext{ + Variables: map[string]cty.Value{ + "data": cty.MapVal(map[string]cty.Value{ + "coder_parameter": cty.MapVal(m), + }), + }, + } +} + +func ctyValueString(val cty.Value) (string, error) { + switch val.Type() { + case cty.Bool: + if val.True() { + return "true", nil + } else { + return "false", nil + } + case cty.Number: + return val.AsBigFloat().String(), nil + case cty.String: + return val.AsString(), nil + default: + return "", xerrors.Errorf("only primitive types are supported - bool, number, and string") + } +} + +func (b *Builder) getTemplateVersionWorkspaceTags() ([]database.TemplateVersionWorkspaceTag, error) { + if b.templateVersionWorkspaceTags != nil { + return *b.templateVersionWorkspaceTags, nil + } + + templateVersion, err := b.getTemplateVersion() + if err != nil { + return nil, xerrors.Errorf("get template version: %w", err) + } + + workspaceTags, err := b.store.GetTemplateVersionWorkspaceTags(b.ctx, templateVersion.ID) + if err != nil && !xerrors.Is(err, sql.ErrNoRows) { + return nil, xerrors.Errorf("get template version workspace tags: %w", err) + } + + b.templateVersionWorkspaceTags = &workspaceTags + return *b.templateVersionWorkspaceTags, nil +} + // authorize performs build authorization pre-checks using the provided authFunc func (b *Builder) authorize(authFunc func(action policy.Action, object rbac.Objecter) bool) error { // Doing this up front saves a lot of work if the user doesn't have permission. 
diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index f1c7e6b62a493..ad53cd7d45609 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -60,6 +60,7 @@ func TestBuilder_NoOptions(t *testing.T) { withLastBuildFound, withRichParameters(nil), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) { @@ -112,6 +113,7 @@ func TestBuilder_Initiator(t *testing.T) { withLastBuildFound, withRichParameters(nil), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) { @@ -154,6 +156,7 @@ func TestBuilder_Baggage(t *testing.T) { withLastBuildFound, withRichParameters(nil), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) { @@ -188,9 +191,10 @@ func TestBuilder_Reason(t *testing.T) { withLastBuildFound, withRichParameters(nil), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs - expectProvisionerJob(func(job database.InsertProvisionerJobParams) { + expectProvisionerJob(func(_ database.InsertProvisionerJobParams) { }), withInTx, expectBuild(func(bld database.InsertWorkspaceBuildParams) { @@ -221,6 +225,7 @@ func TestBuilder_ActiveVersion(t *testing.T) { withActiveVersion(nil), withLastBuildNotFound, withParameterSchemas(activeJobID, nil), + withWorkspaceTags(activeVersionID, nil), // previous rich parameters are not queried because there is no previous build. 
// Outputs @@ -246,6 +251,102 @@ func TestBuilder_ActiveVersion(t *testing.T) { req.NoError(err) } +func TestWorkspaceBuildWithTags(t *testing.T) { + t.Parallel() + + asrt := assert.New(t) + req := require.New(t) + + workspaceTags := []database.TemplateVersionWorkspaceTag{ + { + Key: "fruits_tag", + Value: "data.coder_parameter.number_of_apples.value + data.coder_parameter.number_of_oranges.value", + }, + { + Key: "cluster_tag", + Value: `"best_developers"`, + }, + { + Key: "project_tag", + Value: `"${data.coder_parameter.project.value}+12345"`, + }, + { + Key: "team_tag", + Value: `data.coder_parameter.team.value`, + }, + { + Key: "yes_or_no", + Value: `data.coder_parameter.is_debug_build.value`, + }, + { + Key: "actually_no", + Value: `!data.coder_parameter.is_debug_build.value`, + }, + { + Key: "is_debug_build", + Value: `data.coder_parameter.is_debug_build.value == "true" ? "in-debug-mode" : "no-debug"`, + }, + } + + richParameters := []database.TemplateVersionParameter{ + // Parameters can be mutable although it is discouraged as the workspace can be moved between provisioner nodes. 
+ {Name: "project", Description: "This is first parameter", Mutable: true, Options: json.RawMessage("[]")}, + {Name: "team", Description: "This is second parameter", Mutable: true, DefaultValue: "godzilla", Options: json.RawMessage("[]")}, + {Name: "is_debug_build", Type: "bool", Description: "This is third parameter", Mutable: false, DefaultValue: "false", Options: json.RawMessage("[]")}, + {Name: "number_of_apples", Type: "number", Description: "This is fourth parameter", Mutable: false, DefaultValue: "4", Options: json.RawMessage("[]")}, + {Name: "number_of_oranges", Type: "number", Description: "This is fifth parameter", Mutable: false, DefaultValue: "6", Options: json.RawMessage("[]")}, + } + + buildParameters := []codersdk.WorkspaceBuildParameter{ + {Name: "project", Value: "foobar-foobaz"}, + {Name: "is_debug_build", Value: "true"}, + // Parameters "team", "number_of_apples", "number_of_oranges" are skipped, so default value is selected + } + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + mDB := expectDB(t, + // Inputs + withTemplate, + withInactiveVersion(richParameters), + withLastBuildFound, + withRichParameters(nil), + withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, workspaceTags), + + // Outputs + expectProvisionerJob(func(job database.InsertProvisionerJobParams) { + asrt.Len(job.Tags, 10) + + expected := database.StringMap{ + "actually_no": "false", + "cluster_tag": "best_developers", + "fruits_tag": "10", + "is_debug_build": "in-debug-mode", + "project_tag": "foobar-foobaz+12345", + "team_tag": "godzilla", + "yes_or_no": "true", + + "scope": "user", + "version": "inactive", + "owner": userID.String(), + } + asrt.Equal(job.Tags, expected) + }), + withInTx, + expectBuild(func(_ database.InsertWorkspaceBuildParams) {}), + expectBuildParameters(func(_ database.InsertWorkspaceBuildParametersParams) { + }), + withBuild, + ) + + ws := database.Workspace{ID: workspaceID, TemplateID: templateID, 
OwnerID: userID} + uut := wsbuilder.New(ws, database.WorkspaceTransitionStart).RichParameterValues(buildParameters) + _, _, err := uut.Build(ctx, mDB, nil, audit.WorkspaceBuildBaggage{}) + req.NoError(err) +} + func TestWorkspaceBuildWithRichParameters(t *testing.T) { t.Parallel() @@ -302,6 +403,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), @@ -345,6 +447,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), @@ -394,11 +497,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(nil), withParameterSchemas(inactiveJobID, schemas), - - // Outputs - expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), - withInTx, - expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), + withWorkspaceTags(inactiveVersionID, nil), ) ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID} @@ -429,13 +528,10 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(inactiveJobID, nil), + withWorkspaceTags(inactiveVersionID, nil), // Outputs - expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), - withInTx, - expectBuild(func(bld database.InsertWorkspaceBuildParams) {}), - // no build parameters, since we hit an error validating. 
- // expectBuildParameters(func(params database.InsertWorkspaceBuildParametersParams) {}), + // no transaction, since we failed fast while validation build parameters ) ws := database.Workspace{ID: workspaceID, TemplateID: templateID, OwnerID: userID} @@ -482,6 +578,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(activeJobID, nil), + withWorkspaceTags(activeVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), @@ -542,6 +639,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(activeJobID, nil), + withWorkspaceTags(activeVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), @@ -600,6 +698,7 @@ func TestWorkspaceBuildWithRichParameters(t *testing.T) { withLastBuildFound, withRichParameters(initialBuildParameters), withParameterSchemas(activeJobID, nil), + withWorkspaceTags(activeVersionID, nil), // Outputs expectProvisionerJob(func(job database.InsertProvisionerJobParams) {}), @@ -813,6 +912,18 @@ func withRichParameters(params []database.WorkspaceBuildParameter) func(mTx *dbm } } +func withWorkspaceTags(versionID uuid.UUID, tags []database.TemplateVersionWorkspaceTag) func(mTx *dbmock.MockStore) { + return func(mTx *dbmock.MockStore) { + c := mTx.EXPECT().GetTemplateVersionWorkspaceTags(gomock.Any(), versionID). + Times(1) + if len(tags) > 0 { + c.Return(tags, nil) + } else { + c.Return(nil, sql.ErrNoRows) + } + } +} + // Since there is expected to be only one each of job, build, and build-parameters inserted, instead // of building matchers, we match any call and then assert its parameters. This will feel // more familiar to the way we write other tests. 
From c67eba10d50c3cfa4371de4040ac82b9349fda07 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 23 May 2024 10:00:23 +0100 Subject: [PATCH 095/149] chore: update scale docs to include guidelines for wsproxies (#13350) --- docs/admin/architectures/index.md | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/docs/admin/architectures/index.md b/docs/admin/architectures/index.md index 5b147b76e7e6d..85c06a650dee9 100644 --- a/docs/admin/architectures/index.md +++ b/docs/admin/architectures/index.md @@ -210,8 +210,9 @@ Inactive users do not consume Coder resources. When determining scaling requirements, consider the following factors: -- `1 vCPU x 2 GB memory x 250 users`: A reasonable formula to determine resource - allocation based on the number of users and their expected usage patterns. +- `1 vCPU x 2 GB memory` for every 250 users: A reasonable formula to determine + resource allocation based on the number of users and their expected usage + patterns. - API latency/response time: Monitor API latency and response times to ensure optimal performance under varying loads. - Average number of HTTP requests: Track the average number of HTTP requests to @@ -239,6 +240,14 @@ We recommend disabling the autoscaling for `coderd` nodes. Autoscaling can cause interruptions for user connections, see [Autoscaling](../scale.md#autoscaling) for more details. +### Control plane: Workspace Proxies + +When scaling [workspace proxies](../workspace-proxies.md), follow the same +guidelines as for `coderd` above: + +- `1 vCPU x 2 GB memory` for every 250 users. +- Disable autoscaling. + ### Control plane: provisionerd Each external provisioner can run a single concurrent workspace build. 
For From b43344b6720d6cffcca7a7707f0318dff3191ac9 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Thu, 23 May 2024 16:22:44 +0400 Subject: [PATCH 096/149] feat: use latest gVisor and go 1.22.3 (#13338) --- .github/actions/setup-go/action.yaml | 2 +- .github/workflows/ci.yaml | 2 +- cli/server_internal_test.go | 4 ++-- coderd/apidoc/docs.go | 2 +- coderd/apidoc/swagger.json | 2 +- coderd/devtunnel/tunnel.go | 2 +- docs/api/schemas.md | 2 +- dogfood/Dockerfile | 2 +- flake.lock | 18 +++++++------- flake.nix | 6 ++--- go.mod | 35 +++++++--------------------- go.sum | 22 ++++++++--------- 12 files changed, 40 insertions(+), 59 deletions(-) diff --git a/.github/actions/setup-go/action.yaml b/.github/actions/setup-go/action.yaml index a05fc271ebf37..c53ce28c3c591 100644 --- a/.github/actions/setup-go/action.yaml +++ b/.github/actions/setup-go/action.yaml @@ -4,7 +4,7 @@ description: | inputs: version: description: "The Go version to use." - default: "1.21.9" + default: "1.22.3" runs: using: "composite" steps: diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8f89a4b3fa3ea..e092cef28ab02 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -922,7 +922,7 @@ jobs: uses: actions/dependency-review-action@v4.3.2 with: allow-licenses: Apache-2.0, BSD-2-Clause, BSD-3-Clause, CC0-1.0, ISC, MIT, MIT-0, MPL-2.0 - allow-dependencies-licenses: "pkg:golang/github.com/pelletier/go-toml/v2" + allow-dependencies-licenses: "pkg:golang/github.com/coder/wgtunnel@0.1.13-0.20240522110300-ade90dfb2da0" license-check: true vulnerability-check: false - name: "Report" diff --git a/cli/server_internal_test.go b/cli/server_internal_test.go index 72ca6f3a644e1..4e4f3b01c6ce5 100644 --- a/cli/server_internal_test.go +++ b/cli/server_internal_test.go @@ -141,8 +141,8 @@ func Test_configureCipherSuites(t *testing.T) { name: "TLSUnsupported", minTLS: tls.VersionTLS10, maxTLS: tls.VersionTLS13, - // TLS_RSA_WITH_AES_128_GCM_SHA256 only supports tls 1.2 - 
inputCiphers: []string{"TLS_RSA_WITH_AES_128_GCM_SHA256"}, + // TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 only supports tls 1.2 + inputCiphers: []string{"TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256"}, wantErr: "no tls ciphers supported for tls versions", }, { diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 34c4c6b529d19..af8492ae15961 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -14489,7 +14489,7 @@ const docTemplate = `{ "type": "string" }, "host": { - "description": "host or host:port", + "description": "host or host:port (see Hostname and Port methods)", "type": "string" }, "omitHost": { diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 43aacb5e0cc32..e68a61af19788 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -13205,7 +13205,7 @@ "type": "string" }, "host": { - "description": "host or host:port", + "description": "host or host:port (see Hostname and Port methods)", "type": "string" }, "omitHost": { diff --git a/coderd/devtunnel/tunnel.go b/coderd/devtunnel/tunnel.go index 89ceace6e4849..d1f3c75c3d6da 100644 --- a/coderd/devtunnel/tunnel.go +++ b/coderd/devtunnel/tunnel.go @@ -11,8 +11,8 @@ import ( "time" "github.com/briandowns/spinner" + "github.com/tailscale/wireguard-go/device" "golang.org/x/xerrors" - "golang.zx2c4.com/wireguard/device" "cdr.dev/slog" "github.com/coder/coder/v2/cli/cliui" diff --git a/docs/api/schemas.md b/docs/api/schemas.md index 67fb461ee1b0b..eb2f7cbeb8aa1 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -8923,7 +8923,7 @@ _None_ | ------------- | ---------------------------- | -------- | ------------ | -------------------------------------------------- | | `forceQuery` | boolean | false | | append a query ('?') even if RawQuery is empty | | `fragment` | string | false | | fragment for references, without '#' | -| `host` | string | false | | host or host:port | +| `host` | string | false | | host or host:port (see Hostname and Port 
methods) | | `omitHost` | boolean | false | | do not emit empty host (authority) | | `opaque` | string | false | | encoded opaque data | | `path` | string | false | | path (relative paths may omit leading slash) | diff --git a/dogfood/Dockerfile b/dogfood/Dockerfile index 735b87dea27f7..4aa46e83c8fd7 100644 --- a/dogfood/Dockerfile +++ b/dogfood/Dockerfile @@ -8,7 +8,7 @@ FROM ubuntu:jammy AS go RUN apt-get update && apt-get install --yes curl gcc # Install Go manually, so that we can control the version -ARG GO_VERSION=1.21.9 +ARG GO_VERSION=1.22.3 RUN mkdir --parents /usr/local/go # Boring Go is needed to build FIPS-compliant binaries. diff --git a/flake.lock b/flake.lock index 8a460beeb9782..c6853aa477736 100644 --- a/flake.lock +++ b/flake.lock @@ -61,11 +61,11 @@ "systems": "systems_3" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1701680307, + "narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "4022d587cbbfd70fe950c1e2083a02621806a725", "type": "github" }, "original": { @@ -91,11 +91,11 @@ }, "nixpkgs_2": { "locked": { - "lastModified": 1712439257, - "narHash": "sha256-aSpiNepFOMk9932HOax0XwNxbA38GOUVOiXfUVPOrck=", + "lastModified": 1715087517, + "narHash": "sha256-CLU5Tsg24Ke4+7sH8azHWXKd0CFd4mhLWfhYgUiDBpQ=", "owner": "nixos", "repo": "nixpkgs", - "rev": "ff0dbd94265ac470dda06a657d5fe49de93b4599", + "rev": "b211b392b8486ee79df6cdfb1157ad2133427a29", "type": "github" }, "original": { @@ -107,11 +107,11 @@ }, "nixpkgs_3": { "locked": { - "lastModified": 1714906307, - "narHash": "sha256-UlRZtrCnhPFSJlDQE7M0eyhgvuuHBTe1eJ9N9AQlJQ0=", + "lastModified": 1702151865, + "narHash": "sha256-9VAt19t6yQa7pHZLDbil/QctAgVsA66DLnzdRGqDisg=", "owner": "nixos", "repo": "nixpkgs", - "rev": "25865a40d14b3f9cf19f19b924e2ab4069b09588", + "rev": 
"666fc80e7b2afb570462423cb0e1cf1a3a34fedd", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index fb33091b2f60c..c8262aa7432cc 100644 --- a/flake.nix +++ b/flake.nix @@ -39,7 +39,7 @@ git gnumake gnused - go_1_21 + go_1_22 go-migrate golangci-lint gopls @@ -93,11 +93,11 @@ # To make faster subsequent builds, you could extract the `.zst` # slim bundle into it's own derivation. buildFat = osArch: - pkgs.buildGo121Module { + pkgs.buildGo122Module { name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! - vendorHash = "sha256-tBczWqmpIfr8zwftHl/W3nw3qiSei+aIw3fZmtl0SwI="; + vendorHash = "sha256-YOXZ3tJAky2XmNH81MNj3eNq95ucGFUJIC0O8Z+xk20="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index e67821bcdfb52..a23642db50da4 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/coder/coder/v2 -go 1.21.4 +go 1.22.3 // Required until a v3 of chroma is created to lazily initialize all XML files. // None of our dependencies seem to use the registries anyways, so this @@ -42,28 +42,12 @@ replace github.com/dlclark/regexp2 => github.com/dlclark/regexp2 v1.7.0 // There are a few minor changes we make to Tailscale that we're slowly upstreaming. Compare here: // https://github.com/tailscale/tailscale/compare/main...coder:tailscale:main -replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240501025849-d8a4721c3162 +replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39 -// Fixes a race-condition in coder/wgtunnel. -// Upstream PR: https://github.com/WireGuard/wireguard-go/pull/85 -replace golang.zx2c4.com/wireguard => github.com/coder/wireguard-go v0.0.0-20230920225835-b7d43c468619 - -// This is replaced to include a fix that causes a deadlock when closing the -// wireguard network. 
-// The branch used is from https://github.com/coder/wireguard-go/tree/colin/tailscale -// It is based on https://github.com/tailscale/wireguard-go/tree/tailscale, but -// includes the upstream fix https://github.com/WireGuard/wireguard-go/commit/b7cd547315bed421a648d0a0f1ee5a0fc1b1151e -replace github.com/tailscale/wireguard-go => github.com/coder/wireguard-go v0.0.0-20230807234434-d825b45ccbf5 - -// Use our tempfork of gvisor that includes a fix for TCP connection stalls: -// https://github.com/coder/coder/issues/7388 -// The basis for this fork is: gvisor.dev/gvisor v0.0.0-20230504175454-7b0a1988a28f -// This is the same version as used by Tailscale `main`: -// https://github.com/tailscale/tailscale/blob/c19b5bfbc391637b11c2acb3c725909a0046d849/go.mod#L88 -// -// Latest gvisor otherwise has refactored packages and is currently incompatible with -// Tailscale, to remove our tempfork this needs to be addressed. -replace gvisor.dev/gvisor => github.com/coder/gvisor v0.0.0-20230714132058-be2e4ac102c3 +// This is replaced to include +// 1. a fix for a data race: c.f. https://github.com/tailscale/wireguard-go/pull/25 +// 2. update to the latest gVisor +replace github.com/tailscale/wireguard-go => github.com/coder/wireguard-go v0.0.0-20240522052547-769cdd7f7818 // Switch to our fork that imports fixes from http://github.com/tailscale/ssh. 
// See: https://github.com/coder/coder/issues/3371 @@ -105,7 +89,7 @@ require ( github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 github.com/coder/retry v1.5.1 github.com/coder/terraform-provider-coder v0.22.0 - github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a + github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 github.com/coreos/go-oidc/v3 v3.10.0 github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf github.com/creack/pty v1.1.21 @@ -199,14 +183,13 @@ require ( golang.org/x/text v0.15.0 golang.org/x/tools v0.21.0 golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 - golang.zx2c4.com/wireguard v0.0.0-20230704135630-469159ecf7d1 google.golang.org/api v0.180.0 google.golang.org/grpc v1.63.2 google.golang.org/protobuf v1.34.1 gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v3 v3.0.1 - gvisor.dev/gvisor v0.0.0-20230504175454-7b0a1988a28f + gvisor.dev/gvisor v0.0.0-20240509041132-65b30f7869dc nhooyr.io/websocket v1.8.7 storj.io/drpc v0.0.33 tailscale.com v1.46.1 @@ -395,7 +378,7 @@ require ( github.com/tailscale/golang-x-crypto v0.0.0-20230713185742-f0b76a10a08e // indirect github.com/tailscale/goupnp v1.0.1-0.20210804011211-c64d0f06ea05 // indirect github.com/tailscale/netlink v1.1.1-0.20211101221916-cabfb018fe85 - github.com/tailscale/wireguard-go v0.0.0-20230710185534-bb2c8f22eccf // indirect + github.com/tailscale/wireguard-go v0.0.0-20231121184858-cc193a0b3272 github.com/tchap/go-patricia/v2 v2.3.1 // indirect github.com/tcnksm/go-httpstat v0.2.0 // indirect github.com/tdewolff/parse/v2 v2.7.12 // indirect diff --git a/go.sum b/go.sum index 8c5ef8240b246..e92cc869ed29d 100644 --- a/go.sum +++ b/go.sum @@ -188,8 +188,8 @@ github.com/chromedp/chromedp v0.9.2 h1:dKtNz4kApb06KuSXoTQIyUC2TrA0fhGDwNZf3bcgf github.com/chromedp/chromedp v0.9.2/go.mod h1:LkSXJKONWTCHAfQasKFUZI+mxqS4tZqhmtGzzhLsnLs= github.com/chromedp/sysutil v1.0.0 
h1:+ZxhTpfpZlmchB58ih/LBHX52ky7w2VhQVKQMucy3Ic= github.com/chromedp/sysutil v1.0.0/go.mod h1:kgWmDdq8fTzXYcKIBqIYvRRTnYb9aNS9moAV0xufSww= -github.com/cilium/ebpf v0.11.0 h1:V8gS/bTCCjX9uUnkUFUpPsksM8n1lXBAvHcpiFk1X2Y= -github.com/cilium/ebpf v0.11.0/go.mod h1:WE7CZAnqOL2RouJ4f1uyNhqr2P4CCvXFIqdRDUgWsVs= +github.com/cilium/ebpf v0.12.3 h1:8ht6F9MquybnY97at+VDZb3eQQr8ev79RueWeVaEcG4= +github.com/cilium/ebpf v0.12.3/go.mod h1:TctK1ivibvI3znr66ljgi4hqOT8EYQjz1KWBfb1UVgM= github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/cli/safeexec v1.0.0/go.mod h1:Z/D4tTN8Vs5gXYHDCbaM1S/anmEDnJb1iW0+EJ5zx3Q= @@ -207,8 +207,6 @@ github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322 h1:m0lPZjlQ7vdVp github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322/go.mod h1:rOLFDDVKVFiDqZFXoteXc97YXx7kFi9kYqR+2ETPkLQ= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136 h1:0RgB61LcNs24WOxc3PBvygSNTQurm0PYPujJjLLOzs0= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136/go.mod h1:VkD1P761nykiq75dz+4iFqIQIZka189tx1BQLOp0Skc= -github.com/coder/gvisor v0.0.0-20230714132058-be2e4ac102c3 h1:gtuDFa+InmMVUYiurBV+XYu24AeMGv57qlZ23i6rmyE= -github.com/coder/gvisor v0.0.0-20230714132058-be2e4ac102c3/go.mod h1:pzr6sy8gDLfVmDAg8OYrlKvGEHw5C3PGTiBXBTCx76Q= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc= github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= @@ -217,16 +215,14 @@ github.com/coder/serpent v0.7.0 h1:zGpD2GlF3lKIVkMjNGKbkip88qzd5r/TRcc30X/SrT0= github.com/coder/serpent v0.7.0/go.mod h1:REkJ5ZFHQUWFTPLExhXYZ1CaHFjxvGNRlLXLdsI08YA= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 
h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= -github.com/coder/tailscale v1.1.1-0.20240501025849-d8a4721c3162 h1:1uXQe8UAXMjvXYQ+XtpCT3LjbideQI3xpeejN9kh56A= -github.com/coder/tailscale v1.1.1-0.20240501025849-d8a4721c3162/go.mod h1:L8tPrwSi31RAMEMV8rjb0vYTGs7rXt8rAHbqY/p41j4= +github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39 h1:v3x8FBqk45mbBybU1QrQy7CGiUpQDPBJT0C5g8bfGHE= +github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39/go.mod h1:rp6BIJxCp127/hvvDWNkHC9MxAlKvQfoOtBr8s5sCqo= github.com/coder/terraform-provider-coder v0.22.0 h1:L72WFa9/6sc/nnXENPS8LpWi/2NBV+DRUW0WT//pEaU= github.com/coder/terraform-provider-coder v0.22.0/go.mod h1:wMun9UZ9HT2CzF6qPPBup1odzBpVUc0/xSFoXgdI3tk= -github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a h1:KhR9LUVllMZ+e9lhubZ1HNrtJDgH5YLoTvpKwmrGag4= -github.com/coder/wgtunnel v0.1.13-0.20231127054351-578bfff9b92a/go.mod h1:QzfptVUdEO+XbkzMKx1kw13i9wwpJlfI1RrZ6SNZ0hA= -github.com/coder/wireguard-go v0.0.0-20230807234434-d825b45ccbf5 h1:eDk/42Kj4xN4yfE504LsvcFEo3dWUiCOaBiWJ2uIH2A= -github.com/coder/wireguard-go v0.0.0-20230807234434-d825b45ccbf5/go.mod h1:QRIcq2+DbdIC5sKh/gcAZhuqu6WT6L6G8/ALPN5wqYw= -github.com/coder/wireguard-go v0.0.0-20230920225835-b7d43c468619 h1:Ug4+d7ooZNjQPVHL+zrHF2hLCr0FOpxHdB2Urr77VmY= -github.com/coder/wireguard-go v0.0.0-20230920225835-b7d43c468619/go.mod h1:tqur9LnfstdR9ep2LaJT4lFUl0EjlHtge+gAjmsHUG4= +github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 h1:C2/eCr+r0a5Auuw3YOiSyLNHkdMtyCZHPFBx7syN4rk= +github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0/go.mod h1:qANbdpqyAGlo2bg+4gQKPj24H1ZWa3bQU2Q5/bV5B3Y= +github.com/coder/wireguard-go v0.0.0-20240522052547-769cdd7f7818 h1:bNhUTaKl3q0bFn78bBRq7iIwo72kNTvUD9Ll5TTzDDk= +github.com/coder/wireguard-go v0.0.0-20240522052547-769cdd7f7818/go.mod h1:fAlLM6hUgnf4Sagxn2Uy5Us0PBgOYWz+63HwHUVGEbw= 
github.com/containerd/continuity v0.4.2 h1:v3y/4Yz5jwnvqPKJJ+7Wf93fyWoCB3F5EclWG023MDM= github.com/containerd/continuity v0.4.2/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ= github.com/coreos/go-iptables v0.6.0 h1:is9qnZMPYjLd8LYqmm/qlE+wwEgJIkTYdhV3rfZo4jk= @@ -1217,6 +1213,8 @@ gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= gotest.tools/v3 v3.4.0 h1:ZazjZUfuVeZGLAmlKKuyv3IKP5orXcwtOwDQH6YVr6o= gotest.tools/v3 v3.4.0/go.mod h1:CtbdzLSsqVhDgMtKsx03ird5YTGB3ar27v0u/yKBW5g= +gvisor.dev/gvisor v0.0.0-20240509041132-65b30f7869dc h1:DXLLFYv/k/xr0rWcwVEvWme1GR36Oc4kNMspg38JeiE= +gvisor.dev/gvisor v0.0.0-20240509041132-65b30f7869dc/go.mod h1:sxc3Uvk/vHcd3tj7/DHVBoR5wvWT/MmRq2pj7HRJnwU= honnef.co/go/gotraceui v0.2.0 h1:dmNsfQ9Vl3GwbiVD7Z8d/osC6WtGGrasyrC2suc4ZIQ= honnef.co/go/gotraceui v0.2.0/go.mod h1:qHo4/W75cA3bX0QQoSvDjbJa4R8mAyyFjbWAj63XElc= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= From 98fa823c799c3a6724a416946ebabf44514b4a17 Mon Sep 17 00:00:00 2001 From: Marcin Tojek Date: Thu, 23 May 2024 15:20:50 +0200 Subject: [PATCH 097/149] docs: describe workspace tags (#13352) --- docs/manifest.json | 5 + docs/templates/workspace-tags.md | 87 +++++++++ examples/parameters-dynamic-options/README.md | 1 - examples/workspace-tags/README.md | 26 +++ examples/workspace-tags/main.tf | 170 ++++++++++++++++++ 5 files changed, 288 insertions(+), 1 deletion(-) create mode 100644 docs/templates/workspace-tags.md create mode 100644 examples/workspace-tags/README.md create mode 100644 examples/workspace-tags/main.tf diff --git a/docs/manifest.json b/docs/manifest.json index 13b1b72cceb0f..85f5c250066ff 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -200,6 +200,11 @@ "description": "Prompt the template administrator for additional information about a template", "path": 
"./templates/variables.md" }, + { + "title": "Workspace Tags", + "description": "Control provisioning using Workspace Tags and Parameters", + "path": "./templates/workspace-tags.md" + }, { "title": "Administering templates", "description": "Configuration settings for template admins", diff --git a/docs/templates/workspace-tags.md b/docs/templates/workspace-tags.md new file mode 100644 index 0000000000000..ce886629abfe3 --- /dev/null +++ b/docs/templates/workspace-tags.md @@ -0,0 +1,87 @@ +# Workspace Tags + +Template administrators can leverage static template tags to limit workspace +provisioning to designated provisioner groups that have locally deployed +credentials for creating workspace resources. While this method ensures +controlled access, it offers limited flexibility and does not permit users to +select the nodes for their workspace creation. + +By using `coder_workspace_tags` and `coder_parameter`s, template administrators +can enable dynamic tag selection and modify static template tags. + +## Dynamic tag selection + +Here is a sample `coder_workspace_tags` data resource with a few workspace tags +specified: + +```hcl +data "coder_workspace_tags" "custom_workspace_tags" { + tags = { + "zone" = "developers" + "runtime" = data.coder_parameter.runtime_selector.value + "project_id" = "PROJECT_${data.coder_parameter.project_name.value}" + "cache" = data.coder_parameter.feature_cache_enabled.value == "true" ? 
"with-cache" : "no-cache" + } +} +``` + +**Legend** + +- `zone` - static tag value set to `developers` +- `runtime` - supported by the string-type `coder_parameter` to select + provisioner runtime, `runtime_selector` +- `project_id` - a formatted string supported by the string-type + `coder_parameter`, `project_name` +- `cache` - an HCL condition involving boolean-type `coder_parameter`, + `feature_cache_enabled` + +Review the +[full template example](https://github.com/coder/coder/tree/main/examples/workspace-tags) +using `coder_workspace_tags` and `coder_parameter`s. + +## Constraints + +### Tagged provisioners + +It is possible to choose tag combinations that no provisioner can handle. This +will cause the provisioner job to get stuck in the queue until a provisioner is +added that can handle its combination of tags. + +Before releasing the template version with configurable workspace tags, ensure +that every tag set is associated with at least one healthy provisioner. + +### Parameters types + +Provisioners require job tags to be defined in plain string format. When a +workspace tag refers to a `coder_parameter` without involving the string +formatter, for example, +(`"runtime" = data.coder_parameter.runtime_selector.value`), the Coder +provisioner server can transform only the following parameter types to strings: +_string_, _number_, and _bool_. + +### Mutability + +A mutable `coder_parameter` can be dangerous for a workspace tag as it allows +the workspace owner to change a provisioner group (due to different tags). In +most cases, `coder_parameter`s backing `coder_workspace_tags` should be marked +as immutable and set only once, during workspace creation. + +### HCL syntax + +When importing the template version with `coder_workspace_tags`, the Coder +provisioner server extracts raw partial queries for each workspace tag and +stores them in the database. 
During workspace build time, the Coder server uses +the [Hashicorp HCL library](https://github.com/hashicorp/hcl) to evaluate these +raw queries on-the-fly without processing the entire Terraform template. This +evaluation is simpler but also limited in terms of available functions, +variables, and references to other resources. + +**Supported syntax** + +- Static string: `foobar_tag = "foobaz"` +- Formatted string: `foobar_tag = "foobaz ${data.coder_parameter.foobaz.value}"` +- Reference to `coder_parameter`: + `foobar_tag = data.coder_parameter.foobar.value` +- Boolean logic: `production_tag = !data.coder_parameter.staging_env.value` +- Condition: + `cache = data.coder_parameter.feature_cache_enabled.value == "true" ? "with-cache" : "no-cache"` diff --git a/examples/parameters-dynamic-options/README.md b/examples/parameters-dynamic-options/README.md index 2c6c00d6acc83..b1c3f2dd3c5e0 100644 --- a/examples/parameters-dynamic-options/README.md +++ b/examples/parameters-dynamic-options/README.md @@ -35,6 +35,5 @@ Update the template and push it using the following command: ./scripts/coder-dev.sh templates push examples-parameters-dynamic-options \ -d examples/parameters-dynamic-options \ --variables-file examples/parameters-dynamic-options/variables.yml \ - --create \ -y ``` diff --git a/examples/workspace-tags/README.md b/examples/workspace-tags/README.md new file mode 100644 index 0000000000000..f3fbc86ae8fc1 --- /dev/null +++ b/examples/workspace-tags/README.md @@ -0,0 +1,26 @@ +--- +name: Sample Template with Workspace Tags +description: Review the sample template and introduce dynamic workspace tags to your template +tags: [local, docker, workspace-tags] +icon: /icon/docker.png +--- + +# Overview + +This Coder template presents use of [Workspace Tags](https://coder.com/docs/v2/latest/templates/workspace-tags) [Coder Parameters](https://coder.com/docs/v2/latest/templates/parameters). 
+ +# Use case + +Template administrators can use static tags to control workspace provisioning, limiting it to specific provisioner groups. However, this restricts workspace users from choosing their preferred workspace nodes. + +By using `coder_workspace_tags` and `coder_parameter`s, template administrators can allow dynamic tag selection, avoiding the need to push the same template multiple times with different tags. + +## Development + +Update the template and push it using the following command: + +``` +./scripts/coder-dev.sh templates push examples-workspace-tags \ + -d examples/workspace-tags \ + -y +``` diff --git a/examples/workspace-tags/main.tf b/examples/workspace-tags/main.tf new file mode 100644 index 0000000000000..f74286741cbb0 --- /dev/null +++ b/examples/workspace-tags/main.tf @@ -0,0 +1,170 @@ +terraform { + required_providers { + coder = { + source = "coder/coder" + } + docker = { + source = "kreuzwerker/docker" + } + } +} + +locals { + username = data.coder_workspace.me.owner +} + +data "coder_provisioner" "me" { +} + +data "coder_workspace" "me" { +} + +data "coder_workspace_tags" "custom_workspace_tags" { + tags = { + "zone" = "developers" + "runtime" = data.coder_parameter.runtime_selector.value + "project_id" = "PROJECT_${data.coder_parameter.project_name.value}" + "cache" = data.coder_parameter.feature_cache_enabled.value == "true" ? "with-cache" : "no-cache" + } +} + +data "coder_parameter" "runtime_selector" { + name = "runtime_selector" + display_name = "Provisioner Runtime" + default = "development" + + option { + name = "Development (free zone)" + value = "development" + } + option { + name = "Staging (internal access)" + value = "staging" + } + option { + name = "Production (air-gapped)" + value = "production" + } + + mutable = false +} + +data "coder_parameter" "project_name" { + name = "project_name" + display_name = "Project name" + description = "Specify the project name." 
+ + mutable = false +} + +data "coder_parameter" "feature_cache_enabled" { + name = "feature_cache_enabled" + display_name = "Enable cache?" + type = "bool" + default = false + + mutable = false +} + +resource "coder_agent" "main" { + arch = data.coder_provisioner.me.arch + os = "linux" + startup_script = < Date: Thu, 23 May 2024 11:25:10 -0600 Subject: [PATCH 098/149] fix(site): correct the size and position of the timeline trail in safari (#13348) --- site/src/components/Timeline/TimelineEntry.tsx | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/site/src/components/Timeline/TimelineEntry.tsx b/site/src/components/Timeline/TimelineEntry.tsx index b18a341803693..308edef8233e7 100644 --- a/site/src/components/Timeline/TimelineEntry.tsx +++ b/site/src/components/Timeline/TimelineEntry.tsx @@ -14,16 +14,19 @@ export const TimelineEntry = forwardRef< ref={ref} css={(theme) => [ { - position: "relative", "&:focus": { outlineStyle: "solid", outlineOffset: -1, outlineWidth: 2, outlineColor: theme.palette.primary.main, }, + "& td": { + position: "relative", + overflow: "hidden", + }, "& td:before": { position: "absolute", - left: 50, + left: 49, // 50px - (width / 2) display: "block", content: "''", height: "100%", From 1b4ca0042832a68cb563763a8311da0348dd3c15 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Thu, 23 May 2024 07:54:59 -1000 Subject: [PATCH 099/149] chore: include custom roles in list org roles (#13336) * chore: include custom roles in list org roles * move cli show roles to org scope --- cli/organization.go | 1 + .../rolescmd.go => cli/organizationroles.go | 18 ++--- cli/organizationroles_test.go | 51 ++++++++++++++ coderd/apidoc/docs.go | 8 +++ coderd/apidoc/swagger.json | 8 +++ coderd/database/db2sdk/db2sdk.go | 11 ++- coderd/database/dbgen/dbgen.go | 14 ++++ coderd/database/dbmem/dbmem.go | 12 +++- coderd/database/queries.sql.go | 30 +++++--- coderd/database/queries/roles.sql | 15 +++- coderd/rbac/roles.go | 36 +++++----- 
coderd/rbac/rolestore/rolestore.go | 26 ++++++- coderd/rbac/rolestore/rolestore_test.go | 41 +++++++++++ coderd/roles.go | 25 ++++++- coderd/roles_test.go | 41 +++++++++++ codersdk/roles.go | 3 +- docs/api/members.md | 6 ++ docs/api/schemas.md | 4 ++ enterprise/cli/rolescmd_test.go | 68 ------------------- enterprise/cli/root.go | 1 - enterprise/coderd/roles.go | 3 + enterprise/coderd/roles_test.go | 2 +- site/src/api/typesGenerated.ts | 1 + site/src/testHelpers/entities.ts | 4 ++ 24 files changed, 312 insertions(+), 117 deletions(-) rename enterprise/cli/rolescmd.go => cli/organizationroles.go (89%) create mode 100644 cli/organizationroles_test.go create mode 100644 coderd/rbac/rolestore/rolestore_test.go delete mode 100644 enterprise/cli/rolescmd_test.go diff --git a/cli/organization.go b/cli/organization.go index d9ea5c7aaf4ac..beb52cb5df8f2 100644 --- a/cli/organization.go +++ b/cli/organization.go @@ -30,6 +30,7 @@ func (r *RootCmd) organizations() *serpent.Command { r.currentOrganization(), r.switchOrganization(), r.createOrganization(), + r.organizationRoles(), }, } diff --git a/enterprise/cli/rolescmd.go b/cli/organizationroles.go similarity index 89% rename from enterprise/cli/rolescmd.go rename to cli/organizationroles.go index b0a9346697a01..91d1b20f54dd4 100644 --- a/enterprise/cli/rolescmd.go +++ b/cli/organizationroles.go @@ -12,26 +12,23 @@ import ( "github.com/coder/serpent" ) -// **NOTE** Only covers site wide roles at present. Org scoped roles maybe -// should be nested under some command that scopes to an org?? 
- -func (r *RootCmd) roles() *serpent.Command { +func (r *RootCmd) organizationRoles() *serpent.Command { cmd := &serpent.Command{ Use: "roles", - Short: "Manage site-wide roles.", + Short: "Manage organization roles.", Aliases: []string{"role"}, Handler: func(inv *serpent.Invocation) error { return inv.Command.HelpHandler(inv) }, Hidden: true, Children: []*serpent.Command{ - r.showRole(), + r.showOrganizationRoles(), }, } return cmd } -func (r *RootCmd) showRole() *serpent.Command { +func (r *RootCmd) showOrganizationRoles() *serpent.Command { formatter := cliui.NewOutputFormatter( cliui.ChangeFormatterData( cliui.TableFormat([]assignableRolesTableRow{}, []string{"name", "display_name", "built_in", "site_permissions", "org_permissions", "user_permissions"}), @@ -67,7 +64,12 @@ func (r *RootCmd) showRole() *serpent.Command { ), Handler: func(inv *serpent.Invocation) error { ctx := inv.Context() - roles, err := client.ListSiteRoles(ctx) + org, err := CurrentOrganization(r, inv, client) + if err != nil { + return err + } + + roles, err := client.ListOrganizationRoles(ctx, org.ID) if err != nil { return xerrors.Errorf("listing roles: %w", err) } diff --git a/cli/organizationroles_test.go b/cli/organizationroles_test.go new file mode 100644 index 0000000000000..d96c38c4bb9d6 --- /dev/null +++ b/cli/organizationroles_test.go @@ -0,0 +1,51 @@ +package cli_test + +import ( + "bytes" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/cli/clitest" + "github.com/coder/coder/v2/coderd/coderdtest" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/testutil" +) + +func TestShowOrganizationRoles(t *testing.T) { + t.Parallel() + + t.Run("OK", func(t *testing.T) { + t.Parallel() + + ownerClient, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{}) + owner := coderdtest.CreateFirstUser(t, 
ownerClient) + client, _ := coderdtest.CreateAnotherUser(t, ownerClient, owner.OrganizationID, rbac.RoleUserAdmin()) + + const expectedRole = "test-role" + dbgen.CustomRole(t, db, database.CustomRole{ + Name: expectedRole, + DisplayName: "Expected", + SitePermissions: nil, + OrgPermissions: nil, + UserPermissions: nil, + OrganizationID: uuid.NullUUID{ + UUID: owner.OrganizationID, + Valid: true, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + inv, root := clitest.New(t, "organization", "roles", "show") + clitest.SetupConfig(t, client, root) + + buf := new(bytes.Buffer) + inv.Stdout = buf + err := inv.WithContext(ctx).Run() + require.NoError(t, err) + require.Contains(t, buf.String(), expectedRole) + }) +} diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index af8492ae15961..37e121e483068 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8421,6 +8421,10 @@ const docTemplate = `{ "name": { "type": "string" }, + "organization_id": { + "type": "string", + "format": "uuid" + }, "organization_permissions": { "description": "map[\u003corg_id\u003e] -\u003e Permissions", "type": "object", @@ -11241,6 +11245,10 @@ const docTemplate = `{ "name": { "type": "string" }, + "organization_id": { + "type": "string", + "format": "uuid" + }, "organization_permissions": { "description": "map[\u003corg_id\u003e] -\u003e Permissions", "type": "object", diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index e68a61af19788..293e9e8e65265 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7476,6 +7476,10 @@ "name": { "type": "string" }, + "organization_id": { + "type": "string", + "format": "uuid" + }, "organization_permissions": { "description": "map[\u003corg_id\u003e] -\u003e Permissions", "type": "object", @@ -10133,6 +10137,10 @@ "name": { "type": "string" }, + "organization_id": { + "type": "string", + "format": "uuid" + }, "organization_permissions": { "description": "map[\u003corg_id\u003e] 
-\u003e Permissions", "type": "object", diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index ab6f3aa82b3f6..590183bd43dd1 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -527,12 +527,17 @@ func ProvisionerDaemon(dbDaemon database.ProvisionerDaemon) codersdk.Provisioner } func Role(role rbac.Role) codersdk.Role { + roleName, orgIDStr, err := rbac.RoleSplit(role.Name) + if err != nil { + roleName = role.Name + } return codersdk.Role{ - Name: role.Name, + Name: roleName, + OrganizationID: orgIDStr, DisplayName: role.DisplayName, SitePermissions: List(role.Site, Permission), OrganizationPermissions: Map(role.Org, ListLazy(Permission)), - UserPermissions: List(role.Site, Permission), + UserPermissions: List(role.User, Permission), } } @@ -546,7 +551,7 @@ func Permission(permission rbac.Permission) codersdk.Permission { func RoleToRBAC(role codersdk.Role) rbac.Role { return rbac.Role{ - Name: role.Name, + Name: rbac.RoleName(role.Name, role.OrganizationID), DisplayName: role.DisplayName, Site: List(role.SitePermissions, PermissionToRBAC), Org: Map(role.OrganizationPermissions, ListLazy(PermissionToRBAC)), diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 93d629e71e49f..be612abc333f9 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -8,6 +8,7 @@ import ( "encoding/json" "fmt" "net" + "strings" "testing" "time" @@ -817,6 +818,19 @@ func OAuth2ProviderAppToken(t testing.TB, db database.Store, seed database.OAuth return token } +func CustomRole(t testing.TB, db database.Store, seed database.CustomRole) database.CustomRole { + role, err := db.UpsertCustomRole(genCtx, database.UpsertCustomRoleParams{ + Name: takeFirst(seed.Name, strings.ToLower(namesgenerator.GetRandomName(1))), + DisplayName: namesgenerator.GetRandomName(1), + OrganizationID: seed.OrganizationID, + SitePermissions: takeFirstSlice(seed.SitePermissions, []byte("[]")), + 
OrgPermissions: takeFirstSlice(seed.SitePermissions, []byte("{}")), + UserPermissions: takeFirstSlice(seed.SitePermissions, []byte("[]")), + }) + require.NoError(t, err, "insert custom role") + return role +} + func must[V any](v V, err error) V { if err != nil { panic(err) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 5f2ebbff25003..e9497880b274c 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -1187,7 +1187,11 @@ func (q *FakeQuerier) CustomRoles(_ context.Context, arg database.CustomRolesPar role := role if len(arg.LookupRoles) > 0 { if !slices.ContainsFunc(arg.LookupRoles, func(s string) bool { - return strings.EqualFold(s, role.Name) + roleName := rbac.RoleName(role.Name, "") + if role.OrganizationID.UUID != uuid.Nil { + roleName = rbac.RoleName(role.Name, role.OrganizationID.UUID.String()) + } + return strings.EqualFold(s, roleName) }) { continue } @@ -1197,6 +1201,10 @@ func (q *FakeQuerier) CustomRoles(_ context.Context, arg database.CustomRolesPar continue } + if arg.OrganizationID != uuid.Nil && role.OrganizationID.UUID != arg.OrganizationID { + continue + } + found = append(found, role) } @@ -8377,6 +8385,7 @@ func (q *FakeQuerier) UpsertCustomRole(_ context.Context, arg database.UpsertCus for i := range q.customRoles { if strings.EqualFold(q.customRoles[i].Name, arg.Name) { q.customRoles[i].DisplayName = arg.DisplayName + q.customRoles[i].OrganizationID = arg.OrganizationID q.customRoles[i].SitePermissions = arg.SitePermissions q.customRoles[i].OrgPermissions = arg.OrgPermissions q.customRoles[i].UserPermissions = arg.UserPermissions @@ -8388,6 +8397,7 @@ func (q *FakeQuerier) UpsertCustomRole(_ context.Context, arg database.UpsertCus role := database.CustomRole{ Name: arg.Name, DisplayName: arg.DisplayName, + OrganizationID: arg.OrganizationID, SitePermissions: arg.SitePermissions, OrgPermissions: arg.OrgPermissions, UserPermissions: arg.UserPermissions, diff --git 
a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 8f5a879d75f5c..bcc961c88e048 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5604,10 +5604,13 @@ FROM custom_roles WHERE true - -- Lookup roles filter + -- Lookup roles filter expects the role names to be in the rbac package + -- format. Eg: name[:] AND CASE WHEN array_length($1 :: text[], 1) > 0 THEN - -- Case insensitive - name ILIKE ANY($1 :: text []) + -- Case insensitive lookup with org_id appended (if non-null). + -- This will return just the name if org_id is null. It'll append + -- the org_id if not null + concat(name, NULLIF(concat(':', organization_id), ':')) ILIKE ANY($1 :: text []) ELSE true END -- Org scoping filter, to only fetch site wide roles @@ -5615,15 +5618,20 @@ WHERE organization_id IS null ELSE true END + AND CASE WHEN $3 :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN + organization_id = $3 + ELSE true + END ` type CustomRolesParams struct { - LookupRoles []string `db:"lookup_roles" json:"lookup_roles"` - ExcludeOrgRoles bool `db:"exclude_org_roles" json:"exclude_org_roles"` + LookupRoles []string `db:"lookup_roles" json:"lookup_roles"` + ExcludeOrgRoles bool `db:"exclude_org_roles" json:"exclude_org_roles"` + OrganizationID uuid.UUID `db:"organization_id" json:"organization_id"` } func (q *sqlQuerier) CustomRoles(ctx context.Context, arg CustomRolesParams) ([]CustomRole, error) { - rows, err := q.db.QueryContext(ctx, customRoles, pq.Array(arg.LookupRoles), arg.ExcludeOrgRoles) + rows, err := q.db.QueryContext(ctx, customRoles, pq.Array(arg.LookupRoles), arg.ExcludeOrgRoles, arg.OrganizationID) if err != nil { return nil, err } @@ -5659,6 +5667,7 @@ INSERT INTO custom_roles ( name, display_name, + organization_id, site_permissions, org_permissions, user_permissions, @@ -5672,15 +5681,16 @@ VALUES ( $3, $4, $5, + $6, now(), now() ) ON CONFLICT (name) DO UPDATE SET display_name = $2, - site_permissions = $3, - 
org_permissions = $4, - user_permissions = $5, + site_permissions = $4, + org_permissions = $5, + user_permissions = $6, updated_at = now() RETURNING name, display_name, site_permissions, org_permissions, user_permissions, created_at, updated_at, organization_id ` @@ -5688,6 +5698,7 @@ RETURNING name, display_name, site_permissions, org_permissions, user_permission type UpsertCustomRoleParams struct { Name string `db:"name" json:"name"` DisplayName string `db:"display_name" json:"display_name"` + OrganizationID uuid.NullUUID `db:"organization_id" json:"organization_id"` SitePermissions json.RawMessage `db:"site_permissions" json:"site_permissions"` OrgPermissions json.RawMessage `db:"org_permissions" json:"org_permissions"` UserPermissions json.RawMessage `db:"user_permissions" json:"user_permissions"` @@ -5697,6 +5708,7 @@ func (q *sqlQuerier) UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleP row := q.db.QueryRowContext(ctx, upsertCustomRole, arg.Name, arg.DisplayName, + arg.OrganizationID, arg.SitePermissions, arg.OrgPermissions, arg.UserPermissions, diff --git a/coderd/database/queries/roles.sql b/coderd/database/queries/roles.sql index 2137dea34b077..dd8816d40eecc 100644 --- a/coderd/database/queries/roles.sql +++ b/coderd/database/queries/roles.sql @@ -5,10 +5,13 @@ FROM custom_roles WHERE true - -- Lookup roles filter + -- Lookup roles filter expects the role names to be in the rbac package + -- format. Eg: name[:] AND CASE WHEN array_length(@lookup_roles :: text[], 1) > 0 THEN - -- Case insensitive - name ILIKE ANY(@lookup_roles :: text []) + -- Case insensitive lookup with org_id appended (if non-null). + -- This will return just the name if org_id is null. 
It'll append + -- the org_id if not null + concat(name, NULLIF(concat(':', organization_id), ':')) ILIKE ANY(@lookup_roles :: text []) ELSE true END -- Org scoping filter, to only fetch site wide roles @@ -16,6 +19,10 @@ WHERE organization_id IS null ELSE true END + AND CASE WHEN @organization_id :: uuid != '00000000-0000-0000-0000-000000000000'::uuid THEN + organization_id = @organization_id + ELSE true + END ; -- name: UpsertCustomRole :one @@ -23,6 +30,7 @@ INSERT INTO custom_roles ( name, display_name, + organization_id, site_permissions, org_permissions, user_permissions, @@ -33,6 +41,7 @@ VALUES ( -- Always force lowercase names lower(@name), @display_name, + @organization_id, @site_permissions, @org_permissions, @user_permissions, diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index 7086e2fe0e2a4..137d2c0c1258b 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -53,29 +53,29 @@ func (names RoleNames) Names() []string { // site and orgs, and these functions can be removed. 
func RoleOwner() string { - return roleName(owner, "") + return RoleName(owner, "") } -func CustomSiteRole() string { return roleName(customSiteRole, "") } +func CustomSiteRole() string { return RoleName(customSiteRole, "") } func RoleTemplateAdmin() string { - return roleName(templateAdmin, "") + return RoleName(templateAdmin, "") } func RoleUserAdmin() string { - return roleName(userAdmin, "") + return RoleName(userAdmin, "") } func RoleMember() string { - return roleName(member, "") + return RoleName(member, "") } func RoleOrgAdmin(organizationID uuid.UUID) string { - return roleName(orgAdmin, organizationID.String()) + return RoleName(orgAdmin, organizationID.String()) } func RoleOrgMember(organizationID uuid.UUID) string { - return roleName(orgMember, organizationID.String()) + return RoleName(orgMember, organizationID.String()) } func allPermsExcept(excepts ...Objecter) []Permission { @@ -273,7 +273,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // organization scope. orgAdmin: func(organizationID string) Role { return Role{ - Name: roleName(orgAdmin, organizationID), + Name: RoleName(orgAdmin, organizationID), DisplayName: "Organization Admin", Site: []Permission{}, Org: map[string][]Permission{ @@ -291,7 +291,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // in an organization. orgMember: func(organizationID string) Role { return Role{ - Name: roleName(orgMember, organizationID), + Name: RoleName(orgMember, organizationID), DisplayName: "", Site: []Permission{}, Org: map[string][]Permission{ @@ -475,13 +475,13 @@ func CanAssignRole(expandable ExpandableRoles, assignedRole string) bool { // For CanAssignRole, we only care about the names of the roles. 
roles := expandable.Names() - assigned, assignedOrg, err := roleSplit(assignedRole) + assigned, assignedOrg, err := RoleSplit(assignedRole) if err != nil { return false } for _, longRole := range roles { - role, orgID, err := roleSplit(longRole) + role, orgID, err := RoleSplit(longRole) if err != nil { continue } @@ -510,7 +510,7 @@ func CanAssignRole(expandable ExpandableRoles, assignedRole string) bool { // api. We should maybe make an exported function that returns just the // human-readable content of the Role struct (name + display name). func RoleByName(name string) (Role, error) { - roleName, orgID, err := roleSplit(name) + roleName, orgID, err := RoleSplit(name) if err != nil { return Role{}, xerrors.Errorf("parse role name: %w", err) } @@ -544,7 +544,7 @@ func rolesByNames(roleNames []string) ([]Role, error) { } func IsOrgRole(roleName string) (string, bool) { - _, orgID, err := roleSplit(roleName) + _, orgID, err := RoleSplit(roleName) if err == nil && orgID != "" { return orgID, true } @@ -561,7 +561,7 @@ func OrganizationRoles(organizationID uuid.UUID) []Role { var roles []Role for _, roleF := range builtInRoles { role := roleF(organizationID.String()) - _, scope, err := roleSplit(role.Name) + _, scope, err := RoleSplit(role.Name) if err != nil { // This should never happen continue @@ -582,7 +582,7 @@ func SiteRoles() []Role { var roles []Role for _, roleF := range builtInRoles { role := roleF("random") - _, scope, err := roleSplit(role.Name) + _, scope, err := RoleSplit(role.Name) if err != nil { // This should never happen continue @@ -625,19 +625,19 @@ func ChangeRoleSet(from []string, to []string) (added []string, removed []string return added, removed } -// roleName is a quick helper function to return +// RoleName is a quick helper function to return // // role_name:scopeID // // If no scopeID is required, only 'role_name' is returned -func roleName(name string, orgID string) string { +func RoleName(name string, orgID string) string { if orgID == 
"" { return name } return name + ":" + orgID } -func roleSplit(role string) (name string, orgID string, err error) { +func RoleSplit(role string) (name string, orgID string, err error) { arr := strings.Split(role, ":") if len(arr) > 2 { return "", "", xerrors.Errorf("too many colons in role name") diff --git a/coderd/rbac/rolestore/rolestore.go b/coderd/rbac/rolestore/rolestore.go index 9881cde028826..e0d199241fc9f 100644 --- a/coderd/rbac/rolestore/rolestore.go +++ b/coderd/rbac/rolestore/rolestore.go @@ -5,6 +5,7 @@ import ( "encoding/json" "net/http" + "github.com/google/uuid" "golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/database" @@ -75,6 +76,7 @@ func Expand(ctx context.Context, db database.Store, names []string) (rbac.Roles, dbroles, err := db.CustomRoles(ctx, database.CustomRolesParams{ LookupRoles: lookup, ExcludeOrgRoles: false, + OrganizationID: uuid.Nil, }) if err != nil { return nil, xerrors.Errorf("fetch custom roles: %w", err) @@ -95,8 +97,12 @@ func Expand(ctx context.Context, db database.Store, names []string) (rbac.Roles, } func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { + name := dbRole.Name + if dbRole.OrganizationID.Valid { + name = rbac.RoleName(dbRole.Name, dbRole.OrganizationID.UUID.String()) + } role := rbac.Role{ - Name: dbRole.Name, + Name: name, DisplayName: dbRole.DisplayName, Site: nil, Org: nil, @@ -122,11 +128,27 @@ func ConvertDBRole(dbRole database.CustomRole) (rbac.Role, error) { } func ConvertRoleToDB(role rbac.Role) (database.CustomRole, error) { + roleName, orgIDStr, err := rbac.RoleSplit(role.Name) + if err != nil { + return database.CustomRole{}, xerrors.Errorf("split role %q: %w", role.Name, err) + } + dbRole := database.CustomRole{ - Name: role.Name, + Name: roleName, DisplayName: role.DisplayName, } + if orgIDStr != "" { + orgID, err := uuid.Parse(orgIDStr) + if err != nil { + return database.CustomRole{}, xerrors.Errorf("parse org id %q: %w", orgIDStr, err) + } + dbRole.OrganizationID = 
uuid.NullUUID{ + UUID: orgID, + Valid: true, + } + } + siteData, err := json.Marshal(role.Site) if err != nil { return dbRole, xerrors.Errorf("marshal site permissions: %w", err) diff --git a/coderd/rbac/rolestore/rolestore_test.go b/coderd/rbac/rolestore/rolestore_test.go new file mode 100644 index 0000000000000..318f2f579b340 --- /dev/null +++ b/coderd/rbac/rolestore/rolestore_test.go @@ -0,0 +1,41 @@ +package rolestore_test + +import ( + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbgen" + "github.com/coder/coder/v2/coderd/database/dbmem" + "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/rolestore" + "github.com/coder/coder/v2/testutil" +) + +func TestExpandCustomRoleRoles(t *testing.T) { + t.Parallel() + + db := dbmem.New() + + org := dbgen.Organization(t, db, database.Organization{}) + + const roleName = "test-role" + dbgen.CustomRole(t, db, database.CustomRole{ + Name: roleName, + DisplayName: "", + SitePermissions: nil, + OrgPermissions: nil, + UserPermissions: nil, + OrganizationID: uuid.NullUUID{ + UUID: org.ID, + Valid: true, + }, + }) + + ctx := testutil.Context(t, testutil.WaitShort) + roles, err := rolestore.Expand(ctx, db, []string{rbac.RoleName(roleName, org.ID.String())}) + require.NoError(t, err) + require.Len(t, roles, 1, "role found") +} diff --git a/coderd/roles.go b/coderd/roles.go index 3d6245f9d4594..a00af23ce98eb 100644 --- a/coderd/roles.go +++ b/coderd/roles.go @@ -3,6 +3,8 @@ package coderd import ( "net/http" + "github.com/google/uuid" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/httpmw" @@ -32,9 +34,10 @@ func (api *API) AssignableSiteRoles(rw http.ResponseWriter, r *http.Request) { } dbCustomRoles, err := api.Database.CustomRoles(ctx, database.CustomRolesParams{ + LookupRoles: nil, // Only 
site wide custom roles to be included ExcludeOrgRoles: true, - LookupRoles: nil, + OrganizationID: uuid.Nil, }) if err != nil { httpapi.InternalServerError(rw, err) @@ -73,7 +76,25 @@ func (api *API) assignableOrgRoles(rw http.ResponseWriter, r *http.Request) { } roles := rbac.OrganizationRoles(organization.ID) - httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles, []rbac.Role{})) + dbCustomRoles, err := api.Database.CustomRoles(ctx, database.CustomRolesParams{ + LookupRoles: nil, + ExcludeOrgRoles: false, + OrganizationID: organization.ID, + }) + if err != nil { + httpapi.InternalServerError(rw, err) + return + } + + customRoles := make([]rbac.Role, 0, len(dbCustomRoles)) + for _, customRole := range dbCustomRoles { + rbacRole, err := rolestore.ConvertDBRole(customRole) + if err == nil { + customRoles = append(customRoles, rbacRole) + } + } + + httpapi.Write(ctx, rw, http.StatusOK, assignableRoles(actorRoles.Roles, roles, customRoles)) } func assignableRoles(actorRoles rbac.ExpandableRoles, roles []rbac.Role, customRoles []rbac.Role) []codersdk.AssignableRoles { diff --git a/coderd/roles_test.go b/coderd/roles_test.go index d82c03033cb54..6d4f4bb6fe789 100644 --- a/coderd/roles_test.go +++ b/coderd/roles_test.go @@ -3,13 +3,17 @@ package coderd_test import ( "context" "net/http" + "slices" "testing" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/coderdtest" "github.com/coder/coder/v2/coderd/database/db2sdk" + "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/rbac" + "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/rbac/rolestore" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" ) @@ -156,6 +160,43 @@ func TestListRoles(t *testing.T) { } } +func TestListCustomRoles(t *testing.T) { + t.Parallel() + + t.Run("Organizations", func(t *testing.T) { + t.Parallel() + + client, db := coderdtest.NewWithDatabase(t, nil) + owner := 
coderdtest.CreateFirstUser(t, client) + + const roleName = "random_role" + dbgen.CustomRole(t, db, must(rolestore.ConvertRoleToDB(rbac.Role{ + Name: rbac.RoleName(roleName, owner.OrganizationID.String()), + DisplayName: "Random Role", + Site: nil, + Org: map[string][]rbac.Permission{ + owner.OrganizationID.String(): { + { + Negate: false, + ResourceType: rbac.ResourceWorkspace.Type, + Action: policy.ActionRead, + }, + }, + }, + User: nil, + }))) + + ctx := testutil.Context(t, testutil.WaitShort) + roles, err := client.ListOrganizationRoles(ctx, owner.OrganizationID) + require.NoError(t, err) + + found := slices.ContainsFunc(roles, func(element codersdk.AssignableRoles) bool { + return element.Name == roleName && element.OrganizationID == owner.OrganizationID.String() + }) + require.Truef(t, found, "custom organization role listed") + }) +} + func convertRole(roleName string) codersdk.Role { role, _ := rbac.RoleByName(roleName) return db2sdk.Role(role) diff --git a/codersdk/roles.go b/codersdk/roles.go index 29b0174931fbe..c803e92f44bb2 100644 --- a/codersdk/roles.go +++ b/codersdk/roles.go @@ -35,7 +35,8 @@ type Permission struct { // Role is a longer form of SlimRole used to edit custom roles. 
type Role struct { - Name string `json:"name" table:"name,default_sort"` + Name string `json:"name" table:"name,default_sort" validate:"username"` + OrganizationID string `json:"organization_id" table:"organization_id" format:"uuid"` DisplayName string `json:"display_name" table:"display_name"` SitePermissions []Permission `json:"site_permissions" table:"site_permissions"` // map[] -> Permissions diff --git a/docs/api/members.md b/docs/api/members.md index 8b34200e50e95..27536a6c836fa 100644 --- a/docs/api/members.md +++ b/docs/api/members.md @@ -30,6 +30,7 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members "built_in": true, "display_name": "string", "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", "organization_permissions": { "property1": [ { @@ -81,6 +82,7 @@ Status Code **200** | `» built_in` | boolean | false | | Built in roles are immutable | | `» display_name` | string | false | | | | `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | | `» organization_permissions` | object | false | | map[] -> Permissions | | `»» [any property]` | array | false | | | | `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | @@ -215,6 +217,7 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ "built_in": true, "display_name": "string", "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", "organization_permissions": { "property1": [ { @@ -266,6 +269,7 @@ Status Code **200** | `» built_in` | boolean | false | | Built in roles are immutable | | `» display_name` | string | false | | | | `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | | `» organization_permissions` | object | false | | map[] -> Permissions | | `»» [any property]` | array | false | | | | `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | @@ -341,6 +345,7 @@ curl -X PATCH 
http://coder-server:8080/api/v2/users/roles \ { "display_name": "string", "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", "organization_permissions": { "property1": [ { @@ -390,6 +395,7 @@ Status Code **200** | `[array item]` | array | false | | | | `» display_name` | string | false | | | | `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | | `» organization_permissions` | object | false | | map[] -> Permissions | | `»» [any property]` | array | false | | | | `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | diff --git a/docs/api/schemas.md b/docs/api/schemas.md index eb2f7cbeb8aa1..ca7493ae53ec0 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -805,6 +805,7 @@ "built_in": true, "display_name": "string", "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", "organization_permissions": { "property1": [ { @@ -846,6 +847,7 @@ | `built_in` | boolean | false | | Built in roles are immutable | | `display_name` | string | false | | | | `name` | string | false | | | +| `organization_id` | string | false | | | | `organization_permissions` | object | false | | map[] -> Permissions | | » `[any property]` | array of [codersdk.Permission](#codersdkpermission) | false | | | | `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | @@ -4327,6 +4329,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o { "display_name": "string", "name": "string", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", "organization_permissions": { "property1": [ { @@ -4366,6 +4369,7 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o | -------------------------- | --------------------------------------------------- | -------- | ------------ | ---------------------------- | | `display_name` | string | false | | | | `name` | string | false | | | +| `organization_id` | string | false | | | | `organization_permissions` | object | false | | map[] -> Permissions | | » `[any property]` | array of [codersdk.Permission](#codersdkpermission) | false | | | | `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | diff --git a/enterprise/cli/rolescmd_test.go b/enterprise/cli/rolescmd_test.go deleted file mode 100644 index df776603e0ac4..0000000000000 --- a/enterprise/cli/rolescmd_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package cli_test - -import ( - "testing" - - "github.com/stretchr/testify/require" - - "github.com/coder/coder/v2/cli/clitest" - "github.com/coder/coder/v2/coderd/coderdtest" - "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" - "github.com/coder/coder/v2/enterprise/coderd/license" - "github.com/coder/coder/v2/pty/ptytest" - "github.com/coder/coder/v2/testutil" -) - -func TestShowRoles(t *testing.T) { - t.Parallel() - - t.Run("OK", func(t *testing.T) { - t.Parallel() - - dv := coderdtest.DeploymentValues(t) - dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} - owner, admin := coderdenttest.New(t, &coderdenttest.Options{ - Options: &coderdtest.Options{ - DeploymentValues: dv, - }, - LicenseOptions: &coderdenttest.LicenseOptions{ - Features: license.Features{ - codersdk.FeatureCustomRoles: 1, - }, - }, - }) - - // Requires an owner - client, _ := coderdtest.CreateAnotherUser(t, owner, admin.OrganizationID, rbac.RoleOwner()) - - const expectedRole = "test-role" - ctx := testutil.Context(t, testutil.WaitMedium) - _, err := client.PatchRole(ctx, codersdk.Role{ - Name: expectedRole, - DisplayName: "Test Role", - SitePermissions: 
codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ - codersdk.ResourceWorkspace: {codersdk.ActionRead, codersdk.ActionUpdate}, - }), - }) - require.NoError(t, err, "create role") - - inv, conf := newCLI(t, "roles", "show", "test-role") - - pty := ptytest.New(t) - inv.Stdout = pty.Output() - clitest.SetupConfig(t, client, conf) - - err = inv.Run() - require.NoError(t, err) - - matches := []string{ - "test-role", "2 permissions", - } - - for _, match := range matches { - pty.ExpectMatch(match) - } - }) -} diff --git a/enterprise/cli/root.go b/enterprise/cli/root.go index 69b686c4174aa..74615ff0e9d2e 100644 --- a/enterprise/cli/root.go +++ b/enterprise/cli/root.go @@ -17,7 +17,6 @@ func (r *RootCmd) enterpriseOnly() []*serpent.Command { r.licenses(), r.groups(), r.provisionerDaemons(), - r.roles(), } } diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go index 552197f7c4401..8e0827c9b3b02 100644 --- a/enterprise/coderd/roles.go +++ b/enterprise/coderd/roles.go @@ -3,6 +3,8 @@ package coderd import ( "net/http" + "github.com/google/uuid" + "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/httpapi" @@ -59,6 +61,7 @@ func (api *API) patchRole(rw http.ResponseWriter, r *http.Request) { inserted, err := api.Database.UpsertCustomRole(ctx, database.UpsertCustomRoleParams{ Name: args.Name, DisplayName: args.DisplayName, + OrganizationID: uuid.NullUUID{}, SitePermissions: args.SitePermissions, OrgPermissions: args.OrgPermissions, UserPermissions: args.UserPermissions, diff --git a/enterprise/coderd/roles_test.go b/enterprise/coderd/roles_test.go index 67b863e63bacd..a7db9b718d946 100644 --- a/enterprise/coderd/roles_test.go +++ b/enterprise/coderd/roles_test.go @@ -198,6 +198,6 @@ func TestCustomRole(t *testing.T) { OrganizationPermissions: nil, UserPermissions: nil, }) - require.ErrorContains(t, err, "Invalid role name") + require.ErrorContains(t, err, 
"Validation") }) } diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index db1b39fdbed26..5d4d148758f36 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -977,6 +977,7 @@ export interface Response { // From codersdk/roles.go export interface Role { readonly name: string; + readonly organization_id: string; readonly display_name: string; readonly site_permissions: readonly Permission[]; readonly organization_permissions: Record; diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 22a4c5db6edd9..1fbb18aa86a07 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -235,6 +235,7 @@ export const MockOwnerRole: TypesGen.Role = { site_permissions: [], organization_permissions: {}, user_permissions: [], + organization_id: "", }; export const MockUserAdminRole: TypesGen.Role = { @@ -243,6 +244,7 @@ export const MockUserAdminRole: TypesGen.Role = { site_permissions: [], organization_permissions: {}, user_permissions: [], + organization_id: "", }; export const MockTemplateAdminRole: TypesGen.Role = { @@ -251,6 +253,7 @@ export const MockTemplateAdminRole: TypesGen.Role = { site_permissions: [], organization_permissions: {}, user_permissions: [], + organization_id: "", }; export const MockMemberRole: TypesGen.SlimRole = { @@ -264,6 +267,7 @@ export const MockAuditorRole: TypesGen.Role = { site_permissions: [], organization_permissions: {}, user_permissions: [], + organization_id: "", }; // assignableRole takes a role and a boolean. 
The boolean implies if the From e5bb0a7a00e2aa75014b0094695c7ff7a04837d9 Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Thu, 23 May 2024 23:32:30 -0700 Subject: [PATCH 100/149] chore: add easy NAT integration tests part 2 (#13312) --- coderd/rbac/roles_test.go | 6 +- tailnet/test/integration/integration.go | 82 ++- tailnet/test/integration/integration_test.go | 169 ++++-- tailnet/test/integration/network.go | 570 ++++++++++++++----- tailnet/test/integration/remove_test_ns.sh | 24 + tailnet/test/integration/suite.go | 5 +- 6 files changed, 625 insertions(+), 231 deletions(-) create mode 100755 tailnet/test/integration/remove_test_ns.sh diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index d90f045284c5b..e6680d4d628cc 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -77,7 +77,7 @@ func TestOwnerExec(t *testing.T) { }) } -// nolint:tparallel,paralleltest -- subtests share a map, just run sequentially. +// nolint:tparallel,paralleltest // subtests share a map, just run sequentially. func TestRolePermissions(t *testing.T) { t.Parallel() @@ -557,7 +557,7 @@ func TestRolePermissions(t *testing.T) { // nolint:tparallel,paralleltest for _, c := range testCases { c := c - // nolint:tparallel,paralleltest -- These share the same remainingPermissions map + // nolint:tparallel,paralleltest // These share the same remainingPermissions map t.Run(c.Name, func(t *testing.T) { remainingSubjs := make(map[string]struct{}) for _, subj := range requiredSubjects { @@ -600,7 +600,7 @@ func TestRolePermissions(t *testing.T) { // Only run these if the tests on top passed. Otherwise, the error output is too noisy. if passed { for rtype, v := range remainingPermissions { - // nolint:tparallel,paralleltest -- Making a subtest for easier diagnosing failures. + // nolint:tparallel,paralleltest // Making a subtest for easier diagnosing failures. 
t.Run(fmt.Sprintf("%s-AllActions", rtype), func(t *testing.T) { if len(v) > 0 { assert.Equal(t, map[policy.Action]bool{}, v, "remaining permissions should be empty for type %q", rtype) diff --git a/tailnet/test/integration/integration.go b/tailnet/test/integration/integration.go index b26365ea3ee8b..3877542c8eafc 100644 --- a/tailnet/test/integration/integration.go +++ b/tailnet/test/integration/integration.go @@ -41,12 +41,34 @@ import ( "github.com/coder/coder/v2/testutil" ) -// IDs used in tests. -var ( - Client1ID = uuid.MustParse("00000000-0000-0000-0000-000000000001") - Client2ID = uuid.MustParse("00000000-0000-0000-0000-000000000002") +type ClientNumber int + +const ( + ClientNumber1 ClientNumber = 1 + ClientNumber2 ClientNumber = 2 ) +type Client struct { + Number ClientNumber + ID uuid.UUID + ListenPort uint16 + ShouldRunTests bool +} + +var Client1 = Client{ + Number: ClientNumber1, + ID: uuid.MustParse("00000000-0000-0000-0000-000000000001"), + ListenPort: client1Port, + ShouldRunTests: true, +} + +var Client2 = Client{ + Number: ClientNumber2, + ID: uuid.MustParse("00000000-0000-0000-0000-000000000002"), + ListenPort: client2Port, + ShouldRunTests: false, +} + type TestTopology struct { Name string // SetupNetworking creates interfaces and network namespaces for the test. @@ -59,12 +81,12 @@ type TestTopology struct { Server ServerStarter // StartClient gets called in each client subprocess. It's expected to // create the tailnet.Conn and ensure connectivity to it's peer. - StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID) *tailnet.Conn + StartClient func(t *testing.T, logger slog.Logger, serverURL *url.URL, derpMap *tailcfg.DERPMap, me Client, peer Client) *tailnet.Conn // RunTests is the main test function. It's called in each of the client // subprocesses. If tests can only run once, they should check the client ID // and return early if it's not the expected one. 
- RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, myID uuid.UUID, peerID uuid.UUID, conn *tailnet.Conn) + RunTests func(t *testing.T, logger slog.Logger, serverURL *url.URL, conn *tailnet.Conn, me Client, peer Client) } type ServerStarter interface { @@ -264,13 +286,14 @@ http { // StartClientDERP creates a client connection to the server for coordination // and creates a tailnet.Conn which will only use DERP to connect to the peer. -func StartClientDERP(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { - return startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ - Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, - DERPMap: basicDERPMap(t, serverURL), +func StartClientDERP(t *testing.T, logger slog.Logger, serverURL *url.URL, derpMap *tailcfg.DERPMap, me, peer Client) *tailnet.Conn { + return startClientOptions(t, logger, serverURL, me, peer, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(me.ID), 128)}, + DERPMap: derpMap, BlockEndpoints: true, Logger: logger, DERPForceWebSockets: false, + ListenPort: me.ListenPort, // These tests don't have internet connection, so we need to force // magicsock to do anything. ForceNetworkUp: true, @@ -279,13 +302,14 @@ func StartClientDERP(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, // StartClientDERPWebSockets does the same thing as StartClientDERP but will // only use DERP WebSocket fallback. 
-func StartClientDERPWebSockets(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { - return startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ - Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, - DERPMap: basicDERPMap(t, serverURL), +func StartClientDERPWebSockets(t *testing.T, logger slog.Logger, serverURL *url.URL, derpMap *tailcfg.DERPMap, me, peer Client) *tailnet.Conn { + return startClientOptions(t, logger, serverURL, me, peer, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(me.ID), 128)}, + DERPMap: derpMap, BlockEndpoints: true, Logger: logger, DERPForceWebSockets: true, + ListenPort: me.ListenPort, // These tests don't have internet connection, so we need to force // magicsock to do anything. ForceNetworkUp: true, @@ -295,20 +319,21 @@ func StartClientDERPWebSockets(t *testing.T, logger slog.Logger, serverURL *url. // StartClientDirect does the same thing as StartClientDERP but disables // BlockEndpoints (which enables Direct connections), and waits for a direct // connection to be established between the two peers. -func StartClientDirect(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID) *tailnet.Conn { - conn := startClientOptions(t, logger, serverURL, myID, peerID, &tailnet.Options{ - Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(myID), 128)}, - DERPMap: basicDERPMap(t, serverURL), +func StartClientDirect(t *testing.T, logger slog.Logger, serverURL *url.URL, derpMap *tailcfg.DERPMap, me, peer Client) *tailnet.Conn { + conn := startClientOptions(t, logger, serverURL, me, peer, &tailnet.Options{ + Addresses: []netip.Prefix{netip.PrefixFrom(tailnet.IPFromUUID(me.ID), 128)}, + DERPMap: derpMap, BlockEndpoints: false, Logger: logger, DERPForceWebSockets: true, + ListenPort: me.ListenPort, // These tests don't have internet connection, so we need to force // magicsock to do anything. 
ForceNetworkUp: true, }) // Wait for direct connection to be established. - peerIP := tailnet.IPFromUUID(peerID) + peerIP := tailnet.IPFromUUID(peer.ID) require.Eventually(t, func() bool { t.Log("attempting ping to peer to judge direct connection") ctx := testutil.Context(t, testutil.WaitShort) @@ -332,8 +357,8 @@ type ClientStarter struct { Options *tailnet.Options } -func startClientOptions(t *testing.T, logger slog.Logger, serverURL *url.URL, myID, peerID uuid.UUID, options *tailnet.Options) *tailnet.Conn { - u, err := serverURL.Parse(fmt.Sprintf("/api/v2/workspaceagents/%s/coordinate", myID.String())) +func startClientOptions(t *testing.T, logger slog.Logger, serverURL *url.URL, me, peer Client, options *tailnet.Options) *tailnet.Conn { + u, err := serverURL.Parse(fmt.Sprintf("/api/v2/workspaceagents/%s/coordinate", me.ID.String())) require.NoError(t, err) //nolint:bodyclose ws, _, err := websocket.Dial(context.Background(), u.String(), nil) @@ -357,7 +382,7 @@ func startClientOptions(t *testing.T, logger slog.Logger, serverURL *url.URL, my _ = conn.Close() }) - coordination := tailnet.NewRemoteCoordination(logger, coord, conn, peerID) + coordination := tailnet.NewRemoteCoordination(logger, coord, conn, peer.ID) t.Cleanup(func() { _ = coordination.Close() }) @@ -365,10 +390,17 @@ func startClientOptions(t *testing.T, logger slog.Logger, serverURL *url.URL, my return conn } -func basicDERPMap(t *testing.T, serverURL *url.URL) *tailcfg.DERPMap { +func basicDERPMap(serverURLStr string) (*tailcfg.DERPMap, error) { + serverURL, err := url.Parse(serverURLStr) + if err != nil { + return nil, xerrors.Errorf("parse server URL %q: %w", serverURLStr, err) + } + portStr := serverURL.Port() port, err := strconv.Atoi(portStr) - require.NoError(t, err, "parse server port") + if err != nil { + return nil, xerrors.Errorf("parse port %q: %w", portStr, err) + } hostname := serverURL.Hostname() ipv4 := "" @@ -399,7 +431,7 @@ func basicDERPMap(t *testing.T, serverURL *url.URL) 
*tailcfg.DERPMap { }, }, }, - } + }, nil } // ExecBackground starts a subprocess with the given flags and returns a diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index 45d88145216c1..e23b716096048 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -4,19 +4,27 @@ package integration_test import ( + "context" + "encoding/json" "flag" "fmt" + "net" "net/http" "net/url" "os" "os/signal" + "path/filepath" "runtime" + "strconv" "syscall" "testing" "time" - "github.com/google/uuid" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "tailscale.com/net/stun/stuntest" + "tailscale.com/tailcfg" + "tailscale.com/types/nettype" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -30,17 +38,19 @@ const runTestEnv = "CODER_TAILNET_TESTS" var ( isSubprocess = flag.Bool("subprocess", false, "Signifies that this is a test subprocess") testID = flag.String("test-name", "", "Which test is being run") - role = flag.String("role", "", "The role of the test subprocess: server, client") + role = flag.String("role", "", "The role of the test subprocess: server, stun, client") // Role: server serverListenAddr = flag.String("server-listen-addr", "", "The address to listen on for the server") + // Role: stun + stunListenAddr = flag.String("stun-listen-addr", "", "The address to listen on for the STUN server") + // Role: client - clientName = flag.String("client-name", "", "The name of the client for logs") - clientServerURL = flag.String("client-server-url", "", "The url to connect to the server") - clientMyID = flag.String("client-id", "", "The id of the client") - clientPeerID = flag.String("client-peer-id", "", "The id of the other client") - clientRunTests = flag.Bool("client-run-tests", false, "Run the tests in the client subprocess") + clientName = flag.String("client-name", "", "The name of the client for logs") + clientNumber = 
flag.Int("client-number", 0, "The number of the client") + clientServerURL = flag.String("client-server-url", "", "The url to connect to the server") + clientDERPMapPath = flag.String("client-derp-map-path", "", "The path to the DERP map file to use on this client") ) func TestMain(m *testing.M) { @@ -87,7 +97,7 @@ var topologies = []integration.TestTopology{ // endpoints to connect as routing is enabled between client 1 and // client 2. Name: "EasyNATDirect", - SetupNetworking: integration.SetupNetworkingEasyNAT, + SetupNetworking: integration.SetupNetworkingEasyNATWithSTUN, Server: integration.SimpleServerOptions{}, StartClient: integration.StartClientDirect, RunTests: integration.TestSuite, @@ -143,17 +153,41 @@ func TestIntegration(t *testing.T) { log := slogtest.Make(t, nil).Leveled(slog.LevelDebug) networking := topo.SetupNetworking(t, log) - // Fork the three child processes. + // Useful for debugging network namespaces by avoiding cleanup. + // t.Cleanup(func() { + // time.Sleep(time.Minute * 15) + // }) + closeServer := startServerSubprocess(t, topo.Name, networking) + + closeSTUN := func() error { return nil } + if networking.STUN.ListenAddr != "" { + closeSTUN = startSTUNSubprocess(t, topo.Name, networking) + } + + // Write the DERP maps to a file. + tempDir := t.TempDir() + client1DERPMapPath := filepath.Join(tempDir, "client1-derp-map.json") + client1DERPMap, err := networking.Client1.ResolveDERPMap() + require.NoError(t, err, "resolve client 1 DERP map") + err = writeDERPMapToFile(client1DERPMapPath, client1DERPMap) + require.NoError(t, err, "write client 1 DERP map") + client2DERPMapPath := filepath.Join(tempDir, "client2-derp-map.json") + client2DERPMap, err := networking.Client2.ResolveDERPMap() + require.NoError(t, err, "resolve client 2 DERP map") + err = writeDERPMapToFile(client2DERPMapPath, client2DERPMap) + require.NoError(t, err, "write client 2 DERP map") + // client1 runs the tests. 
- client1ErrCh, _ := startClientSubprocess(t, topo.Name, networking, 1) - _, closeClient2 := startClientSubprocess(t, topo.Name, networking, 2) + client1ErrCh, _ := startClientSubprocess(t, topo.Name, networking, integration.Client1, client1DERPMapPath) + _, closeClient2 := startClientSubprocess(t, topo.Name, networking, integration.Client2, client2DERPMapPath) // Wait for client1 to exit. require.NoError(t, <-client1ErrCh, "client 1 exited") // Close client2 and the server. require.NoError(t, closeClient2(), "client 2 exited") + require.NoError(t, closeSTUN(), "stun exited") require.NoError(t, closeServer(), "server exited") }) } @@ -169,10 +203,11 @@ func handleTestSubprocess(t *testing.T) { } } require.NotEmptyf(t, topo.Name, "unknown test topology %q", *testID) + require.Contains(t, []string{"server", "stun", "client"}, *role, "unknown role %q", *role) testName := topo.Name + "/" - if *role == "server" { - testName += "server" + if *role == "server" || *role == "stun" { + testName += *role } else { testName += *clientName } @@ -185,27 +220,44 @@ func handleTestSubprocess(t *testing.T) { topo.Server.StartServer(t, logger, *serverListenAddr) // no exit + case "stun": + launchSTUNServer(t, *stunListenAddr) + // no exit + case "client": logger = logger.Named(*clientName) + if *clientNumber != int(integration.ClientNumber1) && *clientNumber != int(integration.ClientNumber2) { + t.Fatalf("invalid client number %d", *clientNumber) + } + me, peer := integration.Client1, integration.Client2 + if *clientNumber == int(integration.ClientNumber2) { + me, peer = peer, me + } + serverURL, err := url.Parse(*clientServerURL) require.NoErrorf(t, err, "parse server url %q", *clientServerURL) - myID, err := uuid.Parse(*clientMyID) - require.NoErrorf(t, err, "parse client id %q", *clientMyID) - peerID, err := uuid.Parse(*clientPeerID) - require.NoErrorf(t, err, "parse peer id %q", *clientPeerID) + + // Load the DERP map. 
+ var derpMap tailcfg.DERPMap + derpMapPath := *clientDERPMapPath + f, err := os.Open(derpMapPath) + require.NoErrorf(t, err, "open DERP map %q", derpMapPath) + err = json.NewDecoder(f).Decode(&derpMap) + _ = f.Close() + require.NoErrorf(t, err, "decode DERP map %q", derpMapPath) waitForServerAvailable(t, serverURL) - conn := topo.StartClient(t, logger, serverURL, myID, peerID) + conn := topo.StartClient(t, logger, serverURL, &derpMap, me, peer) - if *clientRunTests { + if me.ShouldRunTests { // Wait for connectivity. - peerIP := tailnet.IPFromUUID(peerID) + peerIP := tailnet.IPFromUUID(peer.ID) if !conn.AwaitReachable(testutil.Context(t, testutil.WaitLong), peerIP) { t.Fatalf("peer %v did not become reachable", peerIP) } - topo.RunTests(t, logger, serverURL, myID, peerID, conn) + topo.RunTests(t, logger, serverURL, conn, me, peer) // then exit return } @@ -218,6 +270,23 @@ func handleTestSubprocess(t *testing.T) { }) } +type forcedAddrPacketListener struct { + addr string +} + +var _ nettype.PacketListener = forcedAddrPacketListener{} + +func (ln forcedAddrPacketListener) ListenPacket(ctx context.Context, network, _ string) (net.PacketConn, error) { + return nettype.Std{}.ListenPacket(ctx, network, ln.addr) +} + +func launchSTUNServer(t *testing.T, listenAddr string) { + ln := forcedAddrPacketListener{addr: listenAddr} + addr, cleanup := stuntest.ServeWithPacketListener(t, ln) + t.Cleanup(cleanup) + assert.Equal(t, listenAddr, addr.String(), "listen address should match forced addr") +} + func waitForServerAvailable(t *testing.T, serverURL *url.URL) { const delay = 100 * time.Millisecond const reqTimeout = 2 * time.Second @@ -247,29 +316,32 @@ func waitForServerAvailable(t *testing.T, serverURL *url.URL) { } func startServerSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) func() error { - _, closeFn := startSubprocess(t, "server", networking.ProcessServer.NetNS, []string{ + _, closeFn := startSubprocess(t, "server", 
networking.Server.Process.NetNS, []string{ "--subprocess", "--test-name=" + topologyName, "--role=server", - "--server-listen-addr=" + networking.ServerListenAddr, + "--server-listen-addr=" + networking.Server.ListenAddr, }) return closeFn } -func startClientSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking, clientNumber int) (<-chan error, func() error) { - require.True(t, clientNumber == 1 || clientNumber == 2) +func startSTUNSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) func() error { + _, closeFn := startSubprocess(t, "stun", networking.STUN.Process.NetNS, []string{ + "--subprocess", + "--test-name=" + topologyName, + "--role=stun", + "--stun-listen-addr=" + networking.STUN.ListenAddr, + }) + return closeFn +} +func startClientSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking, me integration.Client, derpMapPath string) (<-chan error, func() error) { var ( - clientName = fmt.Sprintf("client%d", clientNumber) - myID = integration.Client1ID - peerID = integration.Client2ID - accessURL = networking.ServerAccessURLClient1 - netNS = networking.ProcessClient1.NetNS + clientName = fmt.Sprintf("client%d", me.Number) + clientProcessConfig = networking.Client1 ) - if clientNumber == 2 { - myID, peerID = peerID, myID - accessURL = networking.ServerAccessURLClient2 - netNS = networking.ProcessClient2.NetNS + if me.Number == integration.ClientNumber2 { + clientProcessConfig = networking.Client2 } flags := []string{ @@ -277,15 +349,12 @@ func startClientSubprocess(t *testing.T, topologyName string, networking integra "--test-name=" + topologyName, "--role=client", "--client-name=" + clientName, - "--client-server-url=" + accessURL, - "--client-id=" + myID.String(), - "--client-peer-id=" + peerID.String(), - } - if clientNumber == 1 { - flags = append(flags, "--client-run-tests") + "--client-number=" + strconv.Itoa(int(me.Number)), + "--client-server-url=" + 
clientProcessConfig.ServerAccessURL, + "--client-derp-map-path=" + derpMapPath, } - return startSubprocess(t, clientName, netNS, flags) + return startSubprocess(t, clientName, clientProcessConfig.Process.NetNS, flags) } // startSubprocess launches the test binary with the same flags as the test, but @@ -295,6 +364,22 @@ func startClientSubprocess(t *testing.T, topologyName string, networking integra func startSubprocess(t *testing.T, processName string, netNS *os.File, flags []string) (<-chan error, func() error) { name := os.Args[0] // Always use verbose mode since it gets piped to the parent test anyways. - args := append(os.Args[1:], append([]string{"-test.v=true"}, flags...)...) + args := append(os.Args[1:], append([]string{"-test.v=true"}, flags...)...) //nolint:gocritic return integration.ExecBackground(t, processName, netNS, name, args) } + +func writeDERPMapToFile(path string, derpMap *tailcfg.DERPMap) error { + f, err := os.Create(path) + if err != nil { + return err + } + defer f.Close() + + enc := json.NewEncoder(f) + enc.SetIndent("", " ") + err = enc.Encode(derpMap) + if err != nil { + return err + } + return nil +} diff --git a/tailnet/test/integration/network.go b/tailnet/test/integration/network.go index 80eeb6048bd66..e0d8f7109c167 100644 --- a/tailnet/test/integration/network.go +++ b/tailnet/test/integration/network.go @@ -13,27 +13,55 @@ import ( "github.com/stretchr/testify/require" "github.com/tailscale/netlink" "golang.org/x/xerrors" + "tailscale.com/tailcfg" "cdr.dev/slog" "github.com/coder/coder/v2/cryptorand" ) +const ( + client1Port = 48001 + client1RouterPort = 48011 + client2Port = 48002 + client2RouterPort = 48012 +) + type TestNetworking struct { - // ServerListenAddr is the IP address and port that the server listens on, - // passed to StartServer. - ServerListenAddr string - // ServerAccessURLClient1 is the hostname and port that the first client - // uses to access the server. 
- ServerAccessURLClient1 string - // ServerAccessURLClient2 is the hostname and port that the second client - // uses to access the server. - ServerAccessURLClient2 string - - // Networking settings for each subprocess. - ProcessServer TestNetworkingProcess - ProcessClient1 TestNetworkingProcess - ProcessClient2 TestNetworkingProcess + Server TestNetworkingServer + STUN TestNetworkingSTUN + Client1 TestNetworkingClient + Client2 TestNetworkingClient +} + +type TestNetworkingServer struct { + Process TestNetworkingProcess + ListenAddr string +} + +type TestNetworkingSTUN struct { + Process TestNetworkingProcess + // If empty, no STUN subprocess is launched. + ListenAddr string +} + +type TestNetworkingClient struct { + Process TestNetworkingProcess + // ServerAccessURL is the hostname and port that the client uses to access + // the server over HTTP for coordination. + ServerAccessURL string + // DERPMap is the DERP map that the client uses. If nil, a basic DERP map + // containing only a single DERP with `ServerAccessURL` is used with no + // STUN servers. + DERPMap *tailcfg.DERPMap +} + +func (c TestNetworkingClient) ResolveDERPMap() (*tailcfg.DERPMap, error) { + if c.DERPMap != nil { + return c.DERPMap, nil + } + + return basicDERPMap(c.ServerAccessURL) } type TestNetworkingProcess struct { @@ -46,14 +74,9 @@ type TestNetworkingProcess struct { // namespace only exists for isolation on the host and doesn't serve any routing // purpose. func SetupNetworkingLoopback(t *testing.T, _ slog.Logger) TestNetworking { - netNSName := "codertest_netns_" - randStr, err := cryptorand.String(4) - require.NoError(t, err, "generate random string for netns name") - netNSName += randStr - // Create a single network namespace for all tests so we can have an // isolated loopback interface. 
- netNSFile := createNetNS(t, netNSName) + netNSFile := createNetNS(t, uniqNetName(t)) var ( listenAddr = "127.0.0.1:8080" @@ -62,176 +85,323 @@ func SetupNetworkingLoopback(t *testing.T, _ slog.Logger) TestNetworking { } ) return TestNetworking{ - ServerListenAddr: listenAddr, - ServerAccessURLClient1: "http://" + listenAddr, - ServerAccessURLClient2: "http://" + listenAddr, - ProcessServer: process, - ProcessClient1: process, - ProcessClient2: process, + Server: TestNetworkingServer{ + Process: process, + ListenAddr: listenAddr, + }, + Client1: TestNetworkingClient{ + Process: process, + ServerAccessURL: "http://" + listenAddr, + }, + Client2: TestNetworkingClient{ + Process: process, + ServerAccessURL: "http://" + listenAddr, + }, } } -// SetupNetworkingEasyNAT creates a network namespace with a router that NATs -// packets between two clients and a server. -// See createFakeRouter for the full topology. +func easyNAT(t *testing.T) fakeInternet { + internet := createFakeInternet(t) + + _, err := commandInNetNS(internet.BridgeNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() + require.NoError(t, wrapExitErr(err), "enable IP forwarding in bridge NetNS") + + // Set up iptables masquerade rules to allow each router to NAT packets. + leaves := []struct { + fakeRouterLeaf + clientPort int + natPort int + }{ + {internet.Client1, client1Port, client1RouterPort}, + {internet.Client2, client2Port, client2RouterPort}, + } + for _, leaf := range leaves { + _, err := commandInNetNS(leaf.RouterNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() + require.NoError(t, wrapExitErr(err), "enable IP forwarding in router NetNS") + + // All non-UDP traffic should use regular masquerade e.g. for HTTP. + _, err = commandInNetNS(leaf.RouterNetNS, "iptables", []string{ + "-t", "nat", + "-A", "POSTROUTING", + // Every interface except loopback. + "!", "-o", "lo", + // Every protocol except UDP. 
+ "!", "-p", "udp", + "-j", "MASQUERADE", + }).Output() + require.NoError(t, wrapExitErr(err), "add iptables non-UDP masquerade rule") + + // Outgoing traffic should get NATed to the router's IP. + _, err = commandInNetNS(leaf.RouterNetNS, "iptables", []string{ + "-t", "nat", + "-A", "POSTROUTING", + "-p", "udp", + "--sport", fmt.Sprint(leaf.clientPort), + "-j", "SNAT", + "--to-source", fmt.Sprintf("%s:%d", leaf.RouterIP, leaf.natPort), + }).Output() + require.NoError(t, wrapExitErr(err), "add iptables SNAT rule") + + // Incoming traffic should be forwarded to the client's IP. + _, err = commandInNetNS(leaf.RouterNetNS, "iptables", []string{ + "-t", "nat", + "-A", "PREROUTING", + "-p", "udp", + "--dport", fmt.Sprint(leaf.natPort), + "-j", "DNAT", + "--to-destination", fmt.Sprintf("%s:%d", leaf.ClientIP, leaf.clientPort), + }).Output() + require.NoError(t, wrapExitErr(err), "add iptables DNAT rule") + } + + return internet +} + +// SetupNetworkingEasyNAT creates a fake internet and sets up "easy NAT" +// forwarding rules. +// See createFakeInternet. // NAT is achieved through a single iptables masquerade rule. func SetupNetworkingEasyNAT(t *testing.T, _ slog.Logger) TestNetworking { - router := createFakeRouter(t) - - // Set up iptables masquerade rules to allow the router to NAT packets - // between the Three Kingdoms. - _, err := commandInNetNS(router.RouterNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() - require.NoError(t, wrapExitErr(err), "enable IP forwarding in router NetNS") - _, err = commandInNetNS(router.RouterNetNS, "iptables", []string{ - "-t", "nat", - "-A", "POSTROUTING", - // Every interface except loopback. 
- "!", "-o", "lo", - "-j", "MASQUERADE", - }).Output() - require.NoError(t, wrapExitErr(err), "add iptables masquerade rule") - - return router.Net + return easyNAT(t).Net } -type fakeRouter struct { - Net TestNetworking +// SetupNetworkingEasyNATWithSTUN does the same as SetupNetworkingEasyNAT, but +// also creates a namespace and bridge address for a STUN server. +func SetupNetworkingEasyNATWithSTUN(t *testing.T, _ slog.Logger) TestNetworking { + internet := easyNAT(t) - RouterNetNS *os.File - RouterVeths struct { - Server string - Client1 string - Client2 string + // Create another network namespace for the STUN server. + stunNetNS := createNetNS(t, internet.NamePrefix+"stun") + internet.Net.STUN.Process = TestNetworkingProcess{ + NetNS: stunNetNS, + } + + const ip = "10.0.0.64" + err := joinBridge(joinBridgeOpts{ + bridgeNetNS: internet.BridgeNetNS, + netNS: stunNetNS, + bridgeName: internet.BridgeName, + vethPair: vethPair{ + Outer: internet.NamePrefix + "b-stun", + Inner: internet.NamePrefix + "stun-b", + }, + ip: ip, + }) + require.NoError(t, err, "join bridge with STUN server") + internet.Net.STUN.ListenAddr = ip + ":3478" + + // Define custom DERP map. 
+ stunRegion := &tailcfg.DERPRegion{ + RegionID: 10000, + RegionCode: "stun0", + RegionName: "STUN0", + Nodes: []*tailcfg.DERPNode{ + { + Name: "stun0a", + RegionID: 10000, + IPv4: ip, + IPv6: "none", + STUNPort: 3478, + STUNOnly: true, + }, + }, } - ServerNetNS *os.File - ServerVeth string - Client1NetNS *os.File - Client1Veth string - Client2NetNS *os.File - Client2Veth string + client1DERP, err := internet.Net.Client1.ResolveDERPMap() + require.NoError(t, err, "resolve DERP map for client 1") + client1DERP.Regions[stunRegion.RegionID] = stunRegion + internet.Net.Client1.DERPMap = client1DERP + client2DERP, err := internet.Net.Client2.ResolveDERPMap() + require.NoError(t, err, "resolve DERP map for client 2") + client2DERP.Regions[stunRegion.RegionID] = stunRegion + internet.Net.Client2.DERPMap = client2DERP + + return internet.Net } -// fakeRouter creates multiple namespaces with veth pairs between them with -// the following topology: -// -// namespaces: -// - router -// - server -// - client1 -// - client2 +type vethPair struct { + Outer string + Inner string +} + +type fakeRouterLeaf struct { + // RouterIP is the IP address of the router on the bridge. + RouterIP string + // ClientIP is the IP address of the client on the router. + ClientIP string + // RouterNetNS is the router for this specific leaf. + RouterNetNS *os.File + // ClientNetNS is where the "user" is. + ClientNetNS *os.File + // Veth pair between the router and the bridge. + OuterVethPair vethPair + // Veth pair between the user and the router. 
+ InnerVethPair vethPair +} + +type fakeInternet struct { + Net TestNetworking + + NamePrefix string + BridgeNetNS *os.File + BridgeName string + ServerNetNS *os.File + ServerVethPair vethPair // between bridge and server NS + Client1 fakeRouterLeaf + Client2 fakeRouterLeaf +} + +// createFakeInternet creates multiple namespaces with veth pairs between them +// with the following topology: // -// veth pairs: -// - router-server (10.0.1.1) <-> server-router (10.0.1.2) -// - router-client1 (10.0.2.1) <-> client1-router (10.0.2.2) -// - router-client2 (10.0.3.1) <-> client2-router (10.0.3.2) +// . veth ┌────────┐ veth +// . ┌─────────────────┤ Bridge ├───────────────────┐ +// . │ └───┬────┘ │ +// . │ │ │ +// . │10.0.0.1 veth│10.0.0.2 │10.0.0.3 +// . ┌───────┴───────┐ ┌───────┴─────────┐ ┌────────┴────────┐ +// . │ Server │ │ Client 1 router │ │ Client 2 router │ +// . └───────────────┘ └───────┬─────────┘ └────────┬────────┘ +// . │10.0.2.1 │10.0.3.1 +// . veth│ veth│ +// . │10.0.2.2 │10.0.3.2 +// . ┌───────┴─────────┐ ┌────────┴────────┐ +// . │ Client 1 │ │ Client 2 │ +// . └─────────────────┘ └─────────────────┘ // // No iptables rules are created, so packets will not be forwarded out of the -// box. Routes are created between all namespaces based on the veth pairs, -// however. -func createFakeRouter(t *testing.T) fakeRouter { +// box. Default routes are created from the edge namespaces (client1, client2) +// to their respective routers, but no NAT rules are created. +func createFakeInternet(t *testing.T) fakeInternet { t.Helper() const ( - routerServerPrefix = "10.0.1." - routerServerIP = routerServerPrefix + "1" - serverIP = routerServerPrefix + "2" - routerClient1Prefix = "10.0.2." - routerClient1IP = routerClient1Prefix + "1" - client1IP = routerClient1Prefix + "2" - routerClient2Prefix = "10.0.3." - routerClient2IP = routerClient2Prefix + "1" - client2IP = routerClient2Prefix + "2" + bridgePrefix = "10.0.0." 
+ serverIP = bridgePrefix + "1" + client1Prefix = "10.0.2." + client2Prefix = "10.0.3." ) + var ( + namePrefix = uniqNetName(t) + "_" + router = fakeInternet{ + NamePrefix: namePrefix, + BridgeName: namePrefix + "b", + } + ) + + // Create bridge namespace and bridge interface. + router.BridgeNetNS = createNetNS(t, router.BridgeName) + err := createBridge(router.BridgeNetNS, router.BridgeName) + require.NoError(t, err, "create bridge in netns") - prefix := uniqNetName(t) + "_" - router := fakeRouter{} - router.RouterVeths.Server = prefix + "r-s" - router.RouterVeths.Client1 = prefix + "r-c1" - router.RouterVeths.Client2 = prefix + "r-c2" - router.ServerVeth = prefix + "s-r" - router.Client1Veth = prefix + "c1-r" - router.Client2Veth = prefix + "c2-r" - - // Create namespaces. - router.RouterNetNS = createNetNS(t, prefix+"r") - serverNS := createNetNS(t, prefix+"s") - client1NS := createNetNS(t, prefix+"c1") - client2NS := createNetNS(t, prefix+"c2") - - vethPairs := []struct { - parentName string - peerName string - parentNS *os.File - peerNS *os.File - parentIP string - peerIP string + // Create server namespace and veth pair between bridge and server. 
+ router.ServerNetNS = createNetNS(t, namePrefix+"s") + router.ServerVethPair = vethPair{ + Outer: namePrefix + "b-s", + Inner: namePrefix + "s-b", + } + err = joinBridge(joinBridgeOpts{ + bridgeNetNS: router.BridgeNetNS, + netNS: router.ServerNetNS, + bridgeName: router.BridgeName, + vethPair: router.ServerVethPair, + ip: serverIP, + }) + require.NoError(t, err, "join bridge with server") + + leaves := []struct { + leaf *fakeRouterLeaf + routerName string + clientName string + routerBridgeIP string + routerClientIP string + clientIP string }{ { - parentName: router.RouterVeths.Server, - peerName: router.ServerVeth, - parentNS: router.RouterNetNS, - peerNS: serverNS, - parentIP: routerServerIP, - peerIP: serverIP, - }, - { - parentName: router.RouterVeths.Client1, - peerName: router.Client1Veth, - parentNS: router.RouterNetNS, - peerNS: client1NS, - parentIP: routerClient1IP, - peerIP: client1IP, + leaf: &router.Client1, + routerName: "c1r", + clientName: "c1", + routerBridgeIP: bridgePrefix + "2", + routerClientIP: client1Prefix + "1", + clientIP: client1Prefix + "2", }, { - parentName: router.RouterVeths.Client2, - peerName: router.Client2Veth, - parentNS: router.RouterNetNS, - peerNS: client2NS, - parentIP: routerClient2IP, - peerIP: client2IP, + leaf: &router.Client2, + routerName: "c2r", + clientName: "c2", + routerBridgeIP: bridgePrefix + "3", + routerClientIP: client2Prefix + "1", + clientIP: client2Prefix + "2", }, } - for _, vethPair := range vethPairs { - err := createVethPair(vethPair.parentName, vethPair.peerName) - require.NoErrorf(t, err, "create veth pair %q <-> %q", vethPair.parentName, vethPair.peerName) - - // Move the veth interfaces to the respective network namespaces. 
- err = setVethNetNS(vethPair.parentName, int(vethPair.parentNS.Fd())) - require.NoErrorf(t, err, "set veth %q to NetNS", vethPair.parentName) - err = setVethNetNS(vethPair.peerName, int(vethPair.peerNS.Fd())) - require.NoErrorf(t, err, "set veth %q to NetNS", vethPair.peerName) + for _, leaf := range leaves { + leaf.leaf.RouterIP = leaf.routerBridgeIP + leaf.leaf.ClientIP = leaf.clientIP - // Set IP addresses on the interfaces. - err = setInterfaceIP(vethPair.parentNS, vethPair.parentName, vethPair.parentIP) - require.NoErrorf(t, err, "set IP %q on interface %q", vethPair.parentIP, vethPair.parentName) - err = setInterfaceIP(vethPair.peerNS, vethPair.peerName, vethPair.peerIP) - require.NoErrorf(t, err, "set IP %q on interface %q", vethPair.peerIP, vethPair.peerName) + // Create two network namespaces for each leaf: one for the router and + // one for the "client". + leaf.leaf.RouterNetNS = createNetNS(t, namePrefix+leaf.routerName) + leaf.leaf.ClientNetNS = createNetNS(t, namePrefix+leaf.clientName) - // Bring up both interfaces. - err = setInterfaceUp(vethPair.parentNS, vethPair.parentName) - require.NoErrorf(t, err, "bring up interface %q", vethPair.parentName) - err = setInterfaceUp(vethPair.peerNS, vethPair.peerName) - require.NoErrorf(t, err, "bring up interface %q", vethPair.parentName) + // Join the bridge. + leaf.leaf.OuterVethPair = vethPair{ + Outer: namePrefix + "b-" + leaf.routerName, + Inner: namePrefix + leaf.routerName + "-b", + } + err = joinBridge(joinBridgeOpts{ + bridgeNetNS: router.BridgeNetNS, + netNS: leaf.leaf.RouterNetNS, + bridgeName: router.BridgeName, + vethPair: leaf.leaf.OuterVethPair, + ip: leaf.routerBridgeIP, + }) + require.NoError(t, err, "join bridge with router") + + // Create inner veth pair between the router and the client. 
+ leaf.leaf.InnerVethPair = vethPair{ + Outer: namePrefix + leaf.routerName + "-" + leaf.clientName, + Inner: namePrefix + leaf.clientName + "-" + leaf.routerName, + } + err = createVethPair(leaf.leaf.InnerVethPair.Outer, leaf.leaf.InnerVethPair.Inner) + require.NoErrorf(t, err, "create veth pair %q <-> %q", leaf.leaf.InnerVethPair.Outer, leaf.leaf.InnerVethPair.Inner) + + // Move the network interfaces to the respective network namespaces. + err = setVethNetNS(leaf.leaf.InnerVethPair.Outer, int(leaf.leaf.RouterNetNS.Fd())) + require.NoErrorf(t, err, "set veth %q to NetNS", leaf.leaf.InnerVethPair.Outer) + err = setVethNetNS(leaf.leaf.InnerVethPair.Inner, int(leaf.leaf.ClientNetNS.Fd())) + require.NoErrorf(t, err, "set veth %q to NetNS", leaf.leaf.InnerVethPair.Inner) + + // Set router's "local" IP on the veth. + err = setInterfaceIP(leaf.leaf.RouterNetNS, leaf.leaf.InnerVethPair.Outer, leaf.routerClientIP) + require.NoErrorf(t, err, "set IP %q on interface %q", leaf.routerClientIP, leaf.leaf.InnerVethPair.Outer) + // Set client's IP on the veth. + err = setInterfaceIP(leaf.leaf.ClientNetNS, leaf.leaf.InnerVethPair.Inner, leaf.clientIP) + require.NoErrorf(t, err, "set IP %q on interface %q", leaf.clientIP, leaf.leaf.InnerVethPair.Inner) + + // Bring up the interfaces. + err = setInterfaceUp(leaf.leaf.RouterNetNS, leaf.leaf.InnerVethPair.Outer) + require.NoErrorf(t, err, "bring up interface %q", leaf.leaf.OuterVethPair.Outer) + err = setInterfaceUp(leaf.leaf.ClientNetNS, leaf.leaf.InnerVethPair.Inner) + require.NoErrorf(t, err, "bring up interface %q", leaf.leaf.InnerVethPair.Inner) // We don't need to add a route from parent to peer since the kernel // already adds a default route for the /24. We DO need to add a default // route from peer to parent, however. 
- err = addRouteInNetNS(vethPair.peerNS, []string{"default", "via", vethPair.parentIP, "dev", vethPair.peerName}) - require.NoErrorf(t, err, "add peer default route to %q", vethPair.peerName) + err = addRouteInNetNS(leaf.leaf.ClientNetNS, []string{"default", "via", leaf.routerClientIP, "dev", leaf.leaf.InnerVethPair.Inner}) + require.NoErrorf(t, err, "add peer default route to %q", leaf.leaf.InnerVethPair.Inner) } router.Net = TestNetworking{ - ServerListenAddr: serverIP + ":8080", - ServerAccessURLClient1: "http://" + serverIP + ":8080", - ServerAccessURLClient2: "http://" + serverIP + ":8080", - ProcessServer: TestNetworkingProcess{ - NetNS: serverNS, + Server: TestNetworkingServer{ + Process: TestNetworkingProcess{NetNS: router.ServerNetNS}, + ListenAddr: serverIP + ":8080", }, - ProcessClient1: TestNetworkingProcess{ - NetNS: client1NS, + Client1: TestNetworkingClient{ + Process: TestNetworkingProcess{NetNS: router.Client1.ClientNetNS}, + ServerAccessURL: "http://" + serverIP + ":8080", }, - ProcessClient2: TestNetworkingProcess{ - NetNS: client2NS, + Client2: TestNetworkingClient{ + Process: TestNetworkingProcess{NetNS: router.Client2.ClientNetNS}, + ServerAccessURL: "http://" + serverIP + ":8080", }, } return router @@ -246,6 +416,60 @@ func uniqNetName(t *testing.T) string { return netNSName } +type joinBridgeOpts struct { + bridgeNetNS *os.File + netNS *os.File + bridgeName string + // This vethPair will be created and should not already exist. + vethPair vethPair + ip string +} + +// joinBridge joins the given network namespace to the bridge. It creates a veth +// pair between the specified NetNS and the bridge NetNS, sets the IP address on +// the "child" veth, and brings up the interfaces. +func joinBridge(opts joinBridgeOpts) error { + // Create outer veth pair between the router and the bridge. 
+ err := createVethPair(opts.vethPair.Outer, opts.vethPair.Inner) + if err != nil { + return xerrors.Errorf("create veth pair %q <-> %q: %w", opts.vethPair.Outer, opts.vethPair.Inner, err) + } + + // Move the network interfaces to the respective network namespaces. + err = setVethNetNS(opts.vethPair.Outer, int(opts.bridgeNetNS.Fd())) + if err != nil { + return xerrors.Errorf("set veth %q to NetNS: %w", opts.vethPair.Outer, err) + } + err = setVethNetNS(opts.vethPair.Inner, int(opts.netNS.Fd())) + if err != nil { + return xerrors.Errorf("set veth %q to NetNS: %w", opts.vethPair.Inner, err) + } + + // Connect the outer veth to the bridge. + err = setInterfaceBridge(opts.bridgeNetNS, opts.vethPair.Outer, opts.bridgeName) + if err != nil { + return xerrors.Errorf("set interface %q master to %q: %w", opts.vethPair.Outer, opts.bridgeName, err) + } + + // Set the bridge IP on the inner veth. + err = setInterfaceIP(opts.netNS, opts.vethPair.Inner, opts.ip) + if err != nil { + return xerrors.Errorf("set IP %q on interface %q: %w", opts.ip, opts.vethPair.Inner, err) + } + + // Bring up the interfaces. + err = setInterfaceUp(opts.bridgeNetNS, opts.vethPair.Outer) + if err != nil { + return xerrors.Errorf("bring up interface %q: %w", opts.vethPair.Outer, err) + } + err = setInterfaceUp(opts.netNS, opts.vethPair.Inner) + if err != nil { + return xerrors.Errorf("bring up interface %q: %w", opts.vethPair.Inner, err) + } + + return nil +} + // createNetNS creates a new network namespace with the given name. The returned // file is a file descriptor to the network namespace. // Note: all cleanup is handled for you, you do not need to call Close on the @@ -283,18 +507,48 @@ func createNetNS(t *testing.T, name string) *os.File { return file } +// createBridge creates a bridge in the given network namespace. The bridge is +// automatically brought up. 
+func createBridge(netNS *os.File, name string) error { + // While it might be possible to create a bridge directly in a NetNS or move + // an existing bridge to a NetNS, I couldn't figure out a way to do it. + // Creating it directly within the NetNS is the simplest way. + _, err := commandInNetNS(netNS, "ip", []string{"link", "add", name, "type", "bridge"}).Output() + if err != nil { + return xerrors.Errorf("create bridge %q in netns: %w", name, wrapExitErr(err)) + } + + _, err = commandInNetNS(netNS, "ip", []string{"link", "set", name, "up"}).Output() + if err != nil { + return xerrors.Errorf("set bridge %q up in netns: %w", name, wrapExitErr(err)) + } + + return nil +} + +// setInterfaceBridge sets the master of the given interface to the specified +// bridge. +func setInterfaceBridge(netNS *os.File, ifaceName, bridgeName string) error { + _, err := commandInNetNS(netNS, "ip", []string{"link", "set", ifaceName, "master", bridgeName}).Output() + if err != nil { + return xerrors.Errorf("set interface %q master to %q in netns: %w", ifaceName, bridgeName, wrapExitErr(err)) + } + + return nil +} + // createVethPair creates a veth pair with the given names. 
func createVethPair(parentVethName, peerVethName string) error { - vethLinkAttrs := netlink.NewLinkAttrs() - vethLinkAttrs.Name = parentVethName + linkAttrs := netlink.NewLinkAttrs() + linkAttrs.Name = parentVethName veth := &netlink.Veth{ - LinkAttrs: vethLinkAttrs, + LinkAttrs: linkAttrs, PeerName: peerVethName, } err := netlink.LinkAdd(veth) if err != nil { - return xerrors.Errorf("LinkAdd(name: %q, peerName: %q): %w", parentVethName, peerVethName, err) + return xerrors.Errorf("LinkAdd(type: veth, name: %q, peerName: %q): %w", parentVethName, peerVethName, err) } return nil diff --git a/tailnet/test/integration/remove_test_ns.sh b/tailnet/test/integration/remove_test_ns.sh new file mode 100755 index 0000000000000..464aac6c8eff0 --- /dev/null +++ b/tailnet/test/integration/remove_test_ns.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +set -euo pipefail + +if [[ $(id -u) -ne 0 ]]; then + echo "Please run with sudo" + exit 1 +fi + +to_delete=$(ip netns list | grep -o 'cdr_.*_.*' | cut -d' ' -f1) +echo "Will delete:" +for ns in $to_delete; do + echo "- $ns" +done + +read -p "Continue? [y/N] " -n 1 -r +echo +if [[ ! $REPLY =~ ^[Yy]$ ]]; then + exit 1 +fi + +for ns in $to_delete; do + ip netns delete "$ns" +done diff --git a/tailnet/test/integration/suite.go b/tailnet/test/integration/suite.go index 54fb0856a21af..32d9adb2e4a14 100644 --- a/tailnet/test/integration/suite.go +++ b/tailnet/test/integration/suite.go @@ -7,7 +7,6 @@ import ( "net/url" "testing" - "github.com/google/uuid" "github.com/stretchr/testify/require" "cdr.dev/slog" @@ -17,12 +16,12 @@ import ( // TODO: instead of reusing one conn for each suite, maybe we should make a new // one for each subtest? 
-func TestSuite(t *testing.T, _ slog.Logger, _ *url.URL, _, peerID uuid.UUID, conn *tailnet.Conn) { +func TestSuite(t *testing.T, _ slog.Logger, _ *url.URL, conn *tailnet.Conn, _, peer Client) { t.Parallel() t.Run("Connectivity", func(t *testing.T) { t.Parallel() - peerIP := tailnet.IPFromUUID(peerID) + peerIP := tailnet.IPFromUUID(peer.ID) _, _, _, err := conn.Ping(testutil.Context(t, testutil.WaitLong), peerIP) require.NoError(t, err, "ping peer") }) From a0962ba08914f4a42657198efb6c87b4e19959ca Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Fri, 24 May 2024 12:01:03 +0400 Subject: [PATCH 101/149] fix: wait for PGCoordinator to clean up db state (#13351) c.f. https://github.com/coder/coder/pull/13192#issuecomment-2097657692 We need to wait for PGCoordinator to finish its work before returning on `Close()`, so that we delete database state (best effort -- if this fails others will filter it out based on heartbeats). --- enterprise/tailnet/pgcoord.go | 23 +++++++++- enterprise/tailnet/pgcoord_internal_test.go | 1 + enterprise/tailnet/pgcoord_test.go | 47 +++++++++++++++++++++ 3 files changed, 69 insertions(+), 2 deletions(-) diff --git a/enterprise/tailnet/pgcoord.go b/enterprise/tailnet/pgcoord.go index baccfe66a7fd7..857cdafe94e79 100644 --- a/enterprise/tailnet/pgcoord.go +++ b/enterprise/tailnet/pgcoord.go @@ -161,11 +161,12 @@ func newPGCoordInternal( closed: make(chan struct{}), } go func() { - // when the main context is canceled, or the coordinator closed, the binder and tunneler - // always eventually stop. Once they stop it's safe to cancel the querier context, which + // when the main context is canceled, or the coordinator closed, the binder, tunneler, and + // handshaker always eventually stop. Once they stop it's safe to cancel the querier context, which // has the effect of deleting the coordinator from the database and ceasing heartbeats. 
c.binder.workerWG.Wait() c.tunneler.workerWG.Wait() + c.handshaker.workerWG.Wait() querierCancel() }() logger.Info(ctx, "starting coordinator") @@ -231,6 +232,7 @@ func (c *pgCoord) Close() error { c.logger.Info(c.ctx, "closing coordinator") c.cancel() c.closeOnce.Do(func() { close(c.closed) }) + c.querier.wait() return nil } @@ -795,6 +797,8 @@ type querier struct { workQ *workQ[querierWorkKey] + wg sync.WaitGroup + heartbeats *heartbeats updates <-chan hbUpdate @@ -831,6 +835,7 @@ func newQuerier(ctx context.Context, } q.subscribe() + q.wg.Add(2 + numWorkers) go func() { <-firstHeartbeat go q.handleIncoming() @@ -842,7 +847,13 @@ func newQuerier(ctx context.Context, return q } +func (q *querier) wait() { + q.wg.Wait() + q.heartbeats.wg.Wait() +} + func (q *querier) handleIncoming() { + defer q.wg.Done() for { select { case <-q.ctx.Done(): @@ -919,6 +930,7 @@ func (q *querier) cleanupConn(c *connIO) { } func (q *querier) worker() { + defer q.wg.Done() eb := backoff.NewExponentialBackOff() eb.MaxElapsedTime = 0 // retry indefinitely eb.MaxInterval = dbMaxBackoff @@ -1204,6 +1216,7 @@ func (q *querier) resyncPeerMappings() { } func (q *querier) handleUpdates() { + defer q.wg.Done() for { select { case <-q.ctx.Done(): @@ -1451,6 +1464,8 @@ type heartbeats struct { coordinators map[uuid.UUID]time.Time timer *time.Timer + wg sync.WaitGroup + // overwritten in tests, but otherwise constant cleanupPeriod time.Duration } @@ -1472,6 +1487,7 @@ func newHeartbeats( coordinators: make(map[uuid.UUID]time.Time), cleanupPeriod: cleanupPeriod, } + h.wg.Add(3) go h.subscribe() go h.sendBeats() go h.cleanupLoop() @@ -1502,6 +1518,7 @@ func (h *heartbeats) filter(mappings []mapping) []mapping { } func (h *heartbeats) subscribe() { + defer h.wg.Done() eb := backoff.NewExponentialBackOff() eb.MaxElapsedTime = 0 // retry indefinitely eb.MaxInterval = dbMaxBackoff @@ -1611,6 +1628,7 @@ func (h *heartbeats) checkExpiry() { } func (h *heartbeats) sendBeats() { + defer h.wg.Done() // send 
an initial heartbeat so that other coordinators can start using our bindings right away. h.sendBeat() close(h.firstHeartbeat) // signal binder it can start writing @@ -1662,6 +1680,7 @@ func (h *heartbeats) sendDelete() { } func (h *heartbeats) cleanupLoop() { + defer h.wg.Done() h.cleanup() tkr := time.NewTicker(h.cleanupPeriod) defer tkr.Stop() diff --git a/enterprise/tailnet/pgcoord_internal_test.go b/enterprise/tailnet/pgcoord_internal_test.go index 53fd61d73f066..4607e6fb2ab2f 100644 --- a/enterprise/tailnet/pgcoord_internal_test.go +++ b/enterprise/tailnet/pgcoord_internal_test.go @@ -66,6 +66,7 @@ func TestHeartbeats_Cleanup(t *testing.T) { store: mStore, cleanupPeriod: time.Millisecond, } + uut.wg.Add(1) go uut.cleanupLoop() for i := 0; i < 6; i++ { diff --git a/enterprise/tailnet/pgcoord_test.go b/enterprise/tailnet/pgcoord_test.go index 5bd722533dc39..9c363ee700570 100644 --- a/enterprise/tailnet/pgcoord_test.go +++ b/enterprise/tailnet/pgcoord_test.go @@ -864,6 +864,53 @@ func TestPGCoordinator_Lost(t *testing.T) { agpltest.LostTest(ctx, t, coordinator) } +func TestPGCoordinator_DeleteOnClose(t *testing.T) { + t.Parallel() + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitSuperLong) + defer cancel() + ctrl := gomock.NewController(t) + mStore := dbmock.NewMockStore(ctrl) + ps := pubsub.NewInMemory() + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: true}).Leveled(slog.LevelDebug) + + upsertDone := make(chan struct{}) + deleteCalled := make(chan struct{}) + finishDelete := make(chan struct{}) + mStore.EXPECT().UpsertTailnetCoordinator(gomock.Any(), gomock.Any()). + MinTimes(1). + Do(func(_ context.Context, _ uuid.UUID) { close(upsertDone) }). + Return(database.TailnetCoordinator{}, nil) + mStore.EXPECT().DeleteCoordinator(gomock.Any(), gomock.Any()). + Times(1). + Do(func(_ context.Context, _ uuid.UUID) { + close(deleteCalled) + <-finishDelete + }). 
+ Return(nil) + + // extra calls we don't particularly care about for this test + mStore.EXPECT().CleanTailnetCoordinators(gomock.Any()).AnyTimes().Return(nil) + mStore.EXPECT().CleanTailnetLostPeers(gomock.Any()).AnyTimes().Return(nil) + mStore.EXPECT().CleanTailnetTunnels(gomock.Any()).AnyTimes().Return(nil) + + uut, err := tailnet.NewPGCoord(ctx, logger, ps, mStore) + require.NoError(t, err) + testutil.RequireRecvCtx(ctx, t, upsertDone) + closeErr := make(chan error, 1) + go func() { + closeErr <- uut.Close() + }() + select { + case <-closeErr: + t.Fatal("close returned before DeleteCoordinator called") + case <-deleteCalled: + close(finishDelete) + err := testutil.RequireRecvCtx(ctx, t, closeErr) + require.NoError(t, err) + } +} + type testConn struct { ws, serverWS net.Conn nodeChan chan []*agpl.Node From ff617cc545d42d28c8e03b4f71a682ac975523fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 May 2024 18:46:57 +0300 Subject: [PATCH 102/149] chore: bump github.com/valyala/fasthttp from 1.52.0 to 1.53.0 (#13318) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index a23642db50da4..74ac28d0fcc6e 100644 --- a/go.mod +++ b/go.mod @@ -132,7 +132,7 @@ require ( github.com/justinas/nosurf v1.1.1 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 github.com/kirsle/configdir v0.0.0-20170128060238-e45d2f54772f - github.com/klauspost/compress v1.17.6 + github.com/klauspost/compress v1.17.7 github.com/lib/pq v1.10.9 github.com/mattn/go-isatty v0.0.20 github.com/mitchellh/go-wordwrap v1.0.1 @@ -160,7 +160,7 @@ require ( github.com/tidwall/gjson v1.17.0 github.com/u-root/u-root v0.14.0 github.com/unrolled/secure v1.14.0 - github.com/valyala/fasthttp v1.52.0 + github.com/valyala/fasthttp v1.53.0 github.com/wagslane/go-password-validator 
v0.3.0 go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1 go.nhat.io/otelsql v0.13.0 diff --git a/go.sum b/go.sum index e92cc869ed29d..4d2899af5da07 100644 --- a/go.sum +++ b/go.sum @@ -597,8 +597,8 @@ github.com/kirsle/configdir v0.0.0-20170128060238-e45d2f54772f/go.mod h1:4rEELDS github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.17.6 h1:60eq2E/jlfwQXtvZEeBUYADs+BwKBWURIY+Gj2eRGjI= -github.com/klauspost/compress v1.17.6/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= +github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw= github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= github.com/kortschak/wol v0.0.0-20200729010619-da482cc4850a h1:+RR6SqnTkDLWyICxS1xpjCi/3dhyV+TgZwA6Ww3KncQ= @@ -893,8 +893,8 @@ github.com/unrolled/secure v1.14.0 h1:u9vJTU/pR4Bny0ntLUMxdfLtmIRGvQf2sEFuA0TG9A github.com/unrolled/secure v1.14.0/go.mod h1:BmF5hyM6tXczk3MpQkFf1hpKSRqCyhqcbiQtiAF7+40= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.52.0 h1:wqBQpxH71XW0e2g+Og4dzQM8pk34aFYlA1Ga8db7gU0= -github.com/valyala/fasthttp v1.52.0/go.mod h1:hf5C4QnVMkNXMspnsUlfM3WitlgYflyhHYoKol/szxQ= +github.com/valyala/fasthttp v1.53.0 h1:lW/+SUkOxCx2vlIu0iaImv4JLrVRnbbkpCoaawvA4zc= +github.com/valyala/fasthttp v1.53.0/go.mod h1:6dt4/8olwq9QARP/TDuPmWyWcl4byhpvTJ4AAtcz+QM= github.com/vishvananda/netlink v1.2.1-beta.2 
h1:Llsql0lnQEbHj0I1OuKyp8otXp0r3q0mPkuhwHfStVs= github.com/vishvananda/netlink v1.2.1-beta.2/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= From 87dd87877970cb6cbb6e130209c204762afd380b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 May 2024 19:33:35 +0300 Subject: [PATCH 103/149] chore: bump google.golang.org/api from 0.180.0 to 0.181.0 (#13317) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- flake.nix | 2 +- go.mod | 10 +++++----- go.sum | 20 ++++++++++---------- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/flake.nix b/flake.nix index c8262aa7432cc..e11e3181d8db3 100644 --- a/flake.nix +++ b/flake.nix @@ -97,7 +97,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! 
- vendorHash = "sha256-YOXZ3tJAky2XmNH81MNj3eNq95ucGFUJIC0O8Z+xk20="; + vendorHash = "sha256-YGoQ9JgE9aeS4k3oNQw9EMAf2aIAGa+mclBjoIz7CWs="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index 74ac28d0fcc6e..5bf9c85807399 100644 --- a/go.mod +++ b/go.mod @@ -183,7 +183,7 @@ require ( golang.org/x/text v0.15.0 golang.org/x/tools v0.21.0 golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 - google.golang.org/api v0.180.0 + google.golang.org/api v0.181.0 google.golang.org/grpc v1.63.2 google.golang.org/protobuf v1.34.1 gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 @@ -219,7 +219,7 @@ require ( require ( cloud.google.com/go/logging v1.9.0 // indirect - cloud.google.com/go/longrunning v0.5.5 // indirect + cloud.google.com/go/longrunning v0.5.6 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect github.com/DataDog/appsec-internal-go v1.4.1 // indirect @@ -411,9 +411,9 @@ require ( golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 // indirect golang.zx2c4.com/wireguard/windows v0.5.3 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6 // indirect + google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240506185236-b8a5c65736ae // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect howett.net/plist v1.0.0 // indirect inet.af/peercred v0.0.0-20210906144145-0893ea02156a // indirect diff --git a/go.sum b/go.sum index 4d2899af5da07..323dc7e500354 100644 --- a/go.sum +++ b/go.sum @@ -9,8 +9,8 @@ 
cloud.google.com/go/compute/metadata v0.3.0 h1:Tz+eQXMEqDIKRsmY3cHTL6FVaynIjX2Qx cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/logging v1.9.0 h1:iEIOXFO9EmSiTjDmfpbRjOxECO7R8C7b8IXUGOj7xZw= cloud.google.com/go/logging v1.9.0/go.mod h1:1Io0vnZv4onoUnsVUQY3HZ3Igb1nBchky0A0y7BBBhE= -cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg= -cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s= +cloud.google.com/go/longrunning v0.5.6 h1:xAe8+0YaWoCKr9t1+aWe+OeQgN/iJK1fEgZSXmjuEaE= +cloud.google.com/go/longrunning v0.5.6/go.mod h1:vUaDrWYOMKRuhiv6JBnn49YxCPz2Ayn9GqyjaBT8/mA= filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= filippo.io/mkcert v1.4.4 h1:8eVbbwfVlaqUM7OwuftKc2nuYOoTDQWqsoXmzoXZdbc= @@ -1152,8 +1152,8 @@ golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6 h1:CawjfCvY golang.zx2c4.com/wireguard/wgctrl v0.0.0-20230429144221-925a1e7659e6/go.mod h1:3rxYc4HtVcSG9gVaTs2GEBdehh+sYPOwKtyUWEOTb80= golang.zx2c4.com/wireguard/windows v0.5.3 h1:On6j2Rpn3OEMXqBq00QEDC7bWSZrPIHKIus8eIuExIE= golang.zx2c4.com/wireguard/windows v0.5.3/go.mod h1:9TEe8TJmtwyQebdFwAkEWOPr3prrtqm+REGFifP60hI= -google.golang.org/api v0.180.0 h1:M2D87Yo0rGBPWpo1orwfCLehUUL6E7/TYe5gvMQWDh4= -google.golang.org/api v0.180.0/go.mod h1:51AiyoEg1MJPSZ9zvklA8VnRILPXxn1iVen9v25XHAE= +google.golang.org/api v0.181.0 h1:rPdjwnWgiPPOJx3IcSAQ2III5aX5tCer6wMpa/xmZi4= +google.golang.org/api v0.181.0/go.mod h1:MnQ+M0CFsfUwA5beZ+g/vCBCPXvtmZwRz2qzZk8ih1k= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= @@ -1162,12 +1162,12 @@ 
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de h1:F6qOa9AZTYJXOUEr4jDysRDLrm4PHePlge4v4TGAlxY= -google.golang.org/genproto v0.0.0-20240227224415-6ceb2ff114de/go.mod h1:VUhTRKeHn9wwcdrk73nvdC9gF178Tzhmt/qyaFcPLSo= -google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c h1:kaI7oewGK5YnVwj+Y+EJBO/YN1ht8iTL9XkFHtVZLsc= -google.golang.org/genproto/googleapis/api v0.0.0-20240314234333-6e1732d8331c/go.mod h1:VQW3tUculP/D4B+xVCo+VgSq8As6wA9ZjHl//pmk+6s= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6 h1:DujSIu+2tC9Ht0aPNA7jgj23Iq8Ewi5sgkQ++wdvonE= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240429193739-8cf5692501f6/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY= +google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda h1:wu/KJm9KJwpfHWhkkZGohVC6KRrc1oJNr4jwtQMOQXw= +google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda/go.mod h1:g2LLCvCeCSir/JJSWosk19BR4NVxGqHUC6rxIRsd7Aw= +google.golang.org/genproto/googleapis/api v0.0.0-20240506185236-b8a5c65736ae h1:AH34z6WAGVNkllnKs5raNq3yRq93VnjBG6rpfub/jYk= +google.golang.org/genproto/googleapis/api v0.0.0-20240506185236-b8a5c65736ae/go.mod h1:FfiGhwUm6CJviekPrc0oJ+7h29e+DmWU6UtjX0ZvI7Y= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8 h1:mxSlqyb8ZAHsYDCfiXN1EDdNTdvjUJSLY+OnAUtYNYA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240513163218-0867130af1f8/go.mod h1:I7Y+G38R2bu5j1aLzfFmQfTcU/WnFuqDwLZAbvKTKpM= google.golang.org/grpc v1.19.0/go.mod 
h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= From 60224fa21606717d57bbcac0430623bb55c4e1a4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 24 May 2024 16:45:11 +0000 Subject: [PATCH 104/149] chore: bump github.com/fatih/color from 1.16.0 to 1.17.0 (#13321) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Muhammad Atif Ali --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 5bf9c85807399..5642c5ee15344 100644 --- a/go.mod +++ b/go.mod @@ -96,7 +96,7 @@ require ( github.com/dave/dst v0.27.2 github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc github.com/elastic/go-sysinfo v1.14.0 - github.com/fatih/color v1.16.0 + github.com/fatih/color v1.17.0 github.com/fatih/structs v1.1.0 github.com/fatih/structtag v1.2.0 github.com/fergusstrange/embedded-postgres v1.27.0 diff --git a/go.sum b/go.sum index 323dc7e500354..c49d35deb537d 100644 --- a/go.sum +++ b/go.sum @@ -285,8 +285,8 @@ github.com/evanw/esbuild v0.20.2 h1:E4Y0iJsothpUCq7y0D+ERfqpJmPWrZpNybJA3x3I4p8= github.com/evanw/esbuild v0.20.2/go.mod h1:D2vIQZqV/vIf/VRHtViaUtViZmG7o+kKmlBfVQuRi48= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= -github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= -github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= +github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= +github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/fatih/structs v1.1.0 
h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=

From 47f8f5d9631dac19890c0898ffdacf83b65974e2 Mon Sep 17 00:00:00 2001
From: Muhammad Atif Ali
Date: Fri, 24 May 2024 23:15:29 +0300
Subject: [PATCH 105/149] chore(docs): update github app permission to read org
 members (#13362)

---
 docs/admin/external-auth.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/admin/external-auth.md b/docs/admin/external-auth.md
index 9165c1c67604c..168028ecae06e 100644
--- a/docs/admin/external-auth.md
+++ b/docs/admin/external-auth.md
@@ -70,6 +70,7 @@ GitHub provider).
    | Pull requests | Read & Write | Grants access to create and update pull requests. |
    | Workflows     | Read & Write | Grants access to update files in `.github/workflows/`. |
    | Metadata      | Read-only    | Grants access to metadata written by GitHub Apps.  |
+   | Members       | Read-only    | Grants access to organization members and teams.  |

 3. Install the App for your organization. You may select a subset of
    repositories to grant access to.
From a1d3b82dd128838dd9a1edc3d8bca247e3618a52 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 May 2024 19:57:36 +0300 Subject: [PATCH 106/149] ci: bump aquasecurity/trivy-action from 0.20.0 to 0.21.0 in the github-actions group (#13376) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/security.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index fb1238afec267..1bf0bf4b63180 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -114,7 +114,7 @@ jobs: echo "image=$(cat "$image_job")" >> $GITHUB_OUTPUT - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@b2933f565dbc598b29947660e66259e3c7bc8561 + uses: aquasecurity/trivy-action@fd25fed6972e341ff0007ddb61f77e88103953c2 with: image-ref: ${{ steps.build.outputs.image }} format: sarif From 79d73f77f5f54a7de4bccf793ca6ac45655c9f65 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Tue, 28 May 2024 11:45:41 -0500 Subject: [PATCH 107/149] chore: skip Azure `TestExpiresSoon` (#13385) Adds some context to the test skip so it can be removed or enabled in the future. --- coderd/azureidentity/azureidentity_test.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/coderd/azureidentity/azureidentity_test.go b/coderd/azureidentity/azureidentity_test.go index 854f95a6e4933..bd55ae2538d3a 100644 --- a/coderd/azureidentity/azureidentity_test.go +++ b/coderd/azureidentity/azureidentity_test.go @@ -64,6 +64,10 @@ func TestValidate(t *testing.T) { func TestExpiresSoon(t *testing.T) { t.Parallel() + // TODO (@kylecarbs): It's unknown why Microsoft does not have new certificates live... + // The certificate is automatically fetched if it's not found in our database, + // so in a worst-case scenario expired certificates will only impact 100% airgapped users. 
+ t.Skip() const threshold = 1 for _, c := range azureidentity.Certificates { From 5b78ec97b601027fdba5a55ff70e612bc3103185 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 12:45:16 -0500 Subject: [PATCH 108/149] chore: bump alpine from 3.19.1 to 3.20.0 in /scripts (#13375) Bumps alpine from 3.19.1 to 3.20.0. --- updated-dependencies: - dependency-name: alpine dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- scripts/Dockerfile.base | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index fa249f6a62cf9..afa00dd4cc7a9 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -1,7 +1,7 @@ # This is the base image used for Coder images. It's a multi-arch image that is # built in depot.dev for all supported architectures. Since it's built on real # hardware and not cross-compiled, it can have "RUN" commands. -FROM alpine:3.19.1 +FROM alpine:3.20.0 # We use a single RUN command to reduce the number of layers in the image. 
# NOTE: Keep the Terraform version in sync with minTerraformVersion and From 6293c3374625984684d0ba7697a1c6005bcac821 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Tue, 28 May 2024 14:07:22 -0500 Subject: [PATCH 109/149] chore: add refresh token and error to user's external auth page (#13380) * chore: add story for failed refresh error * chore: add refresh icon to tokens that can refresh --- .../ExternalAuthPageView.stories.tsx | 30 +++++++ .../ExternalAuthPage/ExternalAuthPageView.tsx | 84 ++++++++++++++++--- 2 files changed, 101 insertions(+), 13 deletions(-) diff --git a/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.stories.tsx b/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.stories.tsx index f5f5cb3e21963..4f04feab54b9f 100644 --- a/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.stories.tsx +++ b/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.stories.tsx @@ -60,3 +60,33 @@ export const Unauthenticated: Story = { }, }, }; + +export const Failed: Story = { + args: { + ...meta.args, + auths: { + providers: [MockGithubExternalProvider], + links: [ + { + ...MockGithubAuthLink, + validate_error: "Failed to refresh token", + }, + ], + }, + }, +}; + +export const NoRefresh: Story = { + args: { + ...meta.args, + auths: { + providers: [MockGithubExternalProvider], + links: [ + { + ...MockGithubAuthLink, + has_refresh_token: false, + }, + ], + }, + }, +}; diff --git a/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.tsx b/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.tsx index 4433fee43045b..b73286a6158f0 100644 --- a/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.tsx +++ b/site/src/pages/UserSettingsPage/ExternalAuthPage/ExternalAuthPageView.tsx @@ -1,11 +1,16 @@ +import { useTheme } from "@emotion/react"; +import AutorenewIcon from "@mui/icons-material/Autorenew"; import LoadingButton from 
"@mui/lab/LoadingButton"; +import Badge from "@mui/material/Badge"; import Divider from "@mui/material/Divider"; +import { styled } from "@mui/material/styles"; import Table from "@mui/material/Table"; import TableBody from "@mui/material/TableBody"; import TableCell from "@mui/material/TableCell"; import TableContainer from "@mui/material/TableContainer"; import TableHead from "@mui/material/TableHead"; import TableRow from "@mui/material/TableRow"; +import Tooltip from "@mui/material/Tooltip"; import visuallyHidden from "@mui/utils/visuallyHidden"; import { type FC, useState, useCallback, useEffect } from "react"; import { useQuery } from "react-query"; @@ -104,6 +109,25 @@ interface ExternalAuthRowProps { onValidateExternalAuth: () => void; } +const StyledBadge = styled(Badge)(({ theme }) => ({ + "& .MuiBadge-badge": { + // Make a circular background for the icon. Background provides contrast, with a thin + // border to separate it from the avatar image. + backgroundColor: `${theme.palette.background.paper}`, + borderStyle: "solid", + borderColor: `${theme.palette.secondary.main}`, + borderWidth: "thin", + + // Override the default minimum sizes, as they are larger than what we want. + minHeight: "0px", + minWidth: "0px", + // Override the default "height", which is usually set to some constant value. + height: "auto", + // Padding adds some room for the icon to live in. + padding: "0.1em", + }, +})); + const ExternalAuthRow: FC = ({ app, unlinked, @@ -111,6 +135,7 @@ const ExternalAuthRow: FC = ({ onUnlinkExternalAuth, onValidateExternalAuth, }) => { + const theme = useTheme(); const name = app.display_name || app.id || app.type; const authURL = "/external-auth/" + app.id; @@ -125,22 +150,55 @@ const ExternalAuthRow: FC = ({ ? externalAuth.authenticated : link?.authenticated ?? false; + let avatar = app.display_icon ? 
( + + ) : ( + {name} + ); + + // If the link is authenticated and has a refresh token, show that it will automatically + // attempt to authenticate when the token expires. + if (link?.has_refresh_token && authenticated) { + avatar = ( + + + + } + > + {avatar} + + ); + } + return ( - - ) - } - /> + + {link?.validate_error && ( + <> + + Error:{" "} + + {link?.validate_error} + + )} Date: Tue, 28 May 2024 19:48:23 +0000 Subject: [PATCH 110/149] chore: bump github.com/aws/aws-sdk-go-v2 from 1.26.1 to 1.27.0 (#13324) Bumps [github.com/aws/aws-sdk-go-v2](https://github.com/aws/aws-sdk-go-v2) from 1.26.1 to 1.27.0. - [Release notes](https://github.com/aws/aws-sdk-go-v2/releases) - [Commits](https://github.com/aws/aws-sdk-go-v2/compare/v1.26.1...v1.27.0) --- updated-dependencies: - dependency-name: github.com/aws/aws-sdk-go-v2 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 5642c5ee15344..0be7154072413 100644 --- a/go.mod +++ b/go.mod @@ -241,7 +241,7 @@ require ( github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c // indirect - github.com/aws/aws-sdk-go-v2 v1.26.1 + github.com/aws/aws-sdk-go-v2 v1.27.0 github.com/aws/aws-sdk-go-v2/config v1.27.7 github.com/aws/aws-sdk-go-v2/credentials v1.17.7 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.15.3 // indirect diff --git a/go.sum b/go.sum index c49d35deb537d..2a1d560cf7594 100644 --- a/go.sum +++ b/go.sum @@ -92,8 +92,8 @@ github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c h1:651/eoCRnQ7YtS github.com/armon/go-radix v1.0.1-0.20221118154546-54df44f2176c/go.mod 
h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/awalterschulze/gographviz v2.0.3+incompatible h1:9sVEXJBJLwGX7EQVhLm2elIKCm7P2YHFC8v6096G09E= github.com/awalterschulze/gographviz v2.0.3+incompatible/go.mod h1:GEV5wmg4YquNw7v1kkyoX9etIk8yVmXj+AkDHuuETHs= -github.com/aws/aws-sdk-go-v2 v1.26.1 h1:5554eUqIYVWpU0YmeeYZ0wU64H2VLBs8TlhRB2L+EkA= -github.com/aws/aws-sdk-go-v2 v1.26.1/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= +github.com/aws/aws-sdk-go-v2 v1.27.0 h1:7bZWKoXhzI+mMR/HjdMx8ZCC5+6fY0lS5tr0bbgiLlo= +github.com/aws/aws-sdk-go-v2 v1.27.0/go.mod h1:ffIFB97e2yNsv4aTSGkqtHnppsIJzw7G7BReUZ3jCXM= github.com/aws/aws-sdk-go-v2/config v1.27.7 h1:JSfb5nOQF01iOgxFI5OIKWwDiEXWTyTgg1Mm1mHi0A4= github.com/aws/aws-sdk-go-v2/config v1.27.7/go.mod h1:PH0/cNpoMO+B04qET699o5W92Ca79fVtbUnvMIZro4I= github.com/aws/aws-sdk-go-v2/credentials v1.17.7 h1:WJd+ubWKoBeRh7A5iNMnxEOs982SyVKOJD+K8HIezu4= From 1edd46dd5f5b235d6e8a2aee6da188a5bc610174 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 19:54:06 +0000 Subject: [PATCH 111/149] chore: bump github.com/hashicorp/terraform-json from 0.21.0 to 0.22.1 (#13322) Bumps [github.com/hashicorp/terraform-json](https://github.com/hashicorp/terraform-json) from 0.21.0 to 0.22.1. - [Release notes](https://github.com/hashicorp/terraform-json/releases) - [Commits](https://github.com/hashicorp/terraform-json/compare/v0.21.0...v0.22.1) --- updated-dependencies: - dependency-name: github.com/hashicorp/terraform-json dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 0be7154072413..6e53f41676250 100644 --- a/go.mod +++ b/go.mod @@ -123,7 +123,7 @@ require ( github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/hc-install v0.6.3 github.com/hashicorp/terraform-config-inspect v0.0.0-20211115214459-90acf1ca460f - github.com/hashicorp/terraform-json v0.21.0 + github.com/hashicorp/terraform-json v0.22.1 github.com/hashicorp/yamux v0.1.1 github.com/hinshun/vt10x v0.0.0-20220301184237-5011da428d02 github.com/imulab/go-scim/pkg/v2 v2.2.0 @@ -399,7 +399,7 @@ require ( github.com/yashtewari/glob-intersection v0.2.0 // indirect github.com/yuin/goldmark v1.7.1 // indirect github.com/yuin/goldmark-emoji v1.0.2 // indirect - github.com/zclconf/go-cty v1.14.1 + github.com/zclconf/go-cty v1.14.4 github.com/zeebo/errs v1.3.0 // indirect go.opencensus.io v0.24.0 // indirect go.opentelemetry.io/contrib v1.19.0 // indirect diff --git a/go.sum b/go.sum index 2a1d560cf7594..569ef20f6f907 100644 --- a/go.sum +++ b/go.sum @@ -535,8 +535,8 @@ github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.17.2 h1:EU7i3Fh7vDUI9nNRdMATCEfnm9axzTnad8zszYZ73Go= github.com/hashicorp/terraform-exec v0.17.2/go.mod h1:tuIbsL2l4MlwwIZx9HPM+LOV9vVyEfBYu2GsO1uH3/8= -github.com/hashicorp/terraform-json v0.21.0 h1:9NQxbLNqPbEMze+S6+YluEdXgJmhQykRyRNd+zTI05U= -github.com/hashicorp/terraform-json v0.21.0/go.mod h1:qdeBs11ovMzo5puhrRibdD6d2Dq6TyE/28JiU4tIQxk= +github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec= +github.com/hashicorp/terraform-json v0.22.1/go.mod h1:JbWSQCLFSXFFhg42T7l9iJwdGXBYV8fmmD6o/ML4p3A= 
github.com/hashicorp/terraform-plugin-go v0.12.0 h1:6wW9mT1dSs0Xq4LR6HXj1heQ5ovr5GxXNJwkErZzpJw= github.com/hashicorp/terraform-plugin-go v0.12.0/go.mod h1:kwhmaWHNDvT1B3QiSJdAtrB/D4RaKSY/v3r2BuoWK4M= github.com/hashicorp/terraform-plugin-log v0.7.0 h1:SDxJUyT8TwN4l5b5/VkiTIaQgY6R+Y2BQ0sRZftGKQs= @@ -937,8 +937,8 @@ github.com/yuin/goldmark v1.7.1/go.mod h1:uzxRWxtg69N339t3louHJ7+O03ezfj6PlliRla github.com/yuin/goldmark-emoji v1.0.2 h1:c/RgTShNgHTtc6xdz2KKI74jJr6rWi7FPgnP9GAsO5s= github.com/yuin/goldmark-emoji v1.0.2/go.mod h1:RhP/RWpexdp+KHs7ghKnifRoIs/Bq4nDS7tRbCkOwKY= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= -github.com/zclconf/go-cty v1.14.1 h1:t9fyA35fwjjUMcmL5hLER+e/rEPqrbCK1/OSE4SI9KA= -github.com/zclconf/go-cty v1.14.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= github.com/zeebo/errs v1.3.0 h1:hmiaKqgYZzcVgRL1Vkc1Mn2914BbzB0IBxs+ebeutGs= From e5d848f19d86ef57036a3afb0a8da12104dc238f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 15:01:03 -0500 Subject: [PATCH 112/149] chore: bump github.com/valyala/fasthttp from 1.53.0 to 1.54.0 (#13373) Bumps [github.com/valyala/fasthttp](https://github.com/valyala/fasthttp) from 1.53.0 to 1.54.0. - [Release notes](https://github.com/valyala/fasthttp/releases) - [Commits](https://github.com/valyala/fasthttp/compare/v1.53.0...1.54.0) --- updated-dependencies: - dependency-name: github.com/valyala/fasthttp dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 6e53f41676250..8422b9cb486ad 100644 --- a/go.mod +++ b/go.mod @@ -160,7 +160,7 @@ require ( github.com/tidwall/gjson v1.17.0 github.com/u-root/u-root v0.14.0 github.com/unrolled/secure v1.14.0 - github.com/valyala/fasthttp v1.53.0 + github.com/valyala/fasthttp v1.54.0 github.com/wagslane/go-password-validator v0.3.0 go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1 go.nhat.io/otelsql v0.13.0 diff --git a/go.sum b/go.sum index 569ef20f6f907..fbf810862f2bd 100644 --- a/go.sum +++ b/go.sum @@ -893,8 +893,8 @@ github.com/unrolled/secure v1.14.0 h1:u9vJTU/pR4Bny0ntLUMxdfLtmIRGvQf2sEFuA0TG9A github.com/unrolled/secure v1.14.0/go.mod h1:BmF5hyM6tXczk3MpQkFf1hpKSRqCyhqcbiQtiAF7+40= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.53.0 h1:lW/+SUkOxCx2vlIu0iaImv4JLrVRnbbkpCoaawvA4zc= -github.com/valyala/fasthttp v1.53.0/go.mod h1:6dt4/8olwq9QARP/TDuPmWyWcl4byhpvTJ4AAtcz+QM= +github.com/valyala/fasthttp v1.54.0 h1:cCL+ZZR3z3HPLMVfEYVUMtJqVaui0+gu7Lx63unHwS0= +github.com/valyala/fasthttp v1.54.0/go.mod h1:6dt4/8olwq9QARP/TDuPmWyWcl4byhpvTJ4AAtcz+QM= github.com/vishvananda/netlink v1.2.1-beta.2 h1:Llsql0lnQEbHj0I1OuKyp8otXp0r3q0mPkuhwHfStVs= github.com/vishvananda/netlink v1.2.1-beta.2/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= github.com/vishvananda/netns v0.0.0-20200728191858-db3c7e526aae/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0= From 9299e9f6ba2239583ffbe436e8809b33a1b21277 Mon Sep 17 00:00:00 2001 From: Dean Sheather Date: Tue, 28 May 2024 13:04:07 -0700 Subject: [PATCH 113/149] chore: hard NAT <-> easy NAT integration test (#13314) --- 
tailnet/test/integration/integration_test.go | 46 ++-- tailnet/test/integration/network.go | 238 +++++++++++++++---- 2 files changed, 222 insertions(+), 62 deletions(-) diff --git a/tailnet/test/integration/integration_test.go b/tailnet/test/integration/integration_test.go index e23b716096048..142df60db0d5b 100644 --- a/tailnet/test/integration/integration_test.go +++ b/tailnet/test/integration/integration_test.go @@ -44,6 +44,7 @@ var ( serverListenAddr = flag.String("server-listen-addr", "", "The address to listen on for the server") // Role: stun + stunNumber = flag.Int("stun-number", 0, "The number of the STUN server") stunListenAddr = flag.String("stun-listen-addr", "", "The address to listen on for the STUN server") // Role: client @@ -84,8 +85,8 @@ var topologies = []integration.TestTopology{ }, { // Test that DERP over "easy" NAT works. The server, client 1 and client - // 2 are on different networks with a shared router, and the router - // masquerades the traffic. + // 2 are on different networks with their own routers, which are joined + // by a bridge. Name: "EasyNATDERP", SetupNetworking: integration.SetupNetworkingEasyNAT, Server: integration.SimpleServerOptions{}, @@ -93,15 +94,22 @@ var topologies = []integration.TestTopology{ RunTests: integration.TestSuite, }, { - // Test that direct over "easy" NAT works. This should use local - // endpoints to connect as routing is enabled between client 1 and - // client 2. + // Test that direct over "easy" NAT works with IP/ports grabbed from + // STUN. Name: "EasyNATDirect", SetupNetworking: integration.SetupNetworkingEasyNATWithSTUN, Server: integration.SimpleServerOptions{}, StartClient: integration.StartClientDirect, RunTests: integration.TestSuite, }, + { + // Test that direct over hard NAT <=> easy NAT works. 
+ Name: "HardNATEasyNATDirect", + SetupNetworking: integration.SetupNetworkingHardNATEasyNATDirect, + Server: integration.SimpleServerOptions{}, + StartClient: integration.StartClientDirect, + RunTests: integration.TestSuite, + }, { // Test that DERP over WebSocket (as well as DERPForceWebSockets works). // This does not test the actual DERP failure detection code and @@ -160,9 +168,9 @@ func TestIntegration(t *testing.T) { closeServer := startServerSubprocess(t, topo.Name, networking) - closeSTUN := func() error { return nil } - if networking.STUN.ListenAddr != "" { - closeSTUN = startSTUNSubprocess(t, topo.Name, networking) + stunClosers := make([]func() error, len(networking.STUNs)) + for i, stun := range networking.STUNs { + stunClosers[i] = startSTUNSubprocess(t, topo.Name, i, stun) } // Write the DERP maps to a file. @@ -187,7 +195,9 @@ func TestIntegration(t *testing.T) { // Close client2 and the server. require.NoError(t, closeClient2(), "client 2 exited") - require.NoError(t, closeSTUN(), "stun exited") + for i, closeSTUN := range stunClosers { + require.NoErrorf(t, closeSTUN(), "stun %v exited", i) + } require.NoError(t, closeServer(), "server exited") }) } @@ -206,10 +216,15 @@ func handleTestSubprocess(t *testing.T) { require.Contains(t, []string{"server", "stun", "client"}, *role, "unknown role %q", *role) testName := topo.Name + "/" - if *role == "server" || *role == "stun" { - testName += *role - } else { + switch *role { + case "server": + testName += "server" + case "stun": + testName += fmt.Sprintf("stun%d", *stunNumber) + case "client": testName += *clientName + default: + t.Fatalf("unknown role %q", *role) } t.Run(testName, func(t *testing.T) { @@ -325,12 +340,13 @@ func startServerSubprocess(t *testing.T, topologyName string, networking integra return closeFn } -func startSTUNSubprocess(t *testing.T, topologyName string, networking integration.TestNetworking) func() error { - _, closeFn := startSubprocess(t, "stun", 
networking.STUN.Process.NetNS, []string{ +func startSTUNSubprocess(t *testing.T, topologyName string, number int, stun integration.TestNetworkingSTUN) func() error { + _, closeFn := startSubprocess(t, "stun", stun.Process.NetNS, []string{ "--subprocess", "--test-name=" + topologyName, "--role=stun", - "--stun-listen-addr=" + networking.STUN.ListenAddr, + "--stun-number=" + strconv.Itoa(number), + "--stun-listen-addr=" + stun.ListenAddr, }) return closeFn } diff --git a/tailnet/test/integration/network.go b/tailnet/test/integration/network.go index e0d8f7109c167..b496879fd1219 100644 --- a/tailnet/test/integration/network.go +++ b/tailnet/test/integration/network.go @@ -21,15 +21,17 @@ import ( ) const ( - client1Port = 48001 - client1RouterPort = 48011 - client2Port = 48002 - client2RouterPort = 48012 + client1Port = 48001 + client1RouterPort = 48011 // used in easy and hard NAT + client1RouterPortSTUN = 48201 // used in hard NAT + client2Port = 48002 + client2RouterPort = 48012 // used in easy and hard NAT + client2RouterPortSTUN = 48101 // used in hard NAT ) type TestNetworking struct { Server TestNetworkingServer - STUN TestNetworkingSTUN + STUNs []TestNetworkingSTUN Client1 TestNetworkingClient Client2 TestNetworkingClient } @@ -40,8 +42,8 @@ type TestNetworkingServer struct { } type TestNetworkingSTUN struct { - Process TestNetworkingProcess - // If empty, no STUN subprocess is launched. + Process TestNetworkingProcess + IP string ListenAddr string } @@ -169,53 +171,82 @@ func SetupNetworkingEasyNAT(t *testing.T, _ slog.Logger) TestNetworking { // also creates a namespace and bridge address for a STUN server. func SetupNetworkingEasyNATWithSTUN(t *testing.T, _ slog.Logger) TestNetworking { internet := easyNAT(t) + internet.Net.STUNs = []TestNetworkingSTUN{ + prepareSTUNServer(t, &internet, 0), + } - // Create another network namespace for the STUN server. 
- stunNetNS := createNetNS(t, internet.NamePrefix+"stun") - internet.Net.STUN.Process = TestNetworkingProcess{ - NetNS: stunNetNS, + return internet.Net +} + +// hardNAT creates a fake internet with multiple STUN servers and sets up "hard +// NAT" forwarding rules. If bothHard is false, only the first client will have +// hard NAT rules, and the second client will have easy NAT rules. +// +//nolint:revive +func hardNAT(t *testing.T, stunCount int, bothHard bool) fakeInternet { + internet := createFakeInternet(t) + internet.Net.STUNs = make([]TestNetworkingSTUN, stunCount) + for i := 0; i < stunCount; i++ { + internet.Net.STUNs[i] = prepareSTUNServer(t, &internet, i) } - const ip = "10.0.0.64" - err := joinBridge(joinBridgeOpts{ - bridgeNetNS: internet.BridgeNetNS, - netNS: stunNetNS, - bridgeName: internet.BridgeName, - vethPair: vethPair{ - Outer: internet.NamePrefix + "b-stun", - Inner: internet.NamePrefix + "stun-b", - }, - ip: ip, - }) - require.NoError(t, err, "join bridge with STUN server") - internet.Net.STUN.ListenAddr = ip + ":3478" + _, err := commandInNetNS(internet.BridgeNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() + require.NoError(t, wrapExitErr(err), "enable IP forwarding in bridge NetNS") - // Define custom DERP map. - stunRegion := &tailcfg.DERPRegion{ - RegionID: 10000, - RegionCode: "stun0", - RegionName: "STUN0", - Nodes: []*tailcfg.DERPNode{ - { - Name: "stun0a", - RegionID: 1, - IPv4: ip, - IPv6: "none", - STUNPort: 3478, - STUNOnly: true, - }, + // Set up iptables masquerade rules to allow each router to NAT packets. 
+ leaves := []struct { + fakeRouterLeaf + peerIP string + clientPort int + natPortPeer int + natStartPortSTUN int + }{ + { + fakeRouterLeaf: internet.Client1, + peerIP: internet.Client2.RouterIP, + clientPort: client1Port, + natPortPeer: client1RouterPort, + natStartPortSTUN: client1RouterPortSTUN, + }, + { + fakeRouterLeaf: internet.Client2, + // If peerIP is empty, we do easy NAT (even for STUN) + peerIP: func() string { + if bothHard { + return internet.Client1.RouterIP + } + return "" + }(), + clientPort: client2Port, + natPortPeer: client2RouterPort, + natStartPortSTUN: client2RouterPortSTUN, }, } - client1DERP, err := internet.Net.Client1.ResolveDERPMap() - require.NoError(t, err, "resolve DERP map for client 1") - client1DERP.Regions[stunRegion.RegionID] = stunRegion - internet.Net.Client1.DERPMap = client1DERP - client2DERP, err := internet.Net.Client2.ResolveDERPMap() - require.NoError(t, err, "resolve DERP map for client 2") - client2DERP.Regions[stunRegion.RegionID] = stunRegion - internet.Net.Client2.DERPMap = client2DERP + for _, leaf := range leaves { + _, err := commandInNetNS(leaf.RouterNetNS, "sysctl", []string{"-w", "net.ipv4.ip_forward=1"}).Output() + require.NoError(t, wrapExitErr(err), "enable IP forwarding in router NetNS") - return internet.Net + // All non-UDP traffic should use regular masquerade e.g. for HTTP. + iptablesMasqueradeNonUDP(t, leaf.RouterNetNS) + + // NAT from this client to its peer. + iptablesNAT(t, leaf.RouterNetNS, leaf.ClientIP, leaf.clientPort, leaf.RouterIP, leaf.natPortPeer, leaf.peerIP) + + // NAT from this client to each STUN server. Only do this if we're doing + // hard NAT, as the rule above will also touch STUN traffic in easy NAT. 
+ if leaf.peerIP != "" { + for i, stun := range internet.Net.STUNs { + natPort := leaf.natStartPortSTUN + i + iptablesNAT(t, leaf.RouterNetNS, leaf.ClientIP, leaf.clientPort, leaf.RouterIP, natPort, stun.IP) + } + } + } + + return internet +} + +func SetupNetworkingHardNATEasyNATDirect(t *testing.T, _ slog.Logger) TestNetworking { + return hardNAT(t, 2, false).Net } type vethPair struct { @@ -600,6 +631,119 @@ func addRouteInNetNS(netNS *os.File, route []string) error { return nil } +// prepareSTUNServer creates a STUN server networking spec in a network +// namespace and joins it to the bridge. It also sets up the DERP map for the +// clients to use the STUN. +func prepareSTUNServer(t *testing.T, internet *fakeInternet, number int) TestNetworkingSTUN { + name := fmt.Sprintf("stn%d", number) + + stunNetNS := createNetNS(t, internet.NamePrefix+name) + stun := TestNetworkingSTUN{ + Process: TestNetworkingProcess{ + NetNS: stunNetNS, + }, + } + + stun.IP = "10.0.0." + fmt.Sprint(64+number) + err := joinBridge(joinBridgeOpts{ + bridgeNetNS: internet.BridgeNetNS, + netNS: stunNetNS, + bridgeName: internet.BridgeName, + vethPair: vethPair{ + Outer: internet.NamePrefix + "b-" + name, + Inner: internet.NamePrefix + name + "-b", + }, + ip: stun.IP, + }) + require.NoError(t, err, "join bridge with STUN server") + stun.ListenAddr = stun.IP + ":3478" + + // Define custom DERP map. 
+ stunRegion := &tailcfg.DERPRegion{ + RegionID: 10000 + number, + RegionCode: name, + RegionName: name, + Nodes: []*tailcfg.DERPNode{ + { + Name: name + "a", + RegionID: 1, + IPv4: stun.IP, + IPv6: "none", + STUNPort: 3478, + STUNOnly: true, + }, + }, + } + client1DERP, err := internet.Net.Client1.ResolveDERPMap() + require.NoError(t, err, "resolve DERP map for client 1") + client1DERP.Regions[stunRegion.RegionID] = stunRegion + internet.Net.Client1.DERPMap = client1DERP + client2DERP, err := internet.Net.Client2.ResolveDERPMap() + require.NoError(t, err, "resolve DERP map for client 2") + client2DERP.Regions[stunRegion.RegionID] = stunRegion + internet.Net.Client2.DERPMap = client2DERP + + return stun +} + +func iptablesMasqueradeNonUDP(t *testing.T, netNS *os.File) { + t.Helper() + _, err := commandInNetNS(netNS, "iptables", []string{ + "-t", "nat", + "-A", "POSTROUTING", + // Every interface except loopback. + "!", "-o", "lo", + // Every protocol except UDP. + "!", "-p", "udp", + "-j", "MASQUERADE", + }).Output() + require.NoError(t, wrapExitErr(err), "add iptables non-UDP masquerade rule") +} + +// iptablesNAT sets up iptables rules for NAT forwarding. If destIP is +// specified, the forwarding rule will only apply to traffic to/from that IP +// (mapvarydest). +func iptablesNAT(t *testing.T, netNS *os.File, clientIP string, clientPort int, routerIP string, routerPort int, destIP string) { + t.Helper() + + snatArgs := []string{ + "-t", "nat", + "-A", "POSTROUTING", + "-p", "udp", + "--sport", fmt.Sprint(clientPort), + "-j", "SNAT", + "--to-source", fmt.Sprintf("%s:%d", routerIP, routerPort), + } + if destIP != "" { + // Insert `-d $destIP` after the --sport flag+value. + newSnatArgs := append([]string{}, snatArgs[:8]...) + newSnatArgs = append(newSnatArgs, "-d", destIP) + newSnatArgs = append(newSnatArgs, snatArgs[8:]...) 
+ snatArgs = newSnatArgs + } + _, err := commandInNetNS(netNS, "iptables", snatArgs).Output() + require.NoError(t, wrapExitErr(err), "add iptables SNAT rule") + + // Incoming traffic should be forwarded to the client's IP. + dnatArgs := []string{ + "-t", "nat", + "-A", "PREROUTING", + "-p", "udp", + "--dport", fmt.Sprint(routerPort), + "-j", "DNAT", + "--to-destination", fmt.Sprintf("%s:%d", clientIP, clientPort), + } + if destIP != "" { + // Insert `-s $destIP` before the --dport flag+value. + newDnatArgs := append([]string{}, dnatArgs[:6]...) + newDnatArgs = append(newDnatArgs, "-s", destIP) + newDnatArgs = append(newDnatArgs, dnatArgs[6:]...) + dnatArgs = newDnatArgs + } + _, err = commandInNetNS(netNS, "iptables", dnatArgs).Output() + require.NoError(t, wrapExitErr(err), "add iptables DNAT rule") +} + func commandInNetNS(netNS *os.File, bin string, args []string) *exec.Cmd { //nolint:gosec cmd := exec.Command("nsenter", append([]string{"--net=/proc/self/fd/3", bin}, args...)...) From 00f6cfe3cf6b5ac8b7b87eee164f0c08056f92d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 20:04:49 +0000 Subject: [PATCH 114/149] chore: bump github.com/hashicorp/go-version from 1.6.0 to 1.7.0 (#13374) Bumps [github.com/hashicorp/go-version](https://github.com/hashicorp/go-version) from 1.6.0 to 1.7.0. - [Release notes](https://github.com/hashicorp/go-version/releases) - [Changelog](https://github.com/hashicorp/go-version/blob/main/CHANGELOG.md) - [Commits](https://github.com/hashicorp/go-version/compare/v1.6.0...v1.7.0) --- updated-dependencies: - dependency-name: github.com/hashicorp/go-version dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8422b9cb486ad..ad2bb8c30b229 100644 --- a/go.mod +++ b/go.mod @@ -120,7 +120,7 @@ require ( github.com/google/uuid v1.6.0 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b - github.com/hashicorp/go-version v1.6.0 + github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/hc-install v0.6.3 github.com/hashicorp/terraform-config-inspect v0.0.0-20211115214459-90acf1ca460f github.com/hashicorp/terraform-json v0.22.1 diff --git a/go.sum b/go.sum index fbf810862f2bd..c5bff82f728c4 100644 --- a/go.sum +++ b/go.sum @@ -518,8 +518,8 @@ github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b h1:3GrpnZQBxcMj1 github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b/go.mod h1:qIFzeFcJU3OIFk/7JreWXcUjFmcCaeHTH9KoNyHYVCs= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= From 5a8a254c93b7834f997b4f329ff1afd3c91a9dc8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 
May 2024 20:17:23 +0000 Subject: [PATCH 115/149] chore: bump github.com/hashicorp/hc-install from 0.6.3 to 0.7.0 (#13372) Bumps [github.com/hashicorp/hc-install](https://github.com/hashicorp/hc-install) from 0.6.3 to 0.7.0. - [Release notes](https://github.com/hashicorp/hc-install/releases) - [Commits](https://github.com/hashicorp/hc-install/compare/v0.6.3...v0.7.0) --- updated-dependencies: - dependency-name: github.com/hashicorp/hc-install dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index ad2bb8c30b229..60479ece37786 100644 --- a/go.mod +++ b/go.mod @@ -121,7 +121,7 @@ require ( github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-reap v0.0.0-20170704170343-bf58d8a43e7b github.com/hashicorp/go-version v1.7.0 - github.com/hashicorp/hc-install v0.6.3 + github.com/hashicorp/hc-install v0.7.0 github.com/hashicorp/terraform-config-inspect v0.0.0-20211115214459-90acf1ca460f github.com/hashicorp/terraform-json v0.22.1 github.com/hashicorp/yamux v0.1.1 @@ -233,7 +233,7 @@ require ( github.com/Microsoft/go-winio v0.6.1 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect github.com/OneOfOne/xxhash v1.2.8 // indirect - github.com/ProtonMail/go-crypto v1.1.0-alpha.0 // indirect + github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/agnivade/levenshtein v1.1.1 // indirect github.com/akutz/memconn v0.1.0 // indirect diff --git a/go.sum b/go.sum index c5bff82f728c4..0fc653cd876d5 100644 --- a/go.sum +++ b/go.sum @@ -51,8 +51,8 @@ github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEV github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod 
h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk= github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8= github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0 h1:nHGfwXmFvJrSR9xu8qL7BkO4DqTHXE9N5vPhgY2I+j0= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d h1:licZJFw2RwpHMqeKTCYkitsPqHNxTmd4SNR5r94FGM8= github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d/go.mod h1:asat636LX7Bqt5lYEZ27JNDcqxfjdBQuJ/MM4CN/Lzo= github.com/adrg/xdg v0.4.0 h1:RzRqFcjH4nE5C6oTAxhBtoE2IRyjBSa62SCbyPidvls= @@ -523,8 +523,8 @@ github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09 github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= -github.com/hashicorp/hc-install v0.6.3 h1:yE/r1yJvWbtrJ0STwScgEnCanb0U9v7zp0Gbkmcoxqs= -github.com/hashicorp/hc-install v0.6.3/go.mod h1:KamGdbodYzlufbWh4r9NRo8y6GLHWZP2GBtdnms1Ln0= +github.com/hashicorp/hc-install v0.7.0 h1:Uu9edVqjKQxxuD28mR5TikkKDd/p55S8vzPC1659aBk= +github.com/hashicorp/hc-install v0.7.0/go.mod h1:ELmmzZlGnEcqoUMKUuykHaPCIR1sYLYX+KSggWSKZuA= github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM= github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM= @@ -802,8 +802,8 @@ 
github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b/go.mod h1:dA0hQrY github.com/secure-systems-lab/go-securesystemslib v0.7.0 h1:OwvJ5jQf9LnIAS83waAjPbcMsODrTQUpJ02eNLUoxBg= github.com/secure-systems-lab/go-securesystemslib v0.7.0/go.mod h1:/2gYnlnHVQ6xeGtfIqFy7Do03K4cdCY0A/GlJLDKLHI= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8= -github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= From 18692058a9e793e87ee48d37026fe8171da6f21d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 15:48:58 -0500 Subject: [PATCH 116/149] chore: bump google.golang.org/grpc from 1.63.2 to 1.64.0 (#13319) Bumps [google.golang.org/grpc](https://github.com/grpc/grpc-go) from 1.63.2 to 1.64.0. - [Release notes](https://github.com/grpc/grpc-go/releases) - [Commits](https://github.com/grpc/grpc-go/compare/v1.63.2...v1.64.0) --- updated-dependencies: - dependency-name: google.golang.org/grpc dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 60479ece37786..681f3b6a714cc 100644 --- a/go.mod +++ b/go.mod @@ -184,7 +184,7 @@ require ( golang.org/x/tools v0.21.0 golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 google.golang.org/api v0.181.0 - google.golang.org/grpc v1.63.2 + google.golang.org/grpc v1.64.0 google.golang.org/protobuf v1.34.1 gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 diff --git a/go.sum b/go.sum index 0fc653cd876d5..b6ac39d81aece 100644 --- a/go.sum +++ b/go.sum @@ -1173,8 +1173,8 @@ google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyac google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.63.2 h1:MUeiw1B2maTVZthpU5xvASfTh3LDbxHd6IJ6QQVU+xM= -google.golang.org/grpc v1.63.2/go.mod h1:WAX/8DgncnokcFUldAxq7GeB5DXHDbMF+lLvDomNkRA= +google.golang.org/grpc v1.64.0 h1:KH3VH9y/MgNQg1dE7b3XfVK0GsPSIzJwdF617gUSbvY= +google.golang.org/grpc v1.64.0/go.mod h1:oxjF8E3FBnjp+/gVFYdWacaLDx9na1aqy9oovLpxQYg= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= From 7ea510e0911f539158efaa91683505984e2b9c13 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 28 May 2024 15:49:34 -0500 Subject: [PATCH 117/149] chore: bump 
github.com/gohugoio/hugo from 0.125.3 to 0.126.1 (#13323) Bumps [github.com/gohugoio/hugo](https://github.com/gohugoio/hugo) from 0.125.3 to 0.126.1. - [Release notes](https://github.com/gohugoio/hugo/releases) - [Changelog](https://github.com/gohugoio/hugo/blob/master/hugoreleaser.toml) - [Commits](https://github.com/gohugoio/hugo/compare/v0.125.3...v0.126.1) --- updated-dependencies: - dependency-name: github.com/gohugoio/hugo dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 6 +++--- go.sum | 26 ++++++++++++++------------ 2 files changed, 17 insertions(+), 15 deletions(-) diff --git a/go.mod b/go.mod index 681f3b6a714cc..4c007cdfa0f81 100644 --- a/go.mod +++ b/go.mod @@ -112,7 +112,7 @@ require ( github.com/go-ping/ping v1.1.0 github.com/go-playground/validator/v10 v10.19.0 github.com/gofrs/flock v0.8.1 - github.com/gohugoio/hugo v0.125.3 + github.com/gohugoio/hugo v0.126.1 github.com/golang-jwt/jwt/v4 v4.5.0 github.com/golang-migrate/migrate/v4 v4.17.0 github.com/google/go-cmp v0.6.0 @@ -357,7 +357,7 @@ require ( github.com/opencontainers/image-spec v1.1.0-rc5 // indirect github.com/opencontainers/runc v1.1.12 // indirect github.com/outcaste-io/ristretto v0.2.3 // indirect - github.com/pelletier/go-toml/v2 v2.2.1 // indirect + github.com/pelletier/go-toml/v2 v2.2.2 // indirect github.com/philhofer/fwd v1.1.2 // indirect github.com/pierrec/lz4/v4 v4.1.18 // indirect github.com/pkg/errors v0.9.1 // indirect @@ -381,7 +381,7 @@ require ( github.com/tailscale/wireguard-go v0.0.0-20231121184858-cc193a0b3272 github.com/tchap/go-patricia/v2 v2.3.1 // indirect github.com/tcnksm/go-httpstat v0.2.0 // indirect - github.com/tdewolff/parse/v2 v2.7.12 // indirect + github.com/tdewolff/parse/v2 v2.7.13 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.1 // indirect 
github.com/tinylib/msgp v1.1.8 // indirect diff --git a/go.sum b/go.sum index b6ac39d81aece..c806b7983b43e 100644 --- a/go.sum +++ b/go.sum @@ -314,8 +314,8 @@ github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uq github.com/gabriel-vasile/mimetype v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= github.com/gen2brain/beeep v0.0.0-20220402123239-6a3042f4b71a h1:fwNLHrP5Rbg/mGSXCjtPdpbqv2GucVTA/KMi8wEm6mE= github.com/gen2brain/beeep v0.0.0-20220402123239-6a3042f4b71a/go.mod h1:/WeFVhhxMOGypVKS0w8DUJxUBbHypnWkUVnW7p5c9Pw= -github.com/getkin/kin-openapi v0.124.0 h1:VSFNMB9C9rTKBnQ/fpyDU8ytMTr4dWI9QovSKj9kz/M= -github.com/getkin/kin-openapi v0.124.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= +github.com/getkin/kin-openapi v0.123.0 h1:zIik0mRwFNLyvtXK274Q6ut+dPh6nlxBp0x7mNrPhs8= +github.com/getkin/kin-openapi v0.123.0/go.mod h1:wb1aSZA/iWmorQP9KTAS/phLj/t17B5jT7+fS8ed9NM= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= @@ -406,8 +406,10 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e h1:QArsSubW7eDh8APMXkByjQWvuljwPGAGQpJEFn0F0wY= github.com/gohugoio/go-i18n/v2 v2.1.3-0.20230805085216-e63c13218d0e/go.mod h1:3Ltoo9Banwq0gOtcOwxuHG6omk+AwsQPADyw2vQYOJQ= -github.com/gohugoio/hugo v0.125.3 h1:94q41c1EpMI+qTEYTSNqC6+O4Tfw/IkaTBYApt3niZ4= -github.com/gohugoio/hugo v0.125.3/go.mod h1:gNl67fhGCJSKN+lDnivkzaT8z4oRNLYNKxwYIMA2vpY= +github.com/gohugoio/hugo v0.126.1 h1:jzs1VX6Ru/NR0luf4Z9ahKLVmYzQEox4Cxd/kyzgN9A= +github.com/gohugoio/hugo v0.126.1/go.mod h1:wo66RnKrp9Mx0WeeF22LJxPY6YB+v2weKdZpHa8fI/A= 
+github.com/gohugoio/hugo-goldmark-extensions/extras v0.1.0 h1:YhxZNU8y2vxV6Ibr7QJzzUlpr8oHHWX/l+Q1R/a5Zao= +github.com/gohugoio/hugo-goldmark-extensions/extras v0.1.0/go.mod h1:0cuvOnGKW7WeXA3i7qK6IS07FH1bgJ2XzOjQ7BMJYH4= github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.2.0 h1:PCtO5l++psZf48yen2LxQ3JiOXxaRC6v0594NeHvGZg= github.com/gohugoio/hugo-goldmark-extensions/passthrough v0.2.0/go.mod h1:g9CCh+Ci2IMbPUrVJuXbBTrA+rIIx5+hDQ4EXYaQDoM= github.com/gohugoio/locales v0.14.0 h1:Q0gpsZwfv7ATHMbcTNepFd59H7GoykzWJIxi113XGDc= @@ -745,8 +747,8 @@ github.com/outcaste-io/ristretto v0.2.3 h1:AK4zt/fJ76kjlYObOeNwh4T3asEuaCmp26pOv github.com/outcaste-io/ristretto v0.2.3/go.mod h1:W8HywhmtlopSB1jeMg3JtdIhf+DYkLAr0VN/s4+MHac= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= -github.com/pelletier/go-toml/v2 v2.2.1 h1:9TA9+T8+8CUCO2+WYnDLCgrYi9+omqKXyjDtosvtEhg= -github.com/pelletier/go-toml/v2 v2.2.1/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= +github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM= +github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs= github.com/perimeterx/marshmallow v1.1.5 h1:a2LALqQ1BlHM8PZblsDdidgv1mWi1DgC2UmX50IvK2s= github.com/perimeterx/marshmallow v1.1.5/go.mod h1:dsXbUu8CRzfYP5a87xpp0xq9S3u0Vchtcl8we9tYaXw= github.com/philhofer/fwd v1.1.2 h1:bnDivRJ1EWPjUIRXV5KfORO897HTbpFAQddBdE8t7Gw= @@ -860,10 +862,10 @@ github.com/tailscale/netlink v1.1.1-0.20211101221916-cabfb018fe85 h1:zrsUcqrG2uQ github.com/tailscale/netlink v1.1.1-0.20211101221916-cabfb018fe85/go.mod h1:NzVQi3Mleb+qzq8VmcWpSkcSYxXIg0DkI6XDzpVkhJ0= github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BGhTkes= github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= 
-github.com/tdewolff/minify/v2 v2.20.19 h1:tX0SR0LUrIqGoLjXnkIzRSIbKJ7PaNnSENLD4CyH6Xo= -github.com/tdewolff/minify/v2 v2.20.19/go.mod h1:ulkFoeAVWMLEyjuDz1ZIWOA31g5aWOawCFRp9R/MudM= -github.com/tdewolff/parse/v2 v2.7.12 h1:tgavkHc2ZDEQVKy1oWxwIyh5bP4F5fEh/JmBwPP/3LQ= -github.com/tdewolff/parse/v2 v2.7.12/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= +github.com/tdewolff/minify/v2 v2.20.20 h1:vhULb+VsW2twkplgsawAoUY957efb+EdiZ7zu5fUhhk= +github.com/tdewolff/minify/v2 v2.20.20/go.mod h1:GYaLXFpIIwsX99apQHXfGdISUdlA98wmaoWxjT9C37k= +github.com/tdewolff/parse/v2 v2.7.13 h1:iSiwOUkCYLNfapHoqdLcqZVgvQ0jrsao8YYKP/UJYTI= +github.com/tdewolff/parse/v2 v2.7.13/go.mod h1:3FbJWZp3XT9OWVN3Hmfp0p/a08v4h8J9W1aghka0soA= github.com/tdewolff/test v1.0.11-0.20231101010635-f1265d231d52/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739 h1:IkjBCtQOOjIn03u/dMQK9g+Iw9ewps4mCl1nB8Sscbo= github.com/tdewolff/test v1.0.11-0.20240106005702-7de5f7df4739/go.mod h1:XPuWBzvdUzhCuxWO1ojpXsyzsA5bFoS3tO/Q3kFuTG8= @@ -1005,8 +1007,8 @@ golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= -golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8= -golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE= +golang.org/x/image v0.16.0 h1:9kloLAKhUufZhA12l5fwnx2NZW39/we1UhBesW433jw= +golang.org/x/image v0.16.0/go.mod h1:ugSZItdV4nOxyqp56HmXwH0Ry0nBCpjnZdpDaIHdoPs= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod 
h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= From 02c36868b20bcdf5fd27d810c0c37cc720b7774b Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Tue, 28 May 2024 17:15:37 -0500 Subject: [PATCH 118/149] chore: upgrade `go.uber.org/goleak` (#13388) The latest published version is broken on go 1.20 --- flake.nix | 2 +- go.mod | 2 +- go.sum | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index e11e3181d8db3..099ef5559cb1e 100644 --- a/flake.nix +++ b/flake.nix @@ -97,7 +97,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! - vendorHash = "sha256-YGoQ9JgE9aeS4k3oNQw9EMAf2aIAGa+mclBjoIz7CWs="; + vendorHash = "sha256-TRnB8wXSM8lJHAET+fMr6uKcqD0A4ilna1wPkgxn5/E="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index 4c007cdfa0f81..2454740a75245 100644 --- a/go.mod +++ b/go.mod @@ -170,7 +170,7 @@ require ( go.opentelemetry.io/otel/sdk v1.24.0 go.opentelemetry.io/otel/trace v1.24.0 go.uber.org/atomic v1.11.0 - go.uber.org/goleak v1.2.1 + go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29 go4.org/netipx v0.0.0-20230728180743-ad4cb58a6516 golang.org/x/crypto v0.23.0 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 diff --git a/go.sum b/go.sum index c806b7983b43e..b05adda59ded2 100644 --- a/go.sum +++ b/go.sum @@ -982,8 +982,8 @@ go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v8 go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= -go.uber.org/goleak v1.2.1/go.mod 
h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4= +go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29 h1:w0QrHuh0hhUZ++UTQaBM2DMdrWQghZ/UsUb+Wb1+8YE= +go.uber.org/goleak v1.3.1-0.20240429205332-517bace7cc29/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/mock v0.4.0 h1:VcM4ZOtdbR4f6VXfiOpwpVJDL6lCReaZ6mw31wqh7KU= go.uber.org/mock v0.4.0/go.mod h1:a6FSlNadKUHUa9IP5Vyt1zh4fC7uAwxMutEAscFbkZc= go4.org/mem v0.0.0-20220726221520-4f986261bf13 h1:CbZeCBZ0aZj8EfVgnqQcYZgf0lpZ3H9rmp5nkDTAst8= From ef4ed64a298a88dad483c666a737d124244216fc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 May 2024 06:51:59 +0300 Subject: [PATCH 119/149] chore: bump gopkg.in/DataDog/dd-trace-go.v1 from 1.61.0 to 1.64.0 (#13316) Bumps gopkg.in/DataDog/dd-trace-go.v1 from 1.61.0 to 1.64.0. --- updated-dependencies: - dependency-name: gopkg.in/DataDog/dd-trace-go.v1 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 6 +++--- go.sum | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/go.mod b/go.mod index 2454740a75245..9a53b87457d0f 100644 --- a/go.mod +++ b/go.mod @@ -186,7 +186,7 @@ require ( google.golang.org/api v0.181.0 google.golang.org/grpc v1.64.0 google.golang.org/protobuf v1.34.1 - gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 + gopkg.in/DataDog/dd-trace-go.v1 v1.64.0 gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/yaml.v3 v3.0.1 gvisor.dev/gvisor v0.0.0-20240509041132-65b30f7869dc @@ -207,7 +207,7 @@ require ( require ( cloud.google.com/go/auth v0.4.1 // indirect cloud.google.com/go/auth/oauth2adapt v0.2.2 // indirect - github.com/DataDog/go-libddwaf/v2 v2.3.1 // indirect + github.com/DataDog/go-libddwaf/v2 v2.4.2 // indirect github.com/alecthomas/chroma/v2 v2.13.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.11.1 // indirect github.com/go-jose/go-jose/v4 v4.0.1 // indirect @@ -222,7 +222,7 @@ require ( cloud.google.com/go/longrunning v0.5.6 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 // indirect - github.com/DataDog/appsec-internal-go v1.4.1 // indirect + github.com/DataDog/appsec-internal-go v1.5.0 // indirect github.com/DataDog/datadog-agent/pkg/obfuscate v0.48.0 // indirect github.com/DataDog/datadog-agent/pkg/remoteconfig/state v0.48.1 // indirect github.com/DataDog/datadog-go/v5 v5.3.0 // indirect diff --git a/go.sum b/go.sum index b05adda59ded2..ce61ee5166334 100644 --- a/go.sum +++ b/go.sum @@ -24,16 +24,16 @@ github.com/BurntSushi/locker v0.0.0-20171006230638-a6e239ea1c69/go.mod h1:L1AbZd github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/DATA-DOG/go-sqlmock v1.5.2 h1:OcvFkGmslmlZibjAjaHm3L//6LiuBgolP7OputlJIzU= github.com/DATA-DOG/go-sqlmock v1.5.2/go.mod 
h1:88MAG/4G7SMwSE3CeA0ZKzrT5CiOU3OJ+JlNzwDqpNU= -github.com/DataDog/appsec-internal-go v1.4.1 h1:xpAS/hBo429pVh7rngquAK2DezUaJjfsX7Wd8cw0aIk= -github.com/DataDog/appsec-internal-go v1.4.1/go.mod h1:rmZ+tpq5ZPKmeOUMYjWFg+q1mRd13mxZwSLBG+xa1ik= +github.com/DataDog/appsec-internal-go v1.5.0 h1:8kS5zSx5T49uZ8dZTdT19QVAvC/B8ByyZdhQKYQWHno= +github.com/DataDog/appsec-internal-go v1.5.0/go.mod h1:pEp8gjfNLtEOmz+iZqC8bXhu0h4k7NUsW/qiQb34k1U= github.com/DataDog/datadog-agent/pkg/obfuscate v0.48.0 h1:bUMSNsw1iofWiju9yc1f+kBd33E3hMJtq9GuU602Iy8= github.com/DataDog/datadog-agent/pkg/obfuscate v0.48.0/go.mod h1:HzySONXnAgSmIQfL6gOv9hWprKJkx8CicuXuUbmgWfo= github.com/DataDog/datadog-agent/pkg/remoteconfig/state v0.48.1 h1:5nE6N3JSs2IG3xzMthNFhXfOaXlrsdgqmJ73lndFf8c= github.com/DataDog/datadog-agent/pkg/remoteconfig/state v0.48.1/go.mod h1:Vc+snp0Bey4MrrJyiV2tVxxJb6BmLomPvN1RgAvjGaQ= github.com/DataDog/datadog-go/v5 v5.3.0 h1:2q2qjFOb3RwAZNU+ez27ZVDwErJv5/VpbBPprz7Z+s8= github.com/DataDog/datadog-go/v5 v5.3.0/go.mod h1:XRDJk1pTc00gm+ZDiBKsjh7oOOtJfYfglVCmFb8C2+Q= -github.com/DataDog/go-libddwaf/v2 v2.3.1 h1:bujaT5+KnLDFQqVA5ilvVvW+evUSHow9FrTHRgUwN4A= -github.com/DataDog/go-libddwaf/v2 v2.3.1/go.mod h1:gsCdoijYQfj8ce/T2bEDNPZFIYnmHluAgVDpuQOWMZE= +github.com/DataDog/go-libddwaf/v2 v2.4.2 h1:ilquGKUmN9/Ty0sIxiEyznVRxP3hKfmH15Y1SMq5gjA= +github.com/DataDog/go-libddwaf/v2 v2.4.2/go.mod h1:gsCdoijYQfj8ce/T2bEDNPZFIYnmHluAgVDpuQOWMZE= github.com/DataDog/go-tuf v1.0.2-0.5.2 h1:EeZr937eKAWPxJ26IykAdWA4A0jQXJgkhUjqEI/w7+I= github.com/DataDog/go-tuf v1.0.2-0.5.2/go.mod h1:zBcq6f654iVqmkk8n2Cx81E1JnNTMOAx1UEO/wZR+P0= github.com/DataDog/gostackparse v0.7.0 h1:i7dLkXHvYzHV308hnkvVGDL3BR4FWl7IsXNPz/IGQh4= @@ -1192,8 +1192,8 @@ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.34.1 h1:9ddQBjfCyZPOHPUiPxpYESBLc+T8P3E+Vo4IbKZgFWg= 
google.golang.org/protobuf v1.34.1/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= -gopkg.in/DataDog/dd-trace-go.v1 v1.61.0 h1:XKO91GwTjpIRhd56Xif/BZ2YgHkQufVTOvtkbRYSPi8= -gopkg.in/DataDog/dd-trace-go.v1 v1.61.0/go.mod h1:NHKX1t9eKmajySb6H+zLdgZizCFzbt5iKvrTyxEyy8w= +gopkg.in/DataDog/dd-trace-go.v1 v1.64.0 h1:zXQo6iv+dKRrDBxMXjRXLSKN2lY9uM34XFI4nPyp0eA= +gopkg.in/DataDog/dd-trace-go.v1 v1.64.0/go.mod h1:qzwVu8Qr8CqzQNw2oKEXRdD+fMnjYatjYMGE0tdCVG4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= From ec78f54941c6387e40a4ba689397af97e90550a8 Mon Sep 17 00:00:00 2001 From: Stephen Kirby <58410745+stirby@users.noreply.github.com> Date: Tue, 28 May 2024 22:55:22 -0500 Subject: [PATCH 120/149] added jetbrains fleet link to manifest.json (#13363) --- docs/manifest.json | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/manifest.json b/docs/manifest.json index 85f5c250066ff..7e23120d83b6a 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -285,6 +285,11 @@ "description": "Learn how to configure JetBrains Gateway for your workspaces", "path": "./ides/gateway.md" }, + { + "title": "JetBrains Fleet", + "description": "Learn how to configure JetBrains Fleet for your workspaces", + "path": "./ides/fleet.md" + }, { "title": "Emacs", "description": "Learn how to configure Emacs with TRAMP in Coder", From a551aa51ab2d4055d17d4e1abde788ad1bf0e5b5 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Wed, 29 May 2024 08:07:48 +0400 Subject: [PATCH 121/149] fix: respect --disable-direct-connections on coder speedtest (#13377) --- cli/speedtest.go | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/cli/speedtest.go b/cli/speedtest.go index 
e88872019e2d2..db68556a00533 100644 --- a/cli/speedtest.go +++ b/cli/speedtest.go @@ -39,6 +39,10 @@ func (r *RootCmd) speedtest() *serpent.Command { ctx, cancel := context.WithCancel(inv.Context()) defer cancel() + if direct && r.disableDirect { + return xerrors.Errorf("--direct (-d) is incompatible with --%s", varDisableDirect) + } + _, workspaceAgent, err := getWorkspaceAndAgent(ctx, inv, client, false, inv.Args[0]) if err != nil { return err @@ -57,12 +61,13 @@ func (r *RootCmd) speedtest() *serpent.Command { logger = logger.Leveled(slog.LevelDebug) } - if r.disableDirect { - _, _ = fmt.Fprintln(inv.Stderr, "Direct connections disabled.") - } opts := &workspacesdk.DialAgentOptions{ Logger: logger, } + if r.disableDirect { + _, _ = fmt.Fprintln(inv.Stderr, "Direct connections disabled.") + opts.BlockEndpoints = true + } if pcapFile != "" { s := capture.New() opts.CaptureHook = s.LogPacket From 84b3121777ca9dea3d7fd831ccf6a51083f333de Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Wed, 29 May 2024 08:17:35 +0400 Subject: [PATCH 122/149] fix: stop logging workspace agent unless verbose (#13378) --- cli/ping.go | 12 +++++------- cli/portforward.go | 11 +++++------ cli/speedtest.go | 8 ++------ 3 files changed, 12 insertions(+), 19 deletions(-) diff --git a/cli/ping.go b/cli/ping.go index d20cab81f72c0..82becb016bde7 100644 --- a/cli/ping.go +++ b/cli/ping.go @@ -48,19 +48,17 @@ func (r *RootCmd) ping() *serpent.Command { return err } - logger := inv.Logger + opts := &workspacesdk.DialAgentOptions{} + if r.verbose { - logger = logger.AppendSinks(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) + opts.Logger = inv.Logger.AppendSinks(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) } if r.disableDirect { _, _ = fmt.Fprintln(inv.Stderr, "Direct connections disabled.") + opts.BlockEndpoints = true } - conn, err := workspacesdk.New(client). 
- DialAgent(ctx, workspaceAgent.ID, &workspacesdk.DialAgentOptions{ - Logger: logger, - BlockEndpoints: r.disableDirect, - }) + conn, err := workspacesdk.New(client).DialAgent(ctx, workspaceAgent.ID, opts) if err != nil { return err } diff --git a/cli/portforward.go b/cli/portforward.go index 2c027a217c5ba..4c0b1d772eecc 100644 --- a/cli/portforward.go +++ b/cli/portforward.go @@ -95,19 +95,18 @@ func (r *RootCmd) portForward() *serpent.Command { return xerrors.Errorf("await agent: %w", err) } + opts := &workspacesdk.DialAgentOptions{} + logger := inv.Logger if r.verbose { - logger = logger.AppendSinks(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) + opts.Logger = logger.AppendSinks(sloghuman.Sink(inv.Stdout)).Leveled(slog.LevelDebug) } if r.disableDirect { _, _ = fmt.Fprintln(inv.Stderr, "Direct connections disabled.") + opts.BlockEndpoints = true } - conn, err := workspacesdk.New(client). - DialAgent(ctx, workspaceAgent.ID, &workspacesdk.DialAgentOptions{ - Logger: logger, - BlockEndpoints: r.disableDirect, - }) + conn, err := workspacesdk.New(client).DialAgent(ctx, workspaceAgent.ID, opts) if err != nil { return err } diff --git a/cli/speedtest.go b/cli/speedtest.go index db68556a00533..9f8090ef99731 100644 --- a/cli/speedtest.go +++ b/cli/speedtest.go @@ -56,13 +56,9 @@ func (r *RootCmd) speedtest() *serpent.Command { return xerrors.Errorf("await agent: %w", err) } - logger := inv.Logger.AppendSinks(sloghuman.Sink(inv.Stderr)) + opts := &workspacesdk.DialAgentOptions{} if r.verbose { - logger = logger.Leveled(slog.LevelDebug) - } - - opts := &workspacesdk.DialAgentOptions{ - Logger: logger, + opts.Logger = inv.Logger.AppendSinks(sloghuman.Sink(inv.Stderr)).Leveled(slog.LevelDebug) } if r.disableDirect { _, _ = fmt.Fprintln(inv.Stderr, "Direct connections disabled.") From b7edf5bbc7fb633e2ef9621f975cf24da6077a85 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Wed, 29 May 2024 14:30:24 +0400 Subject: [PATCH 123/149] fix: block writes from gVisor to 
tailscale instead of dropping (#13389) fixes: #13108 upgrades our tailscale fork to include https://github.com/coder/tailscale/pull/52 --- flake.nix | 2 +- go.mod | 2 +- go.sum | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index 099ef5559cb1e..afe7a8246feba 100644 --- a/flake.nix +++ b/flake.nix @@ -97,7 +97,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes! - vendorHash = "sha256-TRnB8wXSM8lJHAET+fMr6uKcqD0A4ilna1wPkgxn5/E="; + vendorHash = "sha256-/tj3Pit5h3zJbJS7A970hUB5dJT8VwOUcpLLIfBL96c="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index 9a53b87457d0f..bd90484747f96 100644 --- a/go.mod +++ b/go.mod @@ -42,7 +42,7 @@ replace github.com/dlclark/regexp2 => github.com/dlclark/regexp2 v1.7.0 // There are a few minor changes we make to Tailscale that we're slowly upstreaming. Compare here: // https://github.com/tailscale/tailscale/compare/main...coder:tailscale:main -replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39 +replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02 // This is replaced to include // 1. a fix for a data race: c.f. 
https://github.com/tailscale/wireguard-go/pull/25 diff --git a/go.sum b/go.sum index ce61ee5166334..d8c3c4d9654ee 100644 --- a/go.sum +++ b/go.sum @@ -215,8 +215,8 @@ github.com/coder/serpent v0.7.0 h1:zGpD2GlF3lKIVkMjNGKbkip88qzd5r/TRcc30X/SrT0= github.com/coder/serpent v0.7.0/go.mod h1:REkJ5ZFHQUWFTPLExhXYZ1CaHFjxvGNRlLXLdsI08YA= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= -github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39 h1:v3x8FBqk45mbBybU1QrQy7CGiUpQDPBJT0C5g8bfGHE= -github.com/coder/tailscale v1.1.1-0.20240522100209-5cd256cdcb39/go.mod h1:rp6BIJxCp127/hvvDWNkHC9MxAlKvQfoOtBr8s5sCqo= +github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02 h1:nieqQqcg7Swa1NywfEi93yAagpj/LaszHsIt7wtq58M= +github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02/go.mod h1:rp6BIJxCp127/hvvDWNkHC9MxAlKvQfoOtBr8s5sCqo= github.com/coder/terraform-provider-coder v0.22.0 h1:L72WFa9/6sc/nnXENPS8LpWi/2NBV+DRUW0WT//pEaU= github.com/coder/terraform-provider-coder v0.22.0/go.mod h1:wMun9UZ9HT2CzF6qPPBup1odzBpVUc0/xSFoXgdI3tk= github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 h1:C2/eCr+r0a5Auuw3YOiSyLNHkdMtyCZHPFBx7syN4rk= From cca3cb1c55a67267fdf9ac13a54cc09d48d39687 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Wed, 29 May 2024 11:43:08 +0100 Subject: [PATCH 124/149] feat(provisioner): pass owner git ssh key (#13366) --- .../provisionerdserver/provisionerdserver.go | 11 + .../provisionerdserver_test.go | 5 + provisioner/terraform/provision.go | 2 + provisioner/terraform/provision_test.go | 46 +++ provisionersdk/proto/provisioner.pb.go | 297 ++++++++++-------- provisionersdk/proto/provisioner.proto | 2 + site/e2e/provisionerGenerated.ts | 8 + 7 files changed, 235 insertions(+), 136 deletions(-) diff --git a/coderd/provisionerdserver/provisionerdserver.go 
b/coderd/provisionerdserver/provisionerdserver.go index e47aba8a56eab..3f5876d644617 100644 --- a/coderd/provisionerdserver/provisionerdserver.go +++ b/coderd/provisionerdserver/provisionerdserver.go @@ -467,6 +467,15 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo if err != nil { return nil, failJob(fmt.Sprintf("get owner: %s", err)) } + var ownerSSHPublicKey, ownerSSHPrivateKey string + if ownerSSHKey, err := s.Database.GetGitSSHKey(ctx, owner.ID); err != nil { + if !xerrors.Is(err, sql.ErrNoRows) { + return nil, failJob(fmt.Sprintf("get owner ssh key: %s", err)) + } + } else { + ownerSSHPublicKey = ownerSSHKey.PublicKey + ownerSSHPrivateKey = ownerSSHKey.PrivateKey + } ownerGroups, err := s.Database.GetGroupsByOrganizationAndUserID(ctx, database.GetGroupsByOrganizationAndUserIDParams{ UserID: owner.ID, OrganizationID: s.OrganizationID, @@ -586,6 +595,8 @@ func (s *server) acquireProtoJob(ctx context.Context, job database.ProvisionerJo TemplateName: template.Name, TemplateVersion: templateVersion.Name, WorkspaceOwnerSessionToken: sessionToken, + WorkspaceOwnerSshPublicKey: ownerSSHPublicKey, + WorkspaceOwnerSshPrivateKey: ownerSSHPrivateKey, }, LogLevel: input.LogLevel, }, diff --git a/coderd/provisionerdserver/provisionerdserver_test.go b/coderd/provisionerdserver/provisionerdserver_test.go index f7b5ef7e29625..e0403b7c7db2d 100644 --- a/coderd/provisionerdserver/provisionerdserver_test.go +++ b/coderd/provisionerdserver/provisionerdserver_test.go @@ -190,6 +190,9 @@ func TestAcquireJob(t *testing.T) { Name: "group1", OrganizationID: pd.OrganizationID, }) + sshKey := dbgen.GitSSHKey(t, db, database.GitSSHKey{ + UserID: user.ID, + }) err := db.InsertGroupMember(ctx, database.InsertGroupMemberParams{ UserID: user.ID, GroupID: group1.ID, @@ -360,6 +363,8 @@ func TestAcquireJob(t *testing.T) { TemplateName: template.Name, TemplateVersion: version.Name, WorkspaceOwnerSessionToken: sessionToken, + WorkspaceOwnerSshPublicKey: 
sshKey.PublicKey, + WorkspaceOwnerSshPrivateKey: sshKey.PrivateKey, }, }, }) diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 542006f27e87f..20135c6034818 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -202,6 +202,8 @@ func provisionEnv( "CODER_WORKSPACE_OWNER_NAME="+metadata.GetWorkspaceOwnerName(), "CODER_WORKSPACE_OWNER_OIDC_ACCESS_TOKEN="+metadata.GetWorkspaceOwnerOidcAccessToken(), "CODER_WORKSPACE_OWNER_GROUPS="+string(ownerGroups), + "CODER_WORKSPACE_OWNER_SSH_PUBLIC_KEY="+metadata.GetWorkspaceOwnerSshPublicKey(), + "CODER_WORKSPACE_OWNER_SSH_PRIVATE_KEY="+metadata.GetWorkspaceOwnerSshPrivateKey(), "CODER_WORKSPACE_ID="+metadata.GetWorkspaceId(), "CODER_WORKSPACE_OWNER_ID="+metadata.GetWorkspaceOwnerId(), "CODER_WORKSPACE_OWNER_SESSION_TOKEN="+metadata.GetWorkspaceOwnerSessionToken(), diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index 1395877b18331..37ccaddbb2989 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -572,6 +572,52 @@ func TestProvision(t *testing.T) { }}, }, }, + { + Name: "ssh-key", + Files: map[string]string{ + "main.tf": `terraform { + required_providers { + coder = { + source = "coder/coder" + } + } + } + + resource "null_resource" "example" {} + data "coder_workspace_owner" "me" {} + resource "coder_metadata" "example" { + resource_id = null_resource.example.id + item { + key = "pubkey" + value = data.coder_workspace_owner.me.ssh_public_key + } + item { + key = "privkey" + value = data.coder_workspace_owner.me.ssh_private_key + } + } + `, + }, + Request: &proto.PlanRequest{ + Metadata: &proto.Metadata{ + WorkspaceOwnerSshPublicKey: "fake public key", + WorkspaceOwnerSshPrivateKey: "fake private key", + }, + }, + Response: &proto.PlanComplete{ + Resources: []*proto.Resource{{ + Name: "example", + Type: "null_resource", + Metadata: 
[]*proto.Resource_Metadata{{ + Key: "pubkey", + Value: "fake public key", + }, { + Key: "privkey", + Value: "fake private key", + }}, + }}, + }, + }, } for _, testCase := range testCases { diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index f6ead5e28ba16..8e8b2ca085eb3 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -1638,6 +1638,8 @@ type Metadata struct { TemplateId string `protobuf:"bytes,12,opt,name=template_id,json=templateId,proto3" json:"template_id,omitempty"` WorkspaceOwnerName string `protobuf:"bytes,13,opt,name=workspace_owner_name,json=workspaceOwnerName,proto3" json:"workspace_owner_name,omitempty"` WorkspaceOwnerGroups []string `protobuf:"bytes,14,rep,name=workspace_owner_groups,json=workspaceOwnerGroups,proto3" json:"workspace_owner_groups,omitempty"` + WorkspaceOwnerSshPublicKey string `protobuf:"bytes,15,opt,name=workspace_owner_ssh_public_key,json=workspaceOwnerSshPublicKey,proto3" json:"workspace_owner_ssh_public_key,omitempty"` + WorkspaceOwnerSshPrivateKey string `protobuf:"bytes,16,opt,name=workspace_owner_ssh_private_key,json=workspaceOwnerSshPrivateKey,proto3" json:"workspace_owner_ssh_private_key,omitempty"` } func (x *Metadata) Reset() { @@ -1770,6 +1772,20 @@ func (x *Metadata) GetWorkspaceOwnerGroups() []string { return nil } +func (x *Metadata) GetWorkspaceOwnerSshPublicKey() string { + if x != nil { + return x.WorkspaceOwnerSshPublicKey + } + return "" +} + +func (x *Metadata) GetWorkspaceOwnerSshPrivateKey() string { + if x != nil { + return x.WorkspaceOwnerSshPrivateKey + } + return "" +} + // Config represents execution configuration shared by all subsequent requests in the Session type Config struct { state protoimpl.MessageState @@ -2884,7 +2900,7 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x73, 
0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x69, 0x73, 0x5f, 0x6e, 0x75, 0x6c, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x06, 0x69, - 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xb7, 0x05, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x73, 0x4e, 0x75, 0x6c, 0x6c, 0x22, 0xc1, 0x06, 0x0a, 0x08, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x1b, 0x0a, 0x09, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x55, 0x72, 0x6c, 0x12, 0x53, 0x0a, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x72, 0x61, @@ -2927,143 +2943,152 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x67, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x18, 0x0e, 0x20, 0x03, 0x28, 0x09, 0x52, 0x14, 0x77, 0x6f, 0x72, 0x6b, 0x73, - 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x22, - 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, - 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, - 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, - 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, - 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, - 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x0e, 
0x0a, 0x0c, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, - 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, - 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, - 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, - 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, - 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, - 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, - 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, - 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, - 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, - 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, - 0x38, 0x01, 0x22, 0xb5, 0x02, 0x0a, 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x31, 
0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, - 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x47, 0x72, 0x6f, 0x75, 0x70, 0x73, 0x12, + 0x42, 0x0a, 0x1e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6f, 0x77, 0x6e, + 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, + 0x79, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1a, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, + 0x4b, 0x65, 0x79, 0x12, 0x44, 0x0a, 0x1f, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x5f, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x73, 0x73, 0x68, 0x5f, 0x70, 0x72, 0x69, 0x76, 0x61, + 0x74, 0x65, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x10, 0x20, 0x01, 0x28, 0x09, 0x52, 0x1b, 0x77, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x4f, 0x77, 0x6e, 0x65, 0x72, 0x53, 0x73, 0x68, 0x50, + 0x72, 0x69, 0x76, 0x61, 0x74, 0x65, 0x4b, 0x65, 0x79, 0x22, 0x8a, 0x01, 0x0a, 0x06, 0x43, 0x6f, + 0x6e, 0x66, 0x69, 0x67, 0x12, 0x36, 0x0a, 0x17, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, + 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x15, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x53, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, 0x65, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 
0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x5f, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x13, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x4c, 0x6f, + 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x22, 0x0e, 0x0a, 0x0c, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, + 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0xa3, 0x02, 0x0a, 0x0d, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, + 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x4c, + 0x0a, 0x12, 0x74, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, 0x65, 0x5f, 0x76, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x54, 0x65, 0x6d, 0x70, 0x6c, 0x61, 0x74, + 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x11, 0x74, 0x65, 0x6d, 0x70, 0x6c, + 0x61, 0x74, 0x65, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x16, 0x0a, 0x06, + 0x72, 0x65, 0x61, 0x64, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x72, 0x65, + 0x61, 0x64, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, + 0x65, 0x5f, 0x74, 0x61, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2d, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0d, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x1a, 0x40, 0x0a, 0x12, 0x57, 0x6f, + 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x61, 0x67, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, + 
0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, + 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xb5, 0x02, 0x0a, + 0x0b, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, + 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, + 0x53, 0x0a, 0x15, 0x72, 0x69, 0x63, 0x68, 0x5f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, + 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, + 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, + 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, + 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, + 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, + 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 
0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, + 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x73, 0x22, 0xf8, 0x01, 0x0a, 0x0c, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, - 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x13, 0x72, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x43, 0x0a, 0x0f, 0x76, 0x61, - 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, - 0x0e, 0x76, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, - 0x59, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, - 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, + 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 
0x61, 0x0a, 0x17, + 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, + 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, + 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, + 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x22, 0x8f, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, + 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 
0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, + 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x64, 0x65, 0x72, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, - 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0xf8, 0x01, 0x0a, 0x0c, 0x50, - 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, - 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, 0x63, 0x68, 0x50, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x5f, 0x61, - 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 
0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x15, - 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0x41, 0x0a, 0x0c, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x31, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x52, 0x08, - 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x8f, 0x02, 0x0a, 0x0d, 0x41, 0x70, 0x70, - 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x3a, 0x0a, 0x0a, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x69, - 0x63, 0x68, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x52, 0x0a, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x73, 0x12, 0x61, 0x0a, 0x17, 0x65, 0x78, 0x74, 0x65, 0x72, - 0x6e, 0x61, 0x6c, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, - 
0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x45, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, - 0x75, 0x74, 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x52, 0x15, 0x65, 0x78, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x41, 0x75, 0x74, - 0x68, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, - 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, - 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, - 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, - 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, - 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, - 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, - 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 
0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, - 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, - 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, - 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, - 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, - 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, - 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, - 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, - 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f, - 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, - 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, - 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, - 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, - 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 
0x67, 0x4c, 0x65, 0x76, - 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, - 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, - 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, - 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, - 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, - 0x52, 0x4f, 0x59, 0x10, 0x02, 0x32, 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, - 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, - 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x64, 0x65, 0x72, 0x73, 0x22, 0x0f, 0x0a, 0x0d, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x8c, 0x02, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x12, 0x2d, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x48, 0x00, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, + 0x12, 0x31, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, + 0x72, 0x73, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, + 0x72, 0x73, 0x65, 0x12, 0x2e, 0x0a, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x18, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x50, 0x6c, 0x61, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, 0x04, 0x70, + 0x6c, 0x61, 0x6e, 0x12, 0x31, 0x0a, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x48, 0x00, 0x52, + 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, + 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x48, 0x00, 0x52, 0x06, 0x63, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x42, 0x06, 0x0a, 0x04, + 0x74, 0x79, 0x70, 0x65, 0x22, 0xd1, 0x01, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, + 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x32, 0x0a, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x70, 0x61, 0x72, 0x73, 0x65, 0x12, 0x2f, 0x0a, 0x04, 0x70, + 0x6c, 0x61, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x70, 0x72, 
0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x6c, 0x61, 0x6e, 0x43, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x04, 0x70, 0x6c, 0x61, 0x6e, 0x12, 0x32, 0x0a, 0x05, + 0x61, 0x70, 0x70, 0x6c, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x41, 0x70, 0x70, 0x6c, 0x79, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x05, 0x61, 0x70, 0x70, 0x6c, 0x79, + 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, + 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, + 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, + 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, + 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x2a, 0x3b, 0x0a, 0x0f, 0x41, 0x70, 0x70, + 0x53, 0x68, 0x61, 0x72, 0x69, 0x6e, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, + 0x4f, 0x57, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x11, 0x0a, 0x0d, 0x41, 0x55, 0x54, 0x48, 0x45, + 0x4e, 0x54, 0x49, 0x43, 0x41, 0x54, 0x45, 0x44, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x50, 0x55, + 0x42, 0x4c, 0x49, 0x43, 0x10, 0x02, 0x2a, 0x37, 0x0a, 0x13, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x09, 0x0a, + 0x05, 0x53, 0x54, 0x41, 0x52, 0x54, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x53, 0x54, 0x4f, 0x50, + 0x10, 0x01, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x45, 0x53, 0x54, 0x52, 0x4f, 0x59, 0x10, 0x02, 0x32, + 0x49, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x3a, + 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x15, 0x2e, 0x70, 0x72, 
0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, + 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x28, 0x01, 0x30, 0x01, 0x42, 0x30, 0x5a, 0x2e, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index e378ff79dcd46..a17b78bcc3ba4 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -226,6 +226,8 @@ message Metadata { string template_id = 12; string workspace_owner_name = 13; repeated string workspace_owner_groups = 14; + string workspace_owner_ssh_public_key = 15; + string workspace_owner_ssh_private_key = 16; } // Config represents execution configuration shared by all subsequent requests in the Session diff --git a/site/e2e/provisionerGenerated.ts b/site/e2e/provisionerGenerated.ts index 744b5c0918f32..5c3d7ebc53512 100644 --- a/site/e2e/provisionerGenerated.ts +++ b/site/e2e/provisionerGenerated.ts @@ -231,6 +231,8 @@ export interface Metadata { templateId: string; workspaceOwnerName: string; workspaceOwnerGroups: string[]; + workspaceOwnerSshPublicKey: string; + workspaceOwnerSshPrivateKey: string; } /** Config represents execution configuration shared by all subsequent requests in the Session */ @@ -842,6 +844,12 @@ export const Metadata = { for (const v of message.workspaceOwnerGroups) { writer.uint32(114).string(v!); } + if (message.workspaceOwnerSshPublicKey !== "") { + writer.uint32(122).string(message.workspaceOwnerSshPublicKey); + } + if (message.workspaceOwnerSshPrivateKey !== "") { + writer.uint32(130).string(message.workspaceOwnerSshPrivateKey); + } return writer; }, }; From b69f6358f0c030f5d66d0312ebb5b15830f06209 
Mon Sep 17 00:00:00 2001 From: Matt Vollmer Date: Wed, 29 May 2024 08:37:06 -0400 Subject: [PATCH 125/149] Update manifest.json (#13391) --- docs/manifest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/manifest.json b/docs/manifest.json index 7e23120d83b6a..59254175289b7 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -1106,8 +1106,8 @@ "path": "./guides/artifactory-integration.md" }, { - "title": "Island Secure Browser Integration", - "description": "Integrate Coder with Island's Secure Browser", + "title": "Island Enterprise Browser Integration", + "description": "Integrate Coder with Island's Enterprise Browser", "path": "./guides/island-integration.md" }, { From afd9d3b35ff7c9f1cee28bd370a81b9e88a8bde5 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Wed, 29 May 2024 09:49:43 -0500 Subject: [PATCH 126/149] feat: add api for patching custom org roles (#13357) * chore: implement patching custom organization roles --- coderd/apidoc/docs.go | 84 ++++---- coderd/apidoc/swagger.json | 76 ++++---- coderd/coderd.go | 12 +- coderd/database/db2sdk/db2sdk.go | 23 ++- coderd/database/dbauthz/dbauthz.go | 17 +- coderd/members.go | 37 +--- coderd/roles.go | 47 +++++ codersdk/roles.go | 24 ++- docs/api/members.md | 298 +++++++++++++---------------- docs/api/schemas.md | 84 ++++---- enterprise/coderd/coderd.go | 21 +- enterprise/coderd/roles.go | 79 ++++---- enterprise/coderd/roles_test.go | 192 ++++++++++++++----- enterprise/coderd/users.go | 25 --- site/src/api/typesGenerated.ts | 2 +- site/src/testHelpers/entities.ts | 8 +- 16 files changed, 565 insertions(+), 464 deletions(-) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 37e121e483068..f373e0079a780 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -2225,6 +2225,42 @@ const docTemplate = `{ } } } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Members" + ], + 
"summary": "Upsert a custom organization role", + "operationId": "upsert-a-custom-organization-role", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Organization ID", + "name": "organization", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Role" + } + } + } + } } }, "/organizations/{organization}/members/{user}/roles": { @@ -4362,32 +4398,6 @@ const docTemplate = `{ } } } - }, - "patch": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "produces": [ - "application/json" - ], - "tags": [ - "Members" - ], - "summary": "Upsert a custom site-wide role", - "operationId": "upsert-a-custom-site-wide-role", - "responses": { - "200": { - "description": "OK", - "schema": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.Role" - } - } - } - } } }, "/users/{user}": { @@ -8426,13 +8436,10 @@ const docTemplate = `{ "format": "uuid" }, "organization_permissions": { - "description": "map[\u003corg_id\u003e] -\u003e Permissions", - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.Permission" - } + "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" } }, "site_permissions": { @@ -11250,13 +11257,10 @@ const docTemplate = `{ "format": "uuid" }, "organization_permissions": { - "description": "map[\u003corg_id\u003e] -\u003e Permissions", - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.Permission" - } + "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" } }, "site_permissions": { diff --git 
a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 293e9e8e65265..84bb41c44fcdd 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -1942,6 +1942,38 @@ } } } + }, + "patch": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Members"], + "summary": "Upsert a custom organization role", + "operationId": "upsert-a-custom-organization-role", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Organization ID", + "name": "organization", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Role" + } + } + } + } } }, "/organizations/{organization}/members/{user}/roles": { @@ -3841,28 +3873,6 @@ } } } - }, - "patch": { - "security": [ - { - "CoderSessionToken": [] - } - ], - "produces": ["application/json"], - "tags": ["Members"], - "summary": "Upsert a custom site-wide role", - "operationId": "upsert-a-custom-site-wide-role", - "responses": { - "200": { - "description": "OK", - "schema": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.Role" - } - } - } - } } }, "/users/{user}": { @@ -7481,13 +7491,10 @@ "format": "uuid" }, "organization_permissions": { - "description": "map[\u003corg_id\u003e] -\u003e Permissions", - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.Permission" - } + "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" } }, "site_permissions": { @@ -10142,13 +10149,10 @@ "format": "uuid" }, "organization_permissions": { - "description": "map[\u003corg_id\u003e] -\u003e Permissions", - "type": "object", - "additionalProperties": { - "type": "array", - "items": { - "$ref": 
"#/definitions/codersdk.Permission" - } + "description": "OrganizationPermissions are specific for the organization in the field 'OrganizationID' above.", + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.Permission" } }, "site_permissions": { diff --git a/coderd/coderd.go b/coderd/coderd.go index 9ee21a23cf79f..9c748d06eeb71 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -424,6 +424,7 @@ func New(options *Options) *API { TemplateScheduleStore: options.TemplateScheduleStore, UserQuietHoursScheduleStore: options.UserQuietHoursScheduleStore, AccessControlStore: options.AccessControlStore, + CustomRoleHandler: atomic.Pointer[CustomRoleHandler]{}, Experiments: experiments, healthCheckGroup: &singleflight.Group[string, *healthsdk.HealthcheckReport]{}, Acquirer: provisionerdserver.NewAcquirer( @@ -436,6 +437,8 @@ func New(options *Options) *API { workspaceUsageTracker: options.WorkspaceUsageTracker, } + var customRoleHandler CustomRoleHandler = &agplCustomRoleHandler{} + api.CustomRoleHandler.Store(&customRoleHandler) api.AppearanceFetcher.Store(&appearance.DefaultFetcher) api.PortSharer.Store(&portsharing.DefaultPortSharer) buildInfo := codersdk.BuildInfoResponse{ @@ -828,7 +831,12 @@ func New(options *Options) *API { }) }) r.Route("/members", func(r chi.Router) { - r.Get("/roles", api.assignableOrgRoles) + r.Route("/roles", func(r chi.Router) { + r.Get("/", api.assignableOrgRoles) + r.With(httpmw.RequireExperiment(api.Experiments, codersdk.ExperimentCustomRoles)). + Patch("/", api.patchOrgRoles) + }) + r.Route("/{user}", func(r chi.Router) { r.Use( httpmw.ExtractOrganizationMemberParam(options.Database), @@ -1249,6 +1257,8 @@ type API struct { // passed to dbauthz. AccessControlStore *atomic.Pointer[dbauthz.AccessControlStore] PortSharer atomic.Pointer[portsharing.PortSharer] + // CustomRoleHandler is the AGPL/Enterprise implementation for custom roles. 
+ CustomRoleHandler atomic.Pointer[CustomRoleHandler] HTTPAuth *HTTPAuthorizer diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 590183bd43dd1..2fe9ac9af7a3d 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -531,12 +531,16 @@ func Role(role rbac.Role) codersdk.Role { if err != nil { roleName = role.Name } + return codersdk.Role{ - Name: roleName, - OrganizationID: orgIDStr, - DisplayName: role.DisplayName, - SitePermissions: List(role.Site, Permission), - OrganizationPermissions: Map(role.Org, ListLazy(Permission)), + Name: roleName, + OrganizationID: orgIDStr, + DisplayName: role.DisplayName, + SitePermissions: List(role.Site, Permission), + // This is not perfect. If there are organization permissions in another + // organization, they will be omitted. This should not be allowed, so + // should never happen. + OrganizationPermissions: List(role.Org[orgIDStr], Permission), UserPermissions: List(role.User, Permission), } } @@ -550,11 +554,18 @@ func Permission(permission rbac.Permission) codersdk.Permission { } func RoleToRBAC(role codersdk.Role) rbac.Role { + orgPerms := map[string][]rbac.Permission{} + if role.OrganizationID != "" { + orgPerms = map[string][]rbac.Permission{ + role.OrganizationID: List(role.OrganizationPermissions, PermissionToRBAC), + } + } + return rbac.Role{ Name: rbac.RoleName(role.Name, role.OrganizationID), DisplayName: role.DisplayName, Site: List(role.SitePermissions, PermissionToRBAC), - Org: Map(role.OrganizationPermissions, ListLazy(PermissionToRBAC)), + Org: orgPerms, User: List(role.UserPermissions, PermissionToRBAC), } } diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 0ab78e75fe196..ec9d14bb57de6 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -600,7 +600,7 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r customRoles := make([]string, 0) 
// Validate that the roles being assigned are valid. for _, r := range grantedRoles { - _, isOrgRole := rbac.IsOrgRole(r) + roleOrgIDStr, isOrgRole := rbac.IsOrgRole(r) if shouldBeOrgRoles && !isOrgRole { return xerrors.Errorf("Must only update org roles") } @@ -608,6 +608,21 @@ func (q *querier) canAssignRoles(ctx context.Context, orgID *uuid.UUID, added, r return xerrors.Errorf("Must only update site wide roles") } + if shouldBeOrgRoles { + roleOrgID, err := uuid.Parse(roleOrgIDStr) + if err != nil { + return xerrors.Errorf("role %q has invalid uuid for org: %w", r, err) + } + + if orgID == nil { + return xerrors.Errorf("should never happen, orgID is nil, but trying to assign an organization role") + } + + if roleOrgID != *orgID { + return xerrors.Errorf("attempted to assign role from a different org, role %q to %q", r, orgID.String()) + } + } + // All roles should be valid roles if _, err := rbac.RoleByName(r); err != nil { customRoles = append(customRoles, r) diff --git a/coderd/members.go b/coderd/members.go index 6a3fe3b2bcb09..beae302ab3124 100644 --- a/coderd/members.go +++ b/coderd/members.go @@ -1,13 +1,8 @@ package coderd import ( - "context" "net/http" - "github.com/google/uuid" - - "golang.org/x/xerrors" - "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/rbac" @@ -48,7 +43,7 @@ func (api *API) putMemberRoles(rw http.ResponseWriter, r *http.Request) { return } - updatedUser, err := api.updateOrganizationMemberRoles(ctx, database.UpdateMemberRolesParams{ + updatedUser, err := api.Database.UpdateMemberRoles(ctx, database.UpdateMemberRolesParams{ GrantedRoles: params.Roles, UserID: member.UserID, OrgID: organization.ID, @@ -63,36 +58,6 @@ func (api *API) putMemberRoles(rw http.ResponseWriter, r *http.Request) { httpapi.Write(ctx, rw, http.StatusOK, convertOrganizationMember(updatedUser)) } -func (api *API) updateOrganizationMemberRoles(ctx context.Context, args database.UpdateMemberRolesParams) 
(database.OrganizationMember, error) { - // Enforce only site wide roles - for _, r := range args.GrantedRoles { - // Must be an org role for the org in the args - orgID, ok := rbac.IsOrgRole(r) - if !ok { - return database.OrganizationMember{}, xerrors.Errorf("must only update organization roles") - } - - roleOrg, err := uuid.Parse(orgID) - if err != nil { - return database.OrganizationMember{}, xerrors.Errorf("Role must have proper UUIDs for organization, %q does not", r) - } - - if roleOrg != args.OrgID { - return database.OrganizationMember{}, xerrors.Errorf("Must only pass roles for org %q", args.OrgID.String()) - } - - if _, err := rbac.RoleByName(r); err != nil { - return database.OrganizationMember{}, xerrors.Errorf("%q is not a supported organization role", r) - } - } - - updatedUser, err := api.Database.UpdateMemberRoles(ctx, args) - if err != nil { - return database.OrganizationMember{}, xerrors.Errorf("Update site roles: %w", err) - } - return updatedUser, nil -} - func convertOrganizationMember(mem database.OrganizationMember) codersdk.OrganizationMember { convertedMember := codersdk.OrganizationMember{ UserID: mem.UserID, diff --git a/coderd/roles.go b/coderd/roles.go index a00af23ce98eb..e8505baa4d255 100644 --- a/coderd/roles.go +++ b/coderd/roles.go @@ -1,6 +1,7 @@ package coderd import ( + "context" "net/http" "github.com/google/uuid" @@ -16,6 +17,52 @@ import ( "github.com/coder/coder/v2/coderd/rbac" ) +// CustomRoleHandler handles AGPL/Enterprise interface for handling custom +// roles. Ideally only included in the enterprise package, but the routes are +// intermixed with AGPL endpoints. 
+type CustomRoleHandler interface { + PatchOrganizationRole(ctx context.Context, db database.Store, rw http.ResponseWriter, orgID uuid.UUID, role codersdk.Role) (codersdk.Role, bool) +} + +type agplCustomRoleHandler struct{} + +func (agplCustomRoleHandler) PatchOrganizationRole(ctx context.Context, _ database.Store, rw http.ResponseWriter, _ uuid.UUID, _ codersdk.Role) (codersdk.Role, bool) { + httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ + Message: "Creating and updating custom roles is an Enterprise feature. Contact sales!", + }) + return codersdk.Role{}, false +} + +// patchRole will allow creating a custom organization role +// +// @Summary Upsert a custom organization role +// @ID upsert-a-custom-organization-role +// @Security CoderSessionToken +// @Produce json +// @Param organization path string true "Organization ID" format(uuid) +// @Tags Members +// @Success 200 {array} codersdk.Role +// @Router /organizations/{organization}/members/roles [patch] +func (api *API) patchOrgRoles(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + handler = *api.CustomRoleHandler.Load() + organization = httpmw.OrganizationParam(r) + ) + + var req codersdk.Role + if !httpapi.Read(ctx, rw, r, &req) { + return + } + + updated, ok := handler.PatchOrganizationRole(ctx, api.Database, rw, organization.ID, req) + if !ok { + return + } + + httpapi.Write(ctx, rw, http.StatusOK, updated) +} + // AssignableSiteRoles returns all site wide roles that can be assigned. 
// // @Summary Get site member roles diff --git a/codersdk/roles.go b/codersdk/roles.go index c803e92f44bb2..8b119e935a6c6 100644 --- a/codersdk/roles.go +++ b/codersdk/roles.go @@ -39,14 +39,26 @@ type Role struct { OrganizationID string `json:"organization_id" table:"organization_id" format:"uuid"` DisplayName string `json:"display_name" table:"display_name"` SitePermissions []Permission `json:"site_permissions" table:"site_permissions"` - // map[] -> Permissions - OrganizationPermissions map[string][]Permission `json:"organization_permissions" table:"org_permissions"` - UserPermissions []Permission `json:"user_permissions" table:"user_permissions"` + // OrganizationPermissions are specific for the organization in the field 'OrganizationID' above. + OrganizationPermissions []Permission `json:"organization_permissions" table:"org_permissions"` + UserPermissions []Permission `json:"user_permissions" table:"user_permissions"` } -// PatchRole will upsert a custom site wide role -func (c *Client) PatchRole(ctx context.Context, req Role) (Role, error) { - res, err := c.Request(ctx, http.MethodPatch, "/api/v2/users/roles", req) +// FullName returns the role name scoped to the organization ID. This is useful if +// printing a set of roles from different scopes, as duplicated names across multiple +// scopes will become unique. +// In practice, this is primarily used in testing. 
+func (r Role) FullName() string { + if r.OrganizationID == "" { + return r.Name + } + return r.Name + ":" + r.OrganizationID +} + +// PatchOrganizationRole will upsert a custom organization role +func (c *Client) PatchOrganizationRole(ctx context.Context, organizationID uuid.UUID, req Role) (Role, error) { + res, err := c.Request(ctx, http.MethodPatch, + fmt.Sprintf("/api/v2/organizations/%s/members/roles", organizationID.String()), req) if err != nil { return Role{}, err } diff --git a/docs/api/members.md b/docs/api/members.md index 27536a6c836fa..6364b08ca528e 100644 --- a/docs/api/members.md +++ b/docs/api/members.md @@ -31,22 +31,13 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "organization_permissions": { - "property1": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ], - "property2": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ] - }, + "organization_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "site_permissions": [ { "action": "application_connect", @@ -75,21 +66,20 @@ curl -X GET http://coder-server:8080/api/v2/organizations/{organization}/members Status Code **200** -| Name | Type | Required | Restrictions | Description | -| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | -| `[array item]` | array | false | | | -| `» assignable` | boolean | false | | | -| `» built_in` | boolean | false | | Built in roles are immutable | -| `» display_name` | string | false | | | -| `» name` | string | false | | | -| `» organization_id` | string(uuid) | false | | | -| `» organization_permissions` | object | false | | map[] -> Permissions | -| `»» [any property]` | array | false | | | 
-| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»»» negate` | boolean | false | | Negate makes this a negative permission | -| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `» site_permissions` | array | false | | | -| `» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | ----------------------------------------------------------------------------------------------- | +| `[array item]` | array | false | | | +| `» assignable` | boolean | false | | | +| `» built_in` | boolean | false | | Built in roles are immutable | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | +| `» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. | +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | #### Enumerated Values @@ -136,75 +126,24 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). 
-## Assign role to organization member +## Upsert a custom organization role ### Code samples ```shell # Example request using curl -curl -X PUT http://coder-server:8080/api/v2/organizations/{organization}/members/{user}/roles \ - -H 'Content-Type: application/json' \ +curl -X PATCH http://coder-server:8080/api/v2/organizations/{organization}/members/roles \ -H 'Accept: application/json' \ -H 'Coder-Session-Token: API_KEY' ``` -`PUT /organizations/{organization}/members/{user}/roles` - -> Body parameter - -```json -{ - "roles": ["string"] -} -``` +`PATCH /organizations/{organization}/members/roles` ### Parameters -| Name | In | Type | Required | Description | -| -------------- | ---- | ------------------------------------------------------ | -------- | -------------------- | -| `organization` | path | string | true | Organization ID | -| `user` | path | string | true | User ID, name, or me | -| `body` | body | [codersdk.UpdateRoles](schemas.md#codersdkupdateroles) | true | Update roles request | - -### Example responses - -> 200 Response - -```json -{ - "created_at": "2019-08-24T14:15:22Z", - "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "roles": [ - { - "display_name": "string", - "name": "string" - } - ], - "updated_at": "2019-08-24T14:15:22Z", - "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5" -} -``` - -### Responses - -| Status | Meaning | Description | Schema | -| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------- | -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.OrganizationMember](schemas.md#codersdkorganizationmember) | - -To perform this operation, you must be authenticated. [Learn more](authentication.md). 
- -## Get site member roles - -### Code samples - -```shell -# Example request using curl -curl -X GET http://coder-server:8080/api/v2/users/roles \ - -H 'Accept: application/json' \ - -H 'Coder-Session-Token: API_KEY' -``` - -`GET /users/roles` +| Name | In | Type | Required | Description | +| -------------- | ---- | ------------ | -------- | --------------- | +| `organization` | path | string(uuid) | true | Organization ID | ### Example responses @@ -213,27 +152,16 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ ```json [ { - "assignable": true, - "built_in": true, "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "organization_permissions": { - "property1": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ], - "property2": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ] - }, + "organization_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "site_permissions": [ { "action": "application_connect", @@ -254,29 +182,26 @@ curl -X GET http://coder-server:8080/api/v2/users/roles \ ### Responses -| Status | Meaning | Description | Schema | -| ------ | ------------------------------------------------------- | ----------- | ----------------------------------------------------------------------- | -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.AssignableRoles](schemas.md#codersdkassignableroles) | +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ------------------------------------------------- | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.Role](schemas.md#codersdkrole) | -

Response Schema

+

Response Schema

Status Code **200** -| Name | Type | Required | Restrictions | Description | -| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | -| `[array item]` | array | false | | | -| `» assignable` | boolean | false | | | -| `» built_in` | boolean | false | | Built in roles are immutable | -| `» display_name` | string | false | | | -| `» name` | string | false | | | -| `» organization_id` | string(uuid) | false | | | -| `» organization_permissions` | object | false | | map[] -> Permissions | -| `»» [any property]` | array | false | | | -| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»»» negate` | boolean | false | | Negate makes this a negative permission | -| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `» site_permissions` | array | false | | | -| `» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | ----------------------------------------------------------------------------------------------- | +| `[array item]` | array | false | | | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | +| `» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | #### Enumerated Values @@ -323,18 +248,75 @@ Status Code **200** To perform this operation, you must be authenticated. [Learn more](authentication.md). -## Upsert a custom site-wide role +## Assign role to organization member + +### Code samples + +```shell +# Example request using curl +curl -X PUT http://coder-server:8080/api/v2/organizations/{organization}/members/{user}/roles \ + -H 'Content-Type: application/json' \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`PUT /organizations/{organization}/members/{user}/roles` + +> Body parameter + +```json +{ + "roles": ["string"] +} +``` + +### Parameters + +| Name | In | Type | Required | Description | +| -------------- | ---- | ------------------------------------------------------ | -------- | -------------------- | +| `organization` | path | string | true | Organization ID | +| `user` | path | string | true | User ID, name, or me | +| `body` | body | [codersdk.UpdateRoles](schemas.md#codersdkupdateroles) | true | Update roles request | + +### Example responses + +> 200 Response + +```json +{ + "created_at": "2019-08-24T14:15:22Z", + "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", + "roles": [ + { + "display_name": "string", + "name": "string" + } + ], + "updated_at": "2019-08-24T14:15:22Z", + "user_id": "a169451c-8525-4352-b8ca-070dd449a1a5" +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | -------------------------------------------------------------------- | +| 200 | 
[OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.OrganizationMember](schemas.md#codersdkorganizationmember) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). + +## Get site member roles ### Code samples ```shell # Example request using curl -curl -X PATCH http://coder-server:8080/api/v2/users/roles \ +curl -X GET http://coder-server:8080/api/v2/users/roles \ -H 'Accept: application/json' \ -H 'Coder-Session-Token: API_KEY' ``` -`PATCH /users/roles` +`GET /users/roles` ### Example responses @@ -343,25 +325,18 @@ curl -X PATCH http://coder-server:8080/api/v2/users/roles \ ```json [ { + "assignable": true, + "built_in": true, "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "organization_permissions": { - "property1": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ], - "property2": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ] - }, + "organization_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "site_permissions": [ { "action": "application_connect", @@ -382,27 +357,28 @@ curl -X PATCH http://coder-server:8080/api/v2/users/roles \ ### Responses -| Status | Meaning | Description | Schema | -| ------ | ------------------------------------------------------- | ----------- | ------------------------------------------------- | -| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.Role](schemas.md#codersdkrole) | +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ----------------------------------------------------------------------- | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | array of [codersdk.AssignableRoles](schemas.md#codersdkassignableroles) | -

Response Schema

+

Response Schema

Status Code **200** -| Name | Type | Required | Restrictions | Description | -| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | --------------------------------------- | -| `[array item]` | array | false | | | -| `» display_name` | string | false | | | -| `» name` | string | false | | | -| `» organization_id` | string(uuid) | false | | | -| `» organization_permissions` | object | false | | map[] -> Permissions | -| `»» [any property]` | array | false | | | -| `»»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | -| `»»» negate` | boolean | false | | Negate makes this a negative permission | -| `»»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | -| `» site_permissions` | array | false | | | -| `» user_permissions` | array | false | | | +| Name | Type | Required | Restrictions | Description | +| ---------------------------- | -------------------------------------------------------- | -------- | ------------ | ----------------------------------------------------------------------------------------------- | +| `[array item]` | array | false | | | +| `» assignable` | boolean | false | | | +| `» built_in` | boolean | false | | Built in roles are immutable | +| `» display_name` | string | false | | | +| `» name` | string | false | | | +| `» organization_id` | string(uuid) | false | | | +| `» organization_permissions` | array | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `»» action` | [codersdk.RBACAction](schemas.md#codersdkrbacaction) | false | | | +| `»» negate` | boolean | false | | Negate makes this a negative permission | +| `»» resource_type` | [codersdk.RBACResource](schemas.md#codersdkrbacresource) | false | | | +| `» site_permissions` | array | false | | | +| `» user_permissions` | array | false | | | #### Enumerated Values diff --git a/docs/api/schemas.md b/docs/api/schemas.md index ca7493ae53ec0..978da35a58d02 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -806,22 +806,13 @@ "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "organization_permissions": { - "property1": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ], - "property2": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ] - }, + "organization_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "site_permissions": [ { "action": "application_connect", @@ -841,17 +832,16 @@ ### Properties -| Name | Type | Required | Restrictions | Description | -| -------------------------- | --------------------------------------------------- | -------- | ------------ | ---------------------------- | -| `assignable` | boolean | false | | | -| `built_in` | boolean | false | | Built in roles are immutable | -| `display_name` | string | false | | | -| `name` | string | false | | | -| `organization_id` | string | false | | | -| `organization_permissions` | object | false | | map[] -> Permissions | -| » `[any property]` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| Name | Type | Required | Restrictions | Description | +| -------------------------- | 
--------------------------------------------------- | -------- | ------------ | ----------------------------------------------------------------------------------------------- | +| `assignable` | boolean | false | | | +| `built_in` | boolean | false | | Built in roles are immutable | +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_id` | string | false | | | +| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. | +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.AuditAction @@ -4330,22 +4320,13 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o "display_name": "string", "name": "string", "organization_id": "7c60d51f-b44e-4682-87d6-449835ea4de6", - "organization_permissions": { - "property1": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ], - "property2": [ - { - "action": "application_connect", - "negate": true, - "resource_type": "*" - } - ] - }, + "organization_permissions": [ + { + "action": "application_connect", + "negate": true, + "resource_type": "*" + } + ], "site_permissions": [ { "action": "application_connect", @@ -4365,15 +4346,14 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o ### Properties -| Name | Type | Required | Restrictions | Description | -| -------------------------- | --------------------------------------------------- | -------- | ------------ | ---------------------------- | -| `display_name` | string | false | | | -| `name` | string | false | | | -| `organization_id` | string | false | | | -| `organization_permissions` | object | false | | map[] -> Permissions | -| » `[any property]` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | -| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| Name | Type | Required | Restrictions | Description | +| -------------------------- | --------------------------------------------------- | -------- | ------------ | ----------------------------------------------------------------------------------------------- | +| `display_name` | string | false | | | +| `name` | string | false | | | +| `organization_id` | string | false | | | +| `organization_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | Organization permissions are specific for the organization in the field 'OrganizationID' above. 
| +| `site_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | +| `user_permissions` | array of [codersdk.Permission](#codersdkpermission) | false | | | ## codersdk.SSHConfig diff --git a/enterprise/coderd/coderd.go b/enterprise/coderd/coderd.go index 524bfd26f3d74..574d2c12dd2de 100644 --- a/enterprise/coderd/coderd.go +++ b/enterprise/coderd/coderd.go @@ -327,22 +327,6 @@ func New(ctx context.Context, options *Options) (_ *API, err error) { }) }) - r.Route("/users/roles", func(r chi.Router) { - r.Use( - apiKeyMiddleware, - ) - r.Group(func(r chi.Router) { - r.Use( - api.customRolesEnabledMW, - ) - r.Patch("/", api.patchRole) - }) - // Unfortunate, but this r.Route overrides the AGPL roles route. - // The AGPL does not have the entitlements to block the licensed - // routes, so we need to duplicate the AGPL here. - r.Get("/", api.AGPL.AssignableSiteRoles) - }) - r.Route("/users/{user}/quiet-hours", func(r chi.Router) { r.Use( api.autostopRequirementEnabledMW, @@ -761,6 +745,11 @@ func (api *API) updateEntitlements(ctx context.Context) error { api.AGPL.PortSharer.Store(&ps) } + if initial, changed, enabled := featureChanged(codersdk.FeatureCustomRoles); shouldUpdate(initial, changed, enabled) { + var handler coderd.CustomRoleHandler = &enterpriseCustomRoleHandler{Enabled: enabled} + api.AGPL.CustomRoleHandler.Store(&handler) + } + // External token encryption is soft-enforced featureExternalTokenEncryption := entitlements.Features[codersdk.FeatureExternalTokenEncryption] featureExternalTokenEncryption.Enabled = len(api.ExternalTokenEncryption) > 0 diff --git a/enterprise/coderd/roles.go b/enterprise/coderd/roles.go index 8e0827c9b3b02..448ec9f855cc0 100644 --- a/enterprise/coderd/roles.go +++ b/enterprise/coderd/roles.go @@ -1,6 +1,8 @@ package coderd import ( + "context" + "fmt" "net/http" "github.com/google/uuid" @@ -12,70 +14,83 @@ import ( "github.com/coder/coder/v2/codersdk" ) -// patchRole will allow creating a custom role -// -// 
@Summary Upsert a custom site-wide role -// @ID upsert-a-custom-site-wide-role -// @Security CoderSessionToken -// @Produce json -// @Tags Members -// @Success 200 {array} codersdk.Role -// @Router /users/roles [patch] -func (api *API) patchRole(rw http.ResponseWriter, r *http.Request) { - ctx := r.Context() +type enterpriseCustomRoleHandler struct { + Enabled bool +} - var req codersdk.Role - if !httpapi.Read(ctx, rw, r, &req) { - return +func (h enterpriseCustomRoleHandler) PatchOrganizationRole(ctx context.Context, db database.Store, rw http.ResponseWriter, orgID uuid.UUID, role codersdk.Role) (codersdk.Role, bool) { + if !h.Enabled { + httpapi.Write(ctx, rw, http.StatusForbidden, codersdk.Response{ + Message: "Custom roles are not enabled", + }) + return codersdk.Role{}, false } - if err := httpapi.NameValid(req.Name); err != nil { + if err := httpapi.NameValid(role.Name); err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Invalid role name", Detail: err.Error(), }) - return + return codersdk.Role{}, false + } + + // Only organization permissions are allowed to be granted + if len(role.SitePermissions) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request, not allowed to assign site wide permissions for an organization role.", + Detail: "organization scoped roles may not contain site wide permissions", + }) + return codersdk.Role{}, false + } + + if len(role.UserPermissions) > 0 { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Invalid request, not allowed to assign user permissions for an organization role.", + Detail: "organization scoped roles may not contain user permissions", + }) + return codersdk.Role{}, false } - if len(req.OrganizationPermissions) > 0 { - // Org perms should be assigned only in org specific roles. Otherwise, - // it gets complicated to keep track of who can do what. 
+ if role.OrganizationID != orgID.String() { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Invalid request, not allowed to assign organization permissions for a site wide role.", - Detail: "site wide roles may not contain organization specific permissions", + Message: "Invalid request, organization in role and url must match", + Detail: fmt.Sprintf("role organization=%q does not match URL=%q", role.OrganizationID, orgID.String()), }) - return + return codersdk.Role{}, false } // Make sure all permissions inputted are valid according to our policy. - rbacRole := db2sdk.RoleToRBAC(req) + rbacRole := db2sdk.RoleToRBAC(role) args, err := rolestore.ConvertRoleToDB(rbacRole) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Invalid request", Detail: err.Error(), }) - return + return codersdk.Role{}, false } - inserted, err := api.Database.UpsertCustomRole(ctx, database.UpsertCustomRoleParams{ - Name: args.Name, - DisplayName: args.DisplayName, - OrganizationID: uuid.NullUUID{}, + inserted, err := db.UpsertCustomRole(ctx, database.UpsertCustomRoleParams{ + Name: args.Name, + DisplayName: args.DisplayName, + OrganizationID: uuid.NullUUID{ + UUID: orgID, + Valid: true, + }, SitePermissions: args.SitePermissions, OrgPermissions: args.OrgPermissions, UserPermissions: args.UserPermissions, }) if httpapi.Is404Error(err) { httpapi.ResourceNotFound(rw) - return + return codersdk.Role{}, false } if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: "Failed to update role permissions", Detail: err.Error(), }) - return + return codersdk.Role{}, false } convertedInsert, err := rolestore.ConvertDBRole(inserted) @@ -84,8 +99,8 @@ func (api *API) patchRole(rw http.ResponseWriter, r *http.Request) { Message: "Permissions were updated, unable to read them back out of the database.", Detail: err.Error(), }) - return + return codersdk.Role{}, false } - httpapi.Write(ctx, rw, 
http.StatusOK, db2sdk.Role(convertedInsert)) + return db2sdk.Role(convertedInsert), true } diff --git a/enterprise/coderd/roles_test.go b/enterprise/coderd/roles_test.go index a7db9b718d946..e1d6855aff002 100644 --- a/enterprise/coderd/roles_test.go +++ b/enterprise/coderd/roles_test.go @@ -5,10 +5,10 @@ import ( "slices" "testing" + "github.com/google/uuid" "github.com/stretchr/testify/require" "github.com/coder/coder/v2/coderd/coderdtest" - "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/enterprise/coderd/coderdenttest" "github.com/coder/coder/v2/enterprise/coderd/license" @@ -16,19 +16,22 @@ import ( "github.com/coder/coder/v2/testutil" ) -func TestCustomRole(t *testing.T) { +func TestCustomOrganizationRole(t *testing.T) { t.Parallel() - templateAdminCustom := codersdk.Role{ - Name: "test-role", - DisplayName: "Testing Purposes", - // Basically creating a template admin manually - SitePermissions: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ - codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionViewInsights}, - codersdk.ResourceFile: {codersdk.ActionCreate, codersdk.ActionRead}, - codersdk.ResourceWorkspace: {codersdk.ActionRead}, - }), - OrganizationPermissions: nil, - UserPermissions: nil, + templateAdminCustom := func(orgID uuid.UUID) codersdk.Role { + return codersdk.Role{ + Name: "test-role", + DisplayName: "Testing Purposes", + OrganizationID: orgID.String(), + // Basically creating a template admin manually + SitePermissions: nil, + OrganizationPermissions: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ + codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionViewInsights}, + codersdk.ResourceFile: {codersdk.ActionCreate, codersdk.ActionRead}, + codersdk.ResourceWorkspace: {codersdk.ActionRead}, + }), + UserPermissions: 
nil, + } } // Create, assign, and use a custom role @@ -50,31 +53,43 @@ func TestCustomRole(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) //nolint:gocritic // owner is required for this - role, err := owner.PatchRole(ctx, templateAdminCustom) + role, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, templateAdminCustom(first.OrganizationID)) require.NoError(t, err, "upsert role") // Assign the custom template admin role - tmplAdmin, user := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.FullName()) // Assert the role exists - roleNamesF := func(role codersdk.SlimRole) string { return role.Name } - require.Contains(t, db2sdk.List(user.Roles, roleNamesF), role.Name) + // TODO: At present user roles are not returned by the user endpoints. + // Changing this might mess up the UI in how it renders the roles on the + // users page. When the users endpoint is updated, this should be uncommented. 
+ // roleNamesF := func(role codersdk.SlimRole) string { return role.Name } + // require.Contains(t, db2sdk.List(user.Roles, roleNamesF), role.Name) // Try to create a template version coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) // Verify the role exists in the list - allRoles, err := tmplAdmin.ListSiteRoles(ctx) + allRoles, err := tmplAdmin.ListOrganizationRoles(ctx, first.OrganizationID) require.NoError(t, err) + var foundRole codersdk.AssignableRoles require.True(t, slices.ContainsFunc(allRoles, func(selected codersdk.AssignableRoles) bool { - return selected.Name == role.Name - }), "role missing from site role list") + if selected.Name == role.Name { + foundRole = selected + return true + } + return false + }), "role missing from org role list") + + require.Len(t, foundRole.SitePermissions, 0) + require.Len(t, foundRole.OrganizationPermissions, 7) + require.Len(t, foundRole.UserPermissions, 0) }) // Revoked licenses cannot modify/create custom roles, but they can // use the existing roles. 
- t.Run("Revoked License", func(t *testing.T) { + t.Run("RevokedLicense", func(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} @@ -92,7 +107,7 @@ func TestCustomRole(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) //nolint:gocritic // owner is required for this - role, err := owner.PatchRole(ctx, templateAdminCustom) + role, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, templateAdminCustom(first.OrganizationID)) require.NoError(t, err, "upsert role") // Remove the license to block enterprise functionality @@ -105,11 +120,11 @@ func TestCustomRole(t *testing.T) { } // Verify functionality is lost - _, err = owner.PatchRole(ctx, templateAdminCustom) - require.ErrorContains(t, err, "Custom roles is an Enterprise feature", "upsert role") + _, err = owner.PatchOrganizationRole(ctx, first.OrganizationID, templateAdminCustom(first.OrganizationID)) + require.ErrorContains(t, err, "roles are not enabled") // Assign the custom template admin role - tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.FullName()) // Try to create a template version, eg using the custom role coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) @@ -133,26 +148,24 @@ func TestCustomRole(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) //nolint:gocritic // owner is required for this - role, err := owner.PatchRole(ctx, templateAdminCustom) + role, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, templateAdminCustom(first.OrganizationID)) require.NoError(t, err, "upsert role") // Assign the custom template admin role - tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.Name) + tmplAdmin, _ := coderdtest.CreateAnotherUser(t, owner, first.OrganizationID, role.FullName()) // Try to create a 
template version, eg using the custom role coderdtest.CreateTemplateVersion(t, tmplAdmin, first.OrganizationID, nil) //nolint:gocritic // owner is required for this - role, err = owner.PatchRole(ctx, codersdk.Role{ - Name: templateAdminCustom.Name, - DisplayName: templateAdminCustom.DisplayName, - // These are all left nil, which sets the custom role to have 0 - // permissions. Omitting this does not "inherit" what already - // exists. - SitePermissions: nil, - OrganizationPermissions: nil, - UserPermissions: nil, - }) + newRole := templateAdminCustom(first.OrganizationID) + // These are all left nil, which sets the custom role to have 0 + // permissions. Omitting this does not "inherit" what already + // exists. + newRole.SitePermissions = nil + newRole.OrganizationPermissions = nil + newRole.UserPermissions = nil + _, err = owner.PatchOrganizationRole(ctx, first.OrganizationID, newRole) require.NoError(t, err, "upsert role with override") // The role should no longer have template perms @@ -172,7 +185,7 @@ func TestCustomRole(t *testing.T) { t.Parallel() dv := coderdtest.DeploymentValues(t) dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} - owner, _ := coderdenttest.New(t, &coderdenttest.Options{ + owner, first := coderdenttest.New(t, &coderdenttest.Options{ Options: &coderdtest.Options{ DeploymentValues: dv, }, @@ -186,18 +199,103 @@ func TestCustomRole(t *testing.T) { ctx := testutil.Context(t, testutil.WaitMedium) //nolint:gocritic // owner is required for this - _, err := owner.PatchRole(ctx, codersdk.Role{ - Name: "Bad_Name", // No underscores allowed - DisplayName: "Testing Purposes", - // Basically creating a template admin manually - SitePermissions: codersdk.CreatePermissions(map[codersdk.RBACResource][]codersdk.RBACAction{ - codersdk.ResourceTemplate: {codersdk.ActionCreate, codersdk.ActionRead, codersdk.ActionUpdate, codersdk.ActionViewInsights}, - codersdk.ResourceFile: {codersdk.ActionCreate, codersdk.ActionRead}, - 
codersdk.ResourceWorkspace: {codersdk.ActionRead}, - }), + _, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, codersdk.Role{ + Name: "Bad_Name", // No underscores allowed + DisplayName: "Testing Purposes", + SitePermissions: nil, OrganizationPermissions: nil, UserPermissions: nil, }) require.ErrorContains(t, err, "Validation") }) + + t.Run("MismatchedOrganizations", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + //nolint:gocritic // owner is required for this + _, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, templateAdminCustom(uuid.New())) + require.ErrorContains(t, err, "does not match") + }) + + // Attempt to add site & user permissions, which is not allowed + t.Run("ExcessPermissions", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + siteRole := templateAdminCustom(first.OrganizationID) + siteRole.SitePermissions = []codersdk.Permission{ + { + ResourceType: codersdk.ResourceWorkspace, + Action: codersdk.ActionRead, + }, + } + + //nolint:gocritic // owner is required for this + _, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, siteRole) + require.ErrorContains(t, err, "site wide permissions") + + userRole := 
templateAdminCustom(first.OrganizationID) + userRole.UserPermissions = []codersdk.Permission{ + { + ResourceType: codersdk.ResourceWorkspace, + Action: codersdk.ActionRead, + }, + } + + //nolint:gocritic // owner is required for this + _, err = owner.PatchOrganizationRole(ctx, first.OrganizationID, userRole) + require.ErrorContains(t, err, "not allowed to assign user permissions") + }) + + t.Run("InvalidUUID", func(t *testing.T) { + t.Parallel() + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentCustomRoles)} + owner, first := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureCustomRoles: 1, + }, + }, + }) + + ctx := testutil.Context(t, testutil.WaitMedium) + + newRole := templateAdminCustom(first.OrganizationID) + newRole.OrganizationID = "0000" // This is not a valid uuid + + //nolint:gocritic // owner is required for this + _, err := owner.PatchOrganizationRole(ctx, first.OrganizationID, newRole) + require.ErrorContains(t, err, "Invalid request") + }) } diff --git a/enterprise/coderd/users.go b/enterprise/coderd/users.go index a29aa1836557d..935eeb8f6e689 100644 --- a/enterprise/coderd/users.go +++ b/enterprise/coderd/users.go @@ -14,31 +14,6 @@ import ( "github.com/coder/coder/v2/codersdk" ) -func (api *API) customRolesEnabledMW(next http.Handler) http.Handler { - return httpmw.RequireExperiment(api.AGPL.Experiments, codersdk.ExperimentCustomRoles)( - http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - // Entitlement must be enabled. 
- api.entitlementsMu.RLock() - entitled := api.entitlements.Features[codersdk.FeatureCustomRoles].Entitlement != codersdk.EntitlementNotEntitled - enabled := api.entitlements.Features[codersdk.FeatureCustomRoles].Enabled - api.entitlementsMu.RUnlock() - if !entitled { - httpapi.Write(r.Context(), rw, http.StatusForbidden, codersdk.Response{ - Message: "Custom roles is an Enterprise feature. Contact sales!", - }) - return - } - if !enabled { - httpapi.Write(r.Context(), rw, http.StatusForbidden, codersdk.Response{ - Message: "Custom roles is not enabled", - }) - return - } - - next.ServeHTTP(rw, r) - })) -} - func (api *API) autostopRequirementEnabledMW(next http.Handler) http.Handler { return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { // Entitlement must be enabled. diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 5d4d148758f36..171f6744680cb 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -980,7 +980,7 @@ export interface Role { readonly organization_id: string; readonly display_name: string; readonly site_permissions: readonly Permission[]; - readonly organization_permissions: Record; + readonly organization_permissions: readonly Permission[]; readonly user_permissions: readonly Permission[]; } diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 1fbb18aa86a07..5ff5fa6cd84c7 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -233,7 +233,7 @@ export const MockOwnerRole: TypesGen.Role = { name: "owner", display_name: "Owner", site_permissions: [], - organization_permissions: {}, + organization_permissions: [], user_permissions: [], organization_id: "", }; @@ -242,7 +242,7 @@ export const MockUserAdminRole: TypesGen.Role = { name: "user_admin", display_name: "User Admin", site_permissions: [], - organization_permissions: {}, + organization_permissions: [], user_permissions: [], organization_id: "", }; @@ 
-251,7 +251,7 @@ export const MockTemplateAdminRole: TypesGen.Role = { name: "template_admin", display_name: "Template Admin", site_permissions: [], - organization_permissions: {}, + organization_permissions: [], user_permissions: [], organization_id: "", }; @@ -265,7 +265,7 @@ export const MockAuditorRole: TypesGen.Role = { name: "auditor", display_name: "Auditor", site_permissions: [], - organization_permissions: {}, + organization_permissions: [], user_permissions: [], organization_id: "", }; From 5789ea5397c080058e254e3d7aa895fb5e95ad16 Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Wed, 29 May 2024 11:49:08 -0400 Subject: [PATCH 127/149] chore: move stat reporting into workspacestats package (#13386) --- coderd/agentapi/api.go | 8 +- coderd/agentapi/stats.go | 93 +-------- coderd/agentapi/stats_test.go | 91 ++++---- coderd/coderd.go | 16 +- coderd/database/dbauthz/setup_test.go | 2 +- coderd/insights_test.go | 15 +- .../insights/metricscollector_test.go | 8 +- coderd/workspaceagents.go | 79 +------ coderd/workspaceagents_test.go | 28 +-- coderd/workspaceagentsrpc.go | 2 +- coderd/workspaceapps/apptest/apptest.go | 2 +- coderd/workspaceapps/stats.go | 101 +-------- coderd/workspaceapps/stats_test.go | 2 +- .../activitybump.go | 3 +- .../activitybump_test.go | 6 +- coderd/workspacestats/reporter.go | 194 ++++++++++++++++++ enterprise/coderd/workspaceproxy.go | 2 +- enterprise/wsproxy/appstatsreporter.go | 2 +- scripts/rules.go | 6 +- 19 files changed, 314 insertions(+), 346 deletions(-) rename coderd/{agentapi => workspacestats}/activitybump.go (96%) rename coderd/{agentapi => workspacestats}/activitybump_test.go (98%) create mode 100644 coderd/workspacestats/reporter.go diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go index fa8563a141a45..b8b07672d6aa2 100644 --- a/coderd/agentapi/api.go +++ b/coderd/agentapi/api.go @@ -24,6 +24,7 @@ import ( "github.com/coder/coder/v2/coderd/prometheusmetrics" "github.com/coder/coder/v2/coderd/schedule" 
"github.com/coder/coder/v2/coderd/tracing" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/tailnet" tailnetproto "github.com/coder/coder/v2/tailnet/proto" @@ -59,7 +60,7 @@ type Options struct { DerpMapFn func() *tailcfg.DERPMap TailnetCoordinator *atomic.Pointer[tailnet.Coordinator] TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] - StatsBatcher StatsBatcher + StatsReporter *workspacestats.Reporter AppearanceFetcher *atomic.Pointer[appearance.Fetcher] PublishWorkspaceUpdateFn func(ctx context.Context, workspaceID uuid.UUID) PublishWorkspaceAgentLogsUpdateFn func(ctx context.Context, workspaceAgentID uuid.UUID, msg agentsdk.LogsNotifyMessage) @@ -114,12 +115,9 @@ func New(opts Options) *API { api.StatsAPI = &StatsAPI{ AgentFn: api.agent, Database: opts.Database, - Pubsub: opts.Pubsub, Log: opts.Log, - StatsBatcher: opts.StatsBatcher, - TemplateScheduleStore: opts.TemplateScheduleStore, + StatsReporter: opts.StatsReporter, AgentStatsRefreshInterval: opts.AgentStatsRefreshInterval, - UpdateAgentMetricsFn: opts.UpdateAgentMetricsFn, } api.LifecycleAPI = &LifecycleAPI{ diff --git a/coderd/agentapi/stats.go b/coderd/agentapi/stats.go index e91a3624e915d..ee17897572f3d 100644 --- a/coderd/agentapi/stats.go +++ b/coderd/agentapi/stats.go @@ -2,10 +2,8 @@ package agentapi import ( "context" - "sync/atomic" "time" - "golang.org/x/sync/errgroup" "golang.org/x/xerrors" "google.golang.org/protobuf/types/known/durationpb" @@ -15,10 +13,7 @@ import ( agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbtime" - "github.com/coder/coder/v2/coderd/database/pubsub" - "github.com/coder/coder/v2/coderd/prometheusmetrics" - "github.com/coder/coder/v2/coderd/schedule" - "github.com/coder/coder/v2/codersdk" + "github.com/coder/coder/v2/coderd/workspacestats" ) type StatsBatcher interface { @@ -28,12 +23,9 
@@ type StatsBatcher interface { type StatsAPI struct { AgentFn func(context.Context) (database.WorkspaceAgent, error) Database database.Store - Pubsub pubsub.Pubsub Log slog.Logger - StatsBatcher StatsBatcher - TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] + StatsReporter *workspacestats.Reporter AgentStatsRefreshInterval time.Duration - UpdateAgentMetricsFn func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) TimeNowFn func() time.Time // defaults to dbtime.Now() } @@ -69,80 +61,17 @@ func (a *StatsAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsR slog.F("payload", req), ) - now := a.now() - if req.Stats.ConnectionCount > 0 { - var nextAutostart time.Time - if workspace.AutostartSchedule.String != "" { - templateSchedule, err := (*(a.TemplateScheduleStore.Load())).Get(ctx, a.Database, workspace.TemplateID) - // If the template schedule fails to load, just default to bumping - // without the next transition and log it. - if err != nil { - a.Log.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID), - slog.Error(err), - ) - } else { - next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) - if allowed { - nextAutostart = next - } - } - } - ActivityBumpWorkspace(ctx, a.Log.Named("activity_bump"), a.Database, workspace.ID, nextAutostart) - } - - var errGroup errgroup.Group - errGroup.Go(func() error { - err := a.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, req.Stats) - if err != nil { - a.Log.Error(ctx, "add agent stats to batcher", slog.Error(err)) - return xerrors.Errorf("insert workspace agent stats batch: %w", err) - } - return nil - }) - errGroup.Go(func() error { - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
- err := a.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }) - if err != nil { - return xerrors.Errorf("update workspace LastUsedAt: %w", err) - } - return nil - }) - if a.UpdateAgentMetricsFn != nil { - errGroup.Go(func() error { - user, err := a.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("get user: %w", err) - } - - a.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: workspaceAgent.Name, - TemplateName: getWorkspaceAgentByIDRow.TemplateName, - }, req.Stats.Metrics) - return nil - }) - } - err = errGroup.Wait() + err = a.StatsReporter.ReportAgentStats( + ctx, + a.now(), + workspace, + workspaceAgent, + getWorkspaceAgentByIDRow.TemplateName, + req.Stats, + ) if err != nil { - return nil, xerrors.Errorf("update stats in database: %w", err) + return nil, xerrors.Errorf("report agent stats: %w", err) } - // Tell the frontend about the new agent report, now that everything is updated - a.publishWorkspaceAgentStats(ctx, workspace.ID) - return res, nil } - -func (a *StatsAPI) publishWorkspaceAgentStats(ctx context.Context, workspaceID uuid.UUID) { - err := a.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspaceID), []byte{}) - if err != nil { - a.Log.Warn(ctx, "failed to publish workspace agent stats", - slog.F("workspace_id", workspaceID), slog.Error(err)) - } -} diff --git a/coderd/agentapi/stats_test.go b/coderd/agentapi/stats_test.go index 943c8e7ac0e17..c304dea93ecc9 100644 --- a/coderd/agentapi/stats_test.go +++ b/coderd/agentapi/stats_test.go @@ -22,6 +22,7 @@ import ( "github.com/coder/coder/v2/coderd/database/pubsub" "github.com/coder/coder/v2/coderd/prometheusmetrics" "github.com/coder/coder/v2/coderd/schedule" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" ) @@ -129,21 +130,24 
@@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: ps, - StatsBatcher: batcher, - TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + Database: dbM, + StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: dbM, + Pubsub: ps, + StatsBatcher: batcher, + TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { + updateAgentMetricsFnCalled = true + assert.Equal(t, prometheusmetrics.AgentMetricLabels{ + Username: user.Username, + WorkspaceName: workspace.Name, + AgentName: agent.Name, + TemplateName: template.Name, + }, labels) + assert.Equal(t, req.Stats.Metrics, metrics) + }, + }), AgentStatsRefreshInterval: 10 * time.Second, - UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { - updateAgentMetricsFnCalled = true - assert.Equal(t, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: agent.Name, - TemplateName: template.Name, - }, labels) - assert.Equal(t, req.Stats.Metrics, metrics) - }, TimeNowFn: func() time.Time { return now }, @@ -232,13 +236,16 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: ps, - StatsBatcher: batcher, - TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + Database: dbM, + StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: dbM, + Pubsub: ps, + StatsBatcher: batcher, + TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + // Ignored when nil. 
+ UpdateAgentMetricsFn: nil, + }), AgentStatsRefreshInterval: 10 * time.Second, - // Ignored when nil. - UpdateAgentMetricsFn: nil, TimeNowFn: func() time.Time { return now }, @@ -274,12 +281,15 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: ps, - StatsBatcher: nil, // should not be called - TemplateScheduleStore: nil, // should not be called + Database: dbM, + StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: dbM, + Pubsub: ps, + StatsBatcher: nil, // should not be called + TemplateScheduleStore: nil, // should not be called + UpdateAgentMetricsFn: nil, // should not be called + }), AgentStatsRefreshInterval: 10 * time.Second, - UpdateAgentMetricsFn: nil, // should not be called TimeNowFn: func() time.Time { panic("should not be called") }, @@ -343,21 +353,24 @@ func TestUpdateStates(t *testing.T) { AgentFn: func(context.Context) (database.WorkspaceAgent, error) { return agent, nil }, - Database: dbM, - Pubsub: ps, - StatsBatcher: batcher, - TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + Database: dbM, + StatsReporter: workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: dbM, + Pubsub: ps, + StatsBatcher: batcher, + TemplateScheduleStore: templateScheduleStorePtr(templateScheduleStore), + UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { + updateAgentMetricsFnCalled = true + assert.Equal(t, prometheusmetrics.AgentMetricLabels{ + Username: user.Username, + WorkspaceName: workspace.Name, + AgentName: agent.Name, + TemplateName: template.Name, + }, labels) + assert.Equal(t, req.Stats.Metrics, metrics) + }, + }), AgentStatsRefreshInterval: 15 * time.Second, - UpdateAgentMetricsFn: func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) { - 
updateAgentMetricsFnCalled = true - assert.Equal(t, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: agent.Name, - TemplateName: template.Name, - }, labels) - assert.Equal(t, req.Stats.Metrics, metrics) - }, TimeNowFn: func() time.Time { return now }, diff --git a/coderd/coderd.go b/coderd/coderd.go index 9c748d06eeb71..25763530db702 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -68,6 +68,7 @@ import ( "github.com/coder/coder/v2/coderd/updatecheck" "github.com/coder/coder/v2/coderd/util/slice" "github.com/coder/coder/v2/coderd/workspaceapps" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/coderd/workspaceusage" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/drpc" @@ -550,13 +551,22 @@ func New(options *Options) *API { api.Logger.Fatal(api.ctx, "failed to initialize tailnet client service", slog.Error(err)) } + api.statsReporter = workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: options.Database, + Logger: options.Logger.Named("workspacestats"), + Pubsub: options.Pubsub, + TemplateScheduleStore: options.TemplateScheduleStore, + StatsBatcher: options.StatsBatcher, + UpdateAgentMetricsFn: options.UpdateAgentMetrics, + AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, + }) workspaceAppsLogger := options.Logger.Named("workspaceapps") if options.WorkspaceAppsStatsCollectorOptions.Logger == nil { named := workspaceAppsLogger.Named("stats_collector") options.WorkspaceAppsStatsCollectorOptions.Logger = &named } if options.WorkspaceAppsStatsCollectorOptions.Reporter == nil { - options.WorkspaceAppsStatsCollectorOptions.Reporter = workspaceapps.NewStatsDBReporter(options.Database, workspaceapps.DefaultStatsDBReporterBatchSize) + options.WorkspaceAppsStatsCollectorOptions.Reporter = api.statsReporter } api.workspaceAppServer = &workspaceapps.Server{ @@ -626,8 +636,6 @@ func New(options *Options) *API { cors := 
httpmw.Cors(options.DeploymentValues.Dangerous.AllowAllCors.Value()) prometheusMW := httpmw.Prometheus(options.PrometheusRegistry) - api.statsBatcher = options.StatsBatcher - r.Use( httpmw.Recover(api.Logger), tracing.StatusWriterMiddleware, @@ -1287,7 +1295,7 @@ type API struct { healthCheckGroup *singleflight.Group[string, *healthsdk.HealthcheckReport] healthCheckCache atomic.Pointer[healthsdk.HealthcheckReport] - statsBatcher *batchstats.Batcher + statsReporter *workspacestats.Reporter Acquirer *provisionerdserver.Acquirer // dbRolluper rolls up template usage stats from raw agent and app diff --git a/coderd/database/dbauthz/setup_test.go b/coderd/database/dbauthz/setup_test.go index 3385ca3f3240c..95d8b70a42b40 100644 --- a/coderd/database/dbauthz/setup_test.go +++ b/coderd/database/dbauthz/setup_test.go @@ -264,7 +264,7 @@ func (s *MethodTestSuite) NotAuthorizedErrorTest(ctx context.Context, az *coderd // any case where the error is nil and the response is an empty slice. if err != nil || !hasEmptySliceResponse(resp) { s.Errorf(err, "method should an error with cancellation") - s.ErrorIsf(err, context.Canceled, "error should match context.Cancelled") + s.ErrorIsf(err, context.Canceled, "error should match context.Canceled") } }) } diff --git a/coderd/insights_test.go b/coderd/insights_test.go index b6a28f7b0c59b..22e7ed6947bac 100644 --- a/coderd/insights_test.go +++ b/coderd/insights_test.go @@ -31,6 +31,7 @@ import ( "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/coderd/rbac/policy" "github.com/coder/coder/v2/coderd/workspaceapps" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/workspacesdk" @@ -736,9 +737,12 @@ func TestTemplateInsights_Golden(t *testing.T) { }) } } - reporter := workspaceapps.NewStatsDBReporter(db, workspaceapps.DefaultStatsDBReporterBatchSize) + reporter := 
workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: db, + AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, + }) //nolint:gocritic // This is a test. - err = reporter.Report(dbauthz.AsSystemRestricted(ctx), stats) + err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(ctx), stats) require.NoError(t, err, "want no error inserting app stats") return client, events @@ -1632,9 +1636,12 @@ func TestUserActivityInsights_Golden(t *testing.T) { }) } } - reporter := workspaceapps.NewStatsDBReporter(db, workspaceapps.DefaultStatsDBReporterBatchSize) + reporter := workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: db, + AppStatBatchSize: workspaceapps.DefaultStatsDBReporterBatchSize, + }) //nolint:gocritic // This is a test. - err = reporter.Report(dbauthz.AsSystemRestricted(ctx), stats) + err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(ctx), stats) require.NoError(t, err, "want no error inserting app stats") return client, events diff --git a/coderd/prometheusmetrics/insights/metricscollector_test.go b/coderd/prometheusmetrics/insights/metricscollector_test.go index 598c154db08d8..91ef3c7ee88fa 100644 --- a/coderd/prometheusmetrics/insights/metricscollector_test.go +++ b/coderd/prometheusmetrics/insights/metricscollector_test.go @@ -25,6 +25,7 @@ import ( "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/prometheusmetrics/insights" "github.com/coder/coder/v2/coderd/workspaceapps" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/testutil" ) @@ -109,10 +110,13 @@ func TestCollectInsights(t *testing.T) { require.NoError(t, err, "unable to post fake stats") // Fake app usage - reporter := workspaceapps.NewStatsDBReporter(db, workspaceapps.DefaultStatsDBReporterBatchSize) + reporter := workspacestats.NewReporter(workspacestats.ReporterOptions{ + Database: db, + AppStatBatchSize: 
workspaceapps.DefaultStatsDBReporterBatchSize, + }) refTime := time.Now().Add(-3 * time.Minute).Truncate(time.Minute) //nolint:gocritic // This is a test. - err = reporter.Report(dbauthz.AsSystemRestricted(context.Background()), []workspaceapps.StatsReport{ + err = reporter.ReportAppStats(dbauthz.AsSystemRestricted(context.Background()), []workspaceapps.StatsReport{ { UserID: user.ID, WorkspaceID: workspace1.ID, diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 9faae72f22ef7..1821948572e29 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -34,9 +34,7 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" - "github.com/coder/coder/v2/coderd/prometheusmetrics" "github.com/coder/coder/v2/coderd/rbac/policy" - "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/codersdk/workspacesdk" @@ -1167,35 +1165,6 @@ func (api *API) workspaceAgentReportStats(rw http.ResponseWriter, r *http.Reques slog.F("payload", req), ) - if req.ConnectionCount > 0 { - var nextAutostart time.Time - if workspace.AutostartSchedule.String != "" { - templateSchedule, err := (*(api.TemplateScheduleStore.Load())).Get(ctx, api.Database, workspace.TemplateID) - // If the template schedule fails to load, just default to bumping without the next transition and log it. - if err != nil { - // There's nothing we can do if the query was canceled, the - // client most likely went away so we just return an internal - // server error. 
- if database.IsQueryCanceledError(err) { - httpapi.InternalServerError(rw, err) - return - } - api.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", - slog.F("workspace_id", workspace.ID), - slog.F("template_id", workspace.TemplateID), - slog.Error(err), - ) - } else { - next, allowed := schedule.NextAutostart(time.Now(), workspace.AutostartSchedule.String, templateSchedule) - if allowed { - nextAutostart = next - } - } - } - agentapi.ActivityBumpWorkspace(ctx, api.Logger.Named("activity_bump"), api.Database, workspace.ID, nextAutostart) - } - - now := dbtime.Now() protoStats := &agentproto.Stats{ ConnectionsByProto: req.ConnectionsByProto, ConnectionCount: req.ConnectionCount, @@ -1232,46 +1201,14 @@ func (api *API) workspaceAgentReportStats(rw http.ResponseWriter, r *http.Reques } } } - - var errGroup errgroup.Group - errGroup.Go(func() error { - err := api.statsBatcher.Add(time.Now(), workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, protoStats) - if err != nil { - api.Logger.Error(ctx, "failed to add stats to batcher", slog.Error(err)) - return xerrors.Errorf("can't insert workspace agent stat: %w", err) - } - return nil - }) - if req.SessionCount() > 0 { - errGroup.Go(func() error { - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
- err := api.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ - ID: workspace.ID, - LastUsedAt: now, - }) - if err != nil { - return xerrors.Errorf("can't update workspace LastUsedAt: %w", err) - } - return nil - }) - } - if api.Options.UpdateAgentMetrics != nil { - errGroup.Go(func() error { - user, err := api.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("can't get user: %w", err) - } - - api.Options.UpdateAgentMetrics(ctx, prometheusmetrics.AgentMetricLabels{ - Username: user.Username, - WorkspaceName: workspace.Name, - AgentName: workspaceAgent.Name, - TemplateName: row.TemplateName, - }, protoStats.Metrics) - return nil - }) - } - err = errGroup.Wait() + err = api.statsReporter.ReportAgentStats( + ctx, + dbtime.Now(), + workspace, + workspaceAgent, + row.TemplateName, + protoStats, + ) if err != nil { httpapi.InternalServerError(rw, err) return diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 73f0c74cd765a..e99b6a297c103 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -939,32 +939,6 @@ func TestWorkspaceAgentReportStats(t *testing.T) { agentClient.SetSessionToken(r.AgentToken) _, err := agentClient.PostStats(context.Background(), &agentsdk.Stats{ - ConnectionsByProto: map[string]int64{"TCP": 1}, - // Set connection count to 1 but all session counts to zero to - // assert we aren't updating last_used_at for a connections that may - // be spawned passively by the dashboard. 
- ConnectionCount: 1, - RxPackets: 1, - RxBytes: 1, - TxPackets: 1, - TxBytes: 1, - SessionCountVSCode: 0, - SessionCountJetBrains: 0, - SessionCountReconnectingPTY: 0, - SessionCountSSH: 0, - ConnectionMedianLatencyMS: 10, - }) - require.NoError(t, err) - - newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) - require.NoError(t, err) - - assert.True(t, - newWorkspace.LastUsedAt.Equal(r.Workspace.LastUsedAt), - "%s and %s should not differ", newWorkspace.LastUsedAt, r.Workspace.LastUsedAt, - ) - - _, err = agentClient.PostStats(context.Background(), &agentsdk.Stats{ ConnectionsByProto: map[string]int64{"TCP": 1}, ConnectionCount: 1, RxPackets: 1, @@ -979,7 +953,7 @@ func TestWorkspaceAgentReportStats(t *testing.T) { }) require.NoError(t, err) - newWorkspace, err = client.Workspace(context.Background(), r.Workspace.ID) + newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) require.NoError(t, err) assert.True(t, diff --git a/coderd/workspaceagentsrpc.go b/coderd/workspaceagentsrpc.go index a0cd4c1032e97..24b6088ddd8f2 100644 --- a/coderd/workspaceagentsrpc.go +++ b/coderd/workspaceagentsrpc.go @@ -132,7 +132,7 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) { TailnetCoordinator: &api.TailnetCoordinator, TemplateScheduleStore: api.TemplateScheduleStore, AppearanceFetcher: &api.AppearanceFetcher, - StatsBatcher: api.statsBatcher, + StatsReporter: api.statsReporter, PublishWorkspaceUpdateFn: api.publishWorkspaceUpdate, PublishWorkspaceAgentLogsUpdateFn: api.publishWorkspaceAgentLogsUpdate, diff --git a/coderd/workspaceapps/apptest/apptest.go b/coderd/workspaceapps/apptest/apptest.go index 851d8ff144eb0..3cd5e5a2f9935 100644 --- a/coderd/workspaceapps/apptest/apptest.go +++ b/coderd/workspaceapps/apptest/apptest.go @@ -1688,7 +1688,7 @@ func (r *fakeStatsReporter) stats() []workspaceapps.StatsReport { return r.s } -func (r *fakeStatsReporter) Report(_ context.Context, stats 
[]workspaceapps.StatsReport) error { +func (r *fakeStatsReporter) ReportAppStats(_ context.Context, stats []workspaceapps.StatsReport) error { r.mu.Lock() r.s = append(r.s, stats...) r.mu.Unlock() diff --git a/coderd/workspaceapps/stats.go b/coderd/workspaceapps/stats.go index 76a60c6fbb5df..53f9109c254b7 100644 --- a/coderd/workspaceapps/stats.go +++ b/coderd/workspaceapps/stats.go @@ -10,10 +10,8 @@ import ( "cdr.dev/slog" - "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbtime" - "github.com/coder/coder/v2/coderd/util/slice" ) const ( @@ -52,100 +50,7 @@ func newStatsReportFromSignedToken(token SignedToken) StatsReport { // StatsReporter reports workspace app StatsReports. type StatsReporter interface { - Report(context.Context, []StatsReport) error -} - -var _ StatsReporter = (*StatsDBReporter)(nil) - -// StatsDBReporter writes workspace app StatsReports to the database. -type StatsDBReporter struct { - db database.Store - batchSize int -} - -// NewStatsDBReporter returns a new StatsDBReporter. -func NewStatsDBReporter(db database.Store, batchSize int) *StatsDBReporter { - return &StatsDBReporter{ - db: db, - batchSize: batchSize, - } -} - -// Report writes the given StatsReports to the database. 
-func (r *StatsDBReporter) Report(ctx context.Context, stats []StatsReport) error { - err := r.db.InTx(func(tx database.Store) error { - maxBatchSize := r.batchSize - if len(stats) < maxBatchSize { - maxBatchSize = len(stats) - } - batch := database.InsertWorkspaceAppStatsParams{ - UserID: make([]uuid.UUID, 0, maxBatchSize), - WorkspaceID: make([]uuid.UUID, 0, maxBatchSize), - AgentID: make([]uuid.UUID, 0, maxBatchSize), - AccessMethod: make([]string, 0, maxBatchSize), - SlugOrPort: make([]string, 0, maxBatchSize), - SessionID: make([]uuid.UUID, 0, maxBatchSize), - SessionStartedAt: make([]time.Time, 0, maxBatchSize), - SessionEndedAt: make([]time.Time, 0, maxBatchSize), - Requests: make([]int32, 0, maxBatchSize), - } - for _, stat := range stats { - batch.UserID = append(batch.UserID, stat.UserID) - batch.WorkspaceID = append(batch.WorkspaceID, stat.WorkspaceID) - batch.AgentID = append(batch.AgentID, stat.AgentID) - batch.AccessMethod = append(batch.AccessMethod, string(stat.AccessMethod)) - batch.SlugOrPort = append(batch.SlugOrPort, stat.SlugOrPort) - batch.SessionID = append(batch.SessionID, stat.SessionID) - batch.SessionStartedAt = append(batch.SessionStartedAt, stat.SessionStartedAt) - batch.SessionEndedAt = append(batch.SessionEndedAt, stat.SessionEndedAt) - batch.Requests = append(batch.Requests, int32(stat.Requests)) - - if len(batch.UserID) >= r.batchSize { - err := tx.InsertWorkspaceAppStats(ctx, batch) - if err != nil { - return err - } - - // Reset batch. 
- batch.UserID = batch.UserID[:0] - batch.WorkspaceID = batch.WorkspaceID[:0] - batch.AgentID = batch.AgentID[:0] - batch.AccessMethod = batch.AccessMethod[:0] - batch.SlugOrPort = batch.SlugOrPort[:0] - batch.SessionID = batch.SessionID[:0] - batch.SessionStartedAt = batch.SessionStartedAt[:0] - batch.SessionEndedAt = batch.SessionEndedAt[:0] - batch.Requests = batch.Requests[:0] - } - } - if len(batch.UserID) == 0 { - return nil - } - - if err := tx.InsertWorkspaceAppStats(ctx, batch); err != nil { - return err - } - - // TODO: We currently measure workspace usage based on when we get stats from it. - // There are currently two paths for this: - // 1) From SSH -> workspace agent stats POSTed from agent - // 2) From workspace apps / rpty -> workspace app stats (from coderd / wsproxy) - // Ideally we would have a single code path for this. - uniqueIDs := slice.Unique(batch.WorkspaceID) - if err := tx.BatchUpdateWorkspaceLastUsedAt(ctx, database.BatchUpdateWorkspaceLastUsedAtParams{ - IDs: uniqueIDs, - LastUsedAt: dbtime.Now(), // This isn't 100% accurate, but it's good enough. - }); err != nil { - return err - } - - return nil - }, nil) - if err != nil { - return xerrors.Errorf("insert workspace app stats failed: %w", err) - } - - return nil + ReportAppStats(context.Context, []StatsReport) error } // This should match the database unique constraint. @@ -353,7 +258,7 @@ func (sc *StatsCollector) flush(ctx context.Context) (err error) { // backlog and the stats we're about to report, but it's not worth // the complexity. 
if len(sc.backlog) > 0 { - err = sc.opts.Reporter.Report(ctx, sc.backlog) + err = sc.opts.Reporter.ReportAppStats(ctx, sc.backlog) if err != nil { return xerrors.Errorf("report workspace app stats from backlog failed: %w", err) } @@ -366,7 +271,7 @@ func (sc *StatsCollector) flush(ctx context.Context) (err error) { return nil } - err = sc.opts.Reporter.Report(ctx, stats) + err = sc.opts.Reporter.ReportAppStats(ctx, stats) if err != nil { sc.backlog = stats return xerrors.Errorf("report workspace app stats failed: %w", err) diff --git a/coderd/workspaceapps/stats_test.go b/coderd/workspaceapps/stats_test.go index b1c4686197743..c2c722929ea83 100644 --- a/coderd/workspaceapps/stats_test.go +++ b/coderd/workspaceapps/stats_test.go @@ -43,7 +43,7 @@ func (r *fakeReporter) setError(err error) { r.err = err } -func (r *fakeReporter) Report(_ context.Context, stats []workspaceapps.StatsReport) error { +func (r *fakeReporter) ReportAppStats(_ context.Context, stats []workspaceapps.StatsReport) error { r.mu.Lock() if r.err != nil { r.errN++ diff --git a/coderd/agentapi/activitybump.go b/coderd/workspacestats/activitybump.go similarity index 96% rename from coderd/agentapi/activitybump.go rename to coderd/workspacestats/activitybump.go index a28ba695d018e..29c7dc3686dfe 100644 --- a/coderd/agentapi/activitybump.go +++ b/coderd/workspacestats/activitybump.go @@ -1,4 +1,4 @@ -package agentapi +package workspacestats import ( "context" @@ -41,7 +41,6 @@ func ActivityBumpWorkspace(ctx context.Context, log slog.Logger, db database.Sto // low priority operations fail first. ctx, cancel := context.WithTimeout(ctx, time.Second*15) defer cancel() - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
err := db.ActivityBumpWorkspace(ctx, database.ActivityBumpWorkspaceParams{ NextAutostart: nextAutostart.UTC(), WorkspaceID: workspaceID, diff --git a/coderd/agentapi/activitybump_test.go b/coderd/workspacestats/activitybump_test.go similarity index 98% rename from coderd/agentapi/activitybump_test.go rename to coderd/workspacestats/activitybump_test.go index 5c82454c97cef..3abb46b7ab343 100644 --- a/coderd/agentapi/activitybump_test.go +++ b/coderd/workspacestats/activitybump_test.go @@ -1,4 +1,4 @@ -package agentapi_test +package workspacestats_test import ( "database/sql" @@ -8,12 +8,12 @@ import ( "github.com/google/uuid" "cdr.dev/slog/sloggers/slogtest" - "github.com/coder/coder/v2/coderd/agentapi" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" "github.com/coder/coder/v2/coderd/database/dbtime" "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/testutil" "github.com/stretchr/testify/assert" @@ -272,7 +272,7 @@ func Test_ActivityBumpWorkspace(t *testing.T) { // Bump duration is measured from the time of the bump, so we measure from here. 
start := dbtime.Now() - agentapi.ActivityBumpWorkspace(ctx, log, db, bld.WorkspaceID, nextAutostart(start)) + workspacestats.ActivityBumpWorkspace(ctx, log, db, bld.WorkspaceID, nextAutostart(start)) end := dbtime.Now() // Validate our state after bump diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go new file mode 100644 index 0000000000000..ec2c6a44fcb24 --- /dev/null +++ b/coderd/workspacestats/reporter.go @@ -0,0 +1,194 @@ +package workspacestats + +import ( + "context" + "sync/atomic" + "time" + + "github.com/google/uuid" + "golang.org/x/sync/errgroup" + "golang.org/x/xerrors" + + "cdr.dev/slog" + + agentproto "github.com/coder/coder/v2/agent/proto" + "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/database/pubsub" + "github.com/coder/coder/v2/coderd/prometheusmetrics" + "github.com/coder/coder/v2/coderd/schedule" + "github.com/coder/coder/v2/coderd/util/slice" + "github.com/coder/coder/v2/coderd/workspaceapps" + "github.com/coder/coder/v2/codersdk" +) + +type StatsBatcher interface { + Add(now time.Time, agentID uuid.UUID, templateID uuid.UUID, userID uuid.UUID, workspaceID uuid.UUID, st *agentproto.Stats) error +} + +type ReporterOptions struct { + Database database.Store + Logger slog.Logger + Pubsub pubsub.Pubsub + TemplateScheduleStore *atomic.Pointer[schedule.TemplateScheduleStore] + StatsBatcher StatsBatcher + UpdateAgentMetricsFn func(ctx context.Context, labels prometheusmetrics.AgentMetricLabels, metrics []*agentproto.Stats_Metric) + + AppStatBatchSize int +} + +type Reporter struct { + opts ReporterOptions +} + +func NewReporter(opts ReporterOptions) *Reporter { + return &Reporter{opts: opts} +} + +func (r *Reporter) ReportAppStats(ctx context.Context, stats []workspaceapps.StatsReport) error { + err := r.opts.Database.InTx(func(tx database.Store) error { + maxBatchSize := r.opts.AppStatBatchSize + if len(stats) < maxBatchSize { + 
maxBatchSize = len(stats) + } + batch := database.InsertWorkspaceAppStatsParams{ + UserID: make([]uuid.UUID, 0, maxBatchSize), + WorkspaceID: make([]uuid.UUID, 0, maxBatchSize), + AgentID: make([]uuid.UUID, 0, maxBatchSize), + AccessMethod: make([]string, 0, maxBatchSize), + SlugOrPort: make([]string, 0, maxBatchSize), + SessionID: make([]uuid.UUID, 0, maxBatchSize), + SessionStartedAt: make([]time.Time, 0, maxBatchSize), + SessionEndedAt: make([]time.Time, 0, maxBatchSize), + Requests: make([]int32, 0, maxBatchSize), + } + for _, stat := range stats { + batch.UserID = append(batch.UserID, stat.UserID) + batch.WorkspaceID = append(batch.WorkspaceID, stat.WorkspaceID) + batch.AgentID = append(batch.AgentID, stat.AgentID) + batch.AccessMethod = append(batch.AccessMethod, string(stat.AccessMethod)) + batch.SlugOrPort = append(batch.SlugOrPort, stat.SlugOrPort) + batch.SessionID = append(batch.SessionID, stat.SessionID) + batch.SessionStartedAt = append(batch.SessionStartedAt, stat.SessionStartedAt) + batch.SessionEndedAt = append(batch.SessionEndedAt, stat.SessionEndedAt) + batch.Requests = append(batch.Requests, int32(stat.Requests)) + + if len(batch.UserID) >= r.opts.AppStatBatchSize { + err := tx.InsertWorkspaceAppStats(ctx, batch) + if err != nil { + return err + } + + // Reset batch. + batch.UserID = batch.UserID[:0] + batch.WorkspaceID = batch.WorkspaceID[:0] + batch.AgentID = batch.AgentID[:0] + batch.AccessMethod = batch.AccessMethod[:0] + batch.SlugOrPort = batch.SlugOrPort[:0] + batch.SessionID = batch.SessionID[:0] + batch.SessionStartedAt = batch.SessionStartedAt[:0] + batch.SessionEndedAt = batch.SessionEndedAt[:0] + batch.Requests = batch.Requests[:0] + } + } + if len(batch.UserID) == 0 { + return nil + } + + if err := tx.InsertWorkspaceAppStats(ctx, batch); err != nil { + return err + } + + // TODO: We currently measure workspace usage based on when we get stats from it. 
+ // There are currently two paths for this: + // 1) From SSH -> workspace agent stats POSTed from agent + // 2) From workspace apps / rpty -> workspace app stats (from coderd / wsproxy) + // Ideally we would have a single code path for this. + uniqueIDs := slice.Unique(batch.WorkspaceID) + if err := tx.BatchUpdateWorkspaceLastUsedAt(ctx, database.BatchUpdateWorkspaceLastUsedAtParams{ + IDs: uniqueIDs, + LastUsedAt: dbtime.Now(), // This isn't 100% accurate, but it's good enough. + }); err != nil { + return err + } + + return nil + }, nil) + if err != nil { + return xerrors.Errorf("insert workspace app stats failed: %w", err) + } + + return nil +} + +func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspace database.Workspace, workspaceAgent database.WorkspaceAgent, templateName string, stats *agentproto.Stats) error { + if stats.ConnectionCount > 0 { + var nextAutostart time.Time + if workspace.AutostartSchedule.String != "" { + templateSchedule, err := (*(r.opts.TemplateScheduleStore.Load())).Get(ctx, r.opts.Database, workspace.TemplateID) + // If the template schedule fails to load, just default to bumping + // without the next transition and log it. 
+ if err != nil { + r.opts.Logger.Error(ctx, "failed to load template schedule bumping activity, defaulting to bumping by 60min", + slog.F("workspace_id", workspace.ID), + slog.F("template_id", workspace.TemplateID), + slog.Error(err), + ) + } else { + next, allowed := schedule.NextAutostart(now, workspace.AutostartSchedule.String, templateSchedule) + if allowed { + nextAutostart = next + } + } + } + ActivityBumpWorkspace(ctx, r.opts.Logger.Named("activity_bump"), r.opts.Database, workspace.ID, nextAutostart) + } + + var errGroup errgroup.Group + errGroup.Go(func() error { + err := r.opts.StatsBatcher.Add(now, workspaceAgent.ID, workspace.TemplateID, workspace.OwnerID, workspace.ID, stats) + if err != nil { + r.opts.Logger.Error(ctx, "add agent stats to batcher", slog.Error(err)) + return xerrors.Errorf("insert workspace agent stats batch: %w", err) + } + return nil + }) + errGroup.Go(func() error { + err := r.opts.Database.UpdateWorkspaceLastUsedAt(ctx, database.UpdateWorkspaceLastUsedAtParams{ + ID: workspace.ID, + LastUsedAt: now, + }) + if err != nil { + return xerrors.Errorf("update workspace LastUsedAt: %w", err) + } + return nil + }) + if r.opts.UpdateAgentMetricsFn != nil { + errGroup.Go(func() error { + user, err := r.opts.Database.GetUserByID(ctx, workspace.OwnerID) + if err != nil { + return xerrors.Errorf("get user: %w", err) + } + + r.opts.UpdateAgentMetricsFn(ctx, prometheusmetrics.AgentMetricLabels{ + Username: user.Username, + WorkspaceName: workspace.Name, + AgentName: workspaceAgent.Name, + TemplateName: templateName, + }, stats.Metrics) + return nil + }) + } + err := errGroup.Wait() + if err != nil { + return xerrors.Errorf("update stats in database: %w", err) + } + + err = r.opts.Pubsub.Publish(codersdk.WorkspaceNotifyChannel(workspace.ID), []byte{}) + if err != nil { + r.opts.Logger.Warn(ctx, "failed to publish workspace agent stats", + slog.F("workspace_id", workspace.ID), slog.Error(err)) + } + + return nil +} diff --git 
a/enterprise/coderd/workspaceproxy.go b/enterprise/coderd/workspaceproxy.go index 22fe1bc747cbe..22bd360ccbffe 100644 --- a/enterprise/coderd/workspaceproxy.go +++ b/enterprise/coderd/workspaceproxy.go @@ -518,7 +518,7 @@ func (api *API) workspaceProxyReportAppStats(rw http.ResponseWriter, r *http.Req api.Logger.Debug(ctx, "report app stats", slog.F("stats", req.Stats)) reporter := api.WorkspaceAppsStatsCollectorOptions.Reporter - if err := reporter.Report(ctx, req.Stats); err != nil { + if err := reporter.ReportAppStats(ctx, req.Stats); err != nil { api.Logger.Error(ctx, "report app stats failed", slog.Error(err)) httpapi.InternalServerError(rw, err) return diff --git a/enterprise/wsproxy/appstatsreporter.go b/enterprise/wsproxy/appstatsreporter.go index 44ffe87e1a5e3..a4e420ddceea1 100644 --- a/enterprise/wsproxy/appstatsreporter.go +++ b/enterprise/wsproxy/appstatsreporter.go @@ -13,7 +13,7 @@ type appStatsReporter struct { Client *wsproxysdk.Client } -func (r *appStatsReporter) Report(ctx context.Context, stats []workspaceapps.StatsReport) error { +func (r *appStatsReporter) ReportAppStats(ctx context.Context, stats []workspaceapps.StatsReport) error { err := r.Client.ReportAppStats(ctx, wsproxysdk.ReportAppStatsRequest{ Stats: stats, }) diff --git a/scripts/rules.go b/scripts/rules.go index 2ff2a503b8503..46aebabab4a1a 100644 --- a/scripts/rules.go +++ b/scripts/rules.go @@ -468,7 +468,7 @@ func withTimezoneUTC(m dsl.Matcher) { At(m["tz"]) } -// workspaceActivity ensures that updating workspace activity is only done in the workspaceapps package. +// workspaceActivity ensures that updating workspace activity is only done in the workspacestats package. 
// //nolint:unused,deadcode,varnamelen func workspaceActivity(m dsl.Matcher) { @@ -481,9 +481,9 @@ func workspaceActivity(m dsl.Matcher) { `$_.InsertWorkspaceAgentStats($_, $_)`, `$_.InsertWorkspaceAppStats($_, $_)`, ).Where( - !m.File().PkgPath.Matches(`workspaceapps`) && + !m.File().PkgPath.Matches(`workspacestats`) && !m.File().PkgPath.Matches(`dbauthz$`) && !m.File().PkgPath.Matches(`dbgen$`) && !m.File().Name.Matches(`_test\.go$`), - ).Report("Updating workspace activity should always be done in the workspaceapps package.") + ).Report("Updating workspace activity should always be done in the workspacestats package.") } From bc8126fa45b7d8ea8e82f6ffc96a0317855210ee Mon Sep 17 00:00:00 2001 From: Michael Brewer Date: Wed, 29 May 2024 10:37:54 -0700 Subject: [PATCH 128/149] fix(cli): skip optional coder_external_auth (#13368) * fix(cli): skip over coder_external_auth that are optional * chore: Delete package-lock.json --- cli/cliui/externalauth.go | 3 +++ 1 file changed, 3 insertions(+) diff --git a/cli/cliui/externalauth.go b/cli/cliui/externalauth.go index 2e416ae3b5825..b1dce47994db2 100644 --- a/cli/cliui/externalauth.go +++ b/cli/cliui/externalauth.go @@ -37,6 +37,9 @@ func ExternalAuth(ctx context.Context, writer io.Writer, opts ExternalAuthOption if auth.Authenticated { return nil } + if auth.Optional { + continue + } _, _ = fmt.Fprintf(writer, "You must authenticate with %s to create a workspace with this template. 
Visit:\n\n\t%s\n\n", auth.DisplayName, auth.AuthenticateURL) From 374f0a0fd195e066a40e0205fc43b613a65eb2b7 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Wed, 29 May 2024 21:30:11 +0300 Subject: [PATCH 129/149] chore(scripts): handle renamed cherry-pick commits in release script (#13395) --- scripts/release/check_commit_metadata.sh | 35 +++++++++++++++++++++--- 1 file changed, 31 insertions(+), 4 deletions(-) diff --git a/scripts/release/check_commit_metadata.sh b/scripts/release/check_commit_metadata.sh index 906818412a4a9..ff1d61b512ebe 100755 --- a/scripts/release/check_commit_metadata.sh +++ b/scripts/release/check_commit_metadata.sh @@ -96,6 +96,8 @@ main() { # and main. These are sorted by commit title so that we can group # two cherry-picks together. declare -A cherry_pick_commits + declare -A renamed_cherry_pick_commits + declare -a renamed_cherry_pick_commits_pending git_cherry_out=$( { git log --no-merges --cherry-mark --pretty=format:"%m %H %s" "${to_ref}...origin/main" @@ -109,20 +111,45 @@ main() { # Iterate over the array in groups of two for ((i = 0; i < ${#cherry_picks[@]}; i += 2)); do mapfile -d ' ' -t parts1 <<<"${cherry_picks[i]}" - mapfile -d ' ' -t parts2 <<<"${cherry_picks[i + 1]}" commit1=${parts1[1]} title1=${parts1[*]:2} - commit2=${parts2[1]} - title2=${parts2[*]:2} + + title2= + if ((i + 1 < ${#cherry_picks[@]})); then + mapfile -d ' ' -t parts2 <<<"${cherry_picks[i + 1]}" + commit2=${parts2[1]} + title2=${parts2[*]:2} + fi if [[ ${title1} != "${title2}" ]]; then - error "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}" + log "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}, attempting to check commit body for cherry-pick information..." 
+ + renamed=$(git show "${commit1}" | sed -ne 's/.*cherry picked from commit \([0-9a-f]*\).*/\1/p') + if [[ -n ${renamed} ]]; then + log "Found renamed cherry-pick commit ${commit1} -> ${renamed}" + renamed_cherry_pick_commits[${commit1}]=${renamed} + renamed_cherry_pick_commits[${renamed}]=${commit1} + continue + else + log "Not a cherry-pick commit, adding ${commit1} to pending list..." + renamed_cherry_pick_commits_pending+=("${commit1}") + fi + # error "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}" + ((i--)) + continue fi cherry_pick_commits[${commit1}]=${commit2} cherry_pick_commits[${commit2}]=${commit1} done fi + for commit in "${renamed_cherry_pick_commits_pending[@]}"; do + log "Checking if pending commit ${commit} has a corresponding cherry-pick..." + if [[ ! -v renamed_cherry_pick_commits[${commit}] ]]; then + error "Invariant failed, cherry-picked commit ${commit} has no corresponding original commit" + fi + log "Found matching cherry-pick commit ${commit} -> ${renamed_cherry_pick_commits[${commit}]}" + done # Get abbreviated and full commit hashes and titles for each commit. git_log_out="$(git log --no-merges --left-right --pretty=format:"%m %h %H %s" "${range}")" From 9ae825ebae0d8783a8acec86dfcd031f8ed27300 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Wed, 29 May 2024 21:30:42 +0300 Subject: [PATCH 130/149] chore(scripts): push version bump pr branch in release script (#13397) --- scripts/release.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/release.sh b/scripts/release.sh index c2500aee2ec7d..3847ea7ca0e90 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -348,6 +348,9 @@ You can follow the release progress [here](https://github.com/coder/coder/action maybedryrun "${dry_run}" git stash pop fi + # Push the branch so it's available for gh to create the PR. + maybedryrun "${dry_run}" git push -u "${remote}" "${pr_branch}" + log "Creating pull request..."
maybedryrun "${dry_run}" gh pr create \ --assignee "${pr_review_assignee}" \ From 5fb231774c2d7467adf66eb3fde8b9d42f442b5d Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Wed, 29 May 2024 21:37:04 +0300 Subject: [PATCH 131/149] chore(scripts): add custom gh auth to release script (#13396) --- scripts/release.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/scripts/release.sh b/scripts/release.sh index 3847ea7ca0e90..e3a642e19e319 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -113,6 +113,18 @@ done # Check dependencies. dependencies gh jq sort +# Authenticate gh CLI. +# NOTE: Coder external-auth won't work because the GitHub App lacks permissions. +if [[ -z ${GITHUB_TOKEN:-} ]]; then + if [[ -n ${GH_TOKEN:-} ]]; then + export GITHUB_TOKEN=${GH_TOKEN} + elif token="$(gh auth token --hostname github.com 2>/dev/null)"; then + export GITHUB_TOKEN=${token} + else + error "GitHub authentication is required to run this command, please set GITHUB_TOKEN or run 'gh auth login'." + fi +fi + if [[ -z $increment ]]; then # Default to patch versions. 
increment="patch" From 9eb797eb5a2bfb115db1fe8eccad78908a5f8ec1 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Wed, 29 May 2024 22:01:10 +0300 Subject: [PATCH 132/149] chore(scripts): add safety check for difference between dry run release notes (#13398) --- scripts/release.sh | 59 ++++++++++++++++++++++++++++++++-------------- 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/scripts/release.sh b/scripts/release.sh index e3a642e19e319..66c30a6792821 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -221,8 +221,9 @@ release_notes="$(execrelative ./release/generate_release_notes.sh --old-version mkdir -p build release_notes_file="build/RELEASE-${new_version}.md" +release_notes_file_dryrun="build/RELEASE-${new_version}-DRYRUN.md" if ((dry_run)); then - release_notes_file="build/RELEASE-${new_version}-DRYRUN.md" + release_notes_file=${release_notes_file_dryrun} fi get_editor() { if command -v editor >/dev/null; then @@ -261,25 +262,47 @@ else fi log -if [[ -z ${editor} ]]; then - log "No editor found, please set the \$EDITOR environment variable for edit prompt." -else - while [[ ! ${edit:-} =~ ^[YyNn]$ ]]; do - read -p "Edit release notes in \"${editor}\"? (y/n) " -n 1 -r edit - log - done - if [[ ${edit} =~ ^[Yy]$ ]]; then - "${editor}" "${release_notes_file}" - release_notes2="$(<"$release_notes_file")" - if [[ "${release_notes}" != "${release_notes2}" ]]; then - log "Release notes have been updated!" - release_notes="${release_notes2}" - else - log "No changes detected..." +edit_release_notes() { + if [[ -z ${editor} ]]; then + log "No editor found, please set the \$EDITOR environment variable for edit prompt." + else + while [[ ! ${edit:-} =~ ^[YyNn]$ ]]; do + read -p "Edit release notes in \"${editor}\"? 
(y/n) " -n 1 -r edit + log + done + if [[ ${edit} =~ ^[Yy]$ ]]; then + "${editor}" "${release_notes_file}" + release_notes2="$(<"$release_notes_file")" + if [[ "${release_notes}" != "${release_notes2}" ]]; then + log "Release notes have been updated!" + release_notes="${release_notes2}" + else + log "No changes detected..." + fi fi fi -fi -log + log + + if ((!dry_run)) && [[ -f ${release_notes_file_dryrun} ]]; then + release_notes_dryrun="$(<"${release_notes_file_dryrun}")" + if [[ "${release_notes}" != "${release_notes_dryrun}" ]]; then + log "WARNING: Release notes differ from dry-run version:" + log + diff -u "${release_notes_file_dryrun}" "${release_notes_file}" || true + log + continue_with_new_release_notes= + while [[ ! ${continue_with_new_release_notes:-} =~ ^[YyNn]$ ]]; do + read -p "Continue with the new release notes anyway? (y/n) " -n 1 -r continue_with_new_release_notes + log + done + if [[ ${continue_with_new_release_notes} =~ ^[Nn]$ ]]; then + log + edit_release_notes + fi + fi + fi +} +edit_release_notes while [[ ! ${preview:-} =~ ^[YyNn]$ ]]; do read -p "Preview release notes? (y/n) " -n 1 -r preview From 5aea80381c90f2c5395e69eab4f4941e53d1a7b5 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Thu, 30 May 2024 11:46:18 +0400 Subject: [PATCH 133/149] fix: increases DERP send queue length to 512 for increased throughput (#13406) --- flake.nix | 2 +- go.mod | 2 +- go.sum | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/flake.nix b/flake.nix index afe7a8246feba..8cc4ea27b5f00 100644 --- a/flake.nix +++ b/flake.nix @@ -97,7 +97,7 @@ name = "coder-${osArch}"; # Updated with ./scripts/update-flake.sh`. # This should be updated whenever go.mod changes!
- vendorHash = "sha256-/tj3Pit5h3zJbJS7A970hUB5dJT8VwOUcpLLIfBL96c="; + vendorHash = "sha256-PDA+Yd/tYvMTJfw8zyperTYnSBijvECc6XjxqnYgtkw="; proxyVendor = true; src = ./.; nativeBuildInputs = with pkgs; [ getopt openssl zstd ]; diff --git a/go.mod b/go.mod index bd90484747f96..ac0b1fe109f20 100644 --- a/go.mod +++ b/go.mod @@ -42,7 +42,7 @@ replace github.com/dlclark/regexp2 => github.com/dlclark/regexp2 v1.7.0 // There are a few minor changes we make to Tailscale that we're slowly upstreaming. Compare here: // https://github.com/tailscale/tailscale/compare/main...coder:tailscale:main -replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02 +replace tailscale.com => github.com/coder/tailscale v1.1.1-0.20240530071520-1ac63d3a4ee3 // This is replaced to include // 1. a fix for a data race: c.f. https://github.com/tailscale/wireguard-go/pull/25 diff --git a/go.sum b/go.sum index d8c3c4d9654ee..aca275325bcc1 100644 --- a/go.sum +++ b/go.sum @@ -215,8 +215,8 @@ github.com/coder/serpent v0.7.0 h1:zGpD2GlF3lKIVkMjNGKbkip88qzd5r/TRcc30X/SrT0= github.com/coder/serpent v0.7.0/go.mod h1:REkJ5ZFHQUWFTPLExhXYZ1CaHFjxvGNRlLXLdsI08YA= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788 h1:YoUSJ19E8AtuUFVYBpXuOD6a/zVP3rcxezNsoDseTUw= github.com/coder/ssh v0.0.0-20231128192721-70855dedb788/go.mod h1:aGQbuCLyhRLMzZF067xc84Lh7JDs1FKwCmF1Crl9dxQ= -github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02 h1:nieqQqcg7Swa1NywfEi93yAagpj/LaszHsIt7wtq58M= -github.com/coder/tailscale v1.1.1-0.20240528123714-e0fddea2bf02/go.mod h1:rp6BIJxCp127/hvvDWNkHC9MxAlKvQfoOtBr8s5sCqo= +github.com/coder/tailscale v1.1.1-0.20240530071520-1ac63d3a4ee3 h1:F2QRxrwPJyMPmX5qU7UpwEenhsk9qDqHyvYFxON1RkI= +github.com/coder/tailscale v1.1.1-0.20240530071520-1ac63d3a4ee3/go.mod h1:rp6BIJxCp127/hvvDWNkHC9MxAlKvQfoOtBr8s5sCqo= github.com/coder/terraform-provider-coder v0.22.0 h1:L72WFa9/6sc/nnXENPS8LpWi/2NBV+DRUW0WT//pEaU= github.com/coder/terraform-provider-coder 
v0.22.0/go.mod h1:wMun9UZ9HT2CzF6qPPBup1odzBpVUc0/xSFoXgdI3tk= github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 h1:C2/eCr+r0a5Auuw3YOiSyLNHkdMtyCZHPFBx7syN4rk= From 6730c24c58b226f12196f9ba0059bc9c08455aa0 Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Thu, 30 May 2024 11:35:37 +0300 Subject: [PATCH 134/149] ci: build base image on PRs (#13409) --- .github/workflows/docker-base.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index c88bea3ef182a..a2845a4fdb5f3 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -8,6 +8,10 @@ on: - scripts/Dockerfile.base - scripts/Dockerfile + pull_request: + paths: + - scripts/Dockerfile.base + schedule: # Run every week at 09:43 on Monday, Wednesday and Friday. We build this # frequently to ensure that packages are up-to-date. @@ -57,11 +61,12 @@ jobs: platforms: linux/amd64,linux/arm64,linux/arm/v7 pull: true no-cache: true - push: true + push: github.event_name != 'pull_request' tags: | ghcr.io/coder/coder-base:latest - name: Verify that images are pushed properly + if: github.event_name != 'pull_request' run: | # retry 10 times with a 5 second delay as the images may not be # available immediately From a1671a633ced7b58e779f3dcc927f14fc7398de9 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Thu, 30 May 2024 10:36:24 +0200 Subject: [PATCH 135/149] Upgrade to git v2.45.1 to fix alpine 3.20 builds (#13411) Possibly fixes https://github.com/coder/coder/issues/13407 Signed-off-by: Danny Kopping --- scripts/Dockerfile.base | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index afa00dd4cc7a9..9537c9ca1de78 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -10,7 +10,7 @@ RUN apk add --no-cache \ curl \ wget \ bash \ - git=2.43.4-r0 \ + git=2.45.1-r0 \ openssl \ openssh-client && \ 
addgroup \ From 7a7bef0dab94435b5207cb592d1f5f6ad51e222d Mon Sep 17 00:00:00 2001 From: Muhammad Atif Ali Date: Thu, 30 May 2024 11:49:30 +0300 Subject: [PATCH 136/149] ci: fix syntax issue in docker-base.yaml (#13412) --- .github/workflows/docker-base.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docker-base.yaml b/.github/workflows/docker-base.yaml index a2845a4fdb5f3..942d80cfa4679 100644 --- a/.github/workflows/docker-base.yaml +++ b/.github/workflows/docker-base.yaml @@ -11,6 +11,7 @@ on: pull_request: paths: - scripts/Dockerfile.base + - .github/workflows/docker-base.yaml schedule: # Run every week at 09:43 on Monday, Wednesday and Friday. We build this @@ -61,7 +62,7 @@ jobs: platforms: linux/amd64,linux/arm64,linux/arm/v7 pull: true no-cache: true - push: github.event_name != 'pull_request' + push: ${{ github.event_name != 'pull_request' }} tags: | ghcr.io/coder/coder-base:latest From 7cc96f5d40f206e4edca811a4046785bf48a0deb Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 30 May 2024 10:17:26 +0100 Subject: [PATCH 137/149] chore(docs): add recommendations for dependency management (#13400) --- docs/images/icons/dependency.svg | 4 ++ docs/manifest.json | 6 ++ docs/templates/dependencies.md | 113 +++++++++++++++++++++++++++++++ 3 files changed, 123 insertions(+) create mode 100644 docs/images/icons/dependency.svg create mode 100644 docs/templates/dependencies.md diff --git a/docs/images/icons/dependency.svg b/docs/images/icons/dependency.svg new file mode 100644 index 0000000000000..1d41f51c88b9d --- /dev/null +++ b/docs/images/icons/dependency.svg @@ -0,0 +1,4 @@ + + + + diff --git a/docs/manifest.json b/docs/manifest.json index 59254175289b7..067aecac8e69c 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -143,6 +143,12 @@ "description": "Best practices for writing templates", "path": "./templates/best-practices.md", "children": [ + { + "title": "Template Dependencies", + "description": "Manage 
dependencies of your templates", + "path": "./templates/dependencies.md", + "icon_path": "./images/icons/dependency.svg" + }, { "title": "Change management", "description": "Versioning templates with git and CI", diff --git a/docs/templates/dependencies.md b/docs/templates/dependencies.md new file mode 100644 index 0000000000000..a3949f1c0e4e6 --- /dev/null +++ b/docs/templates/dependencies.md @@ -0,0 +1,113 @@ +# Template Dependencies + +When creating Coder templates, it is unlikely that you will just be using +built-in providers. Part of Terraform's flexibility stems from its rich plugin +ecosystem, and it makes sense to take advantage of this. + +That having been said, here are some recommendations to follow, based on the +[Terraform documentation](https://developer.hashicorp.com/terraform/tutorials/configuration-language/provider-versioning). + +Following these recommendations will: + +- **Prevent unexpected changes:** Your templates will use the same versions of + Terraform providers each build. This will prevent issues related to changes in + providers. +- **Improve build performance:** Coder caches provider versions on each build. + If the same provider version can be re-used on subsequent builds, Coder will + simply re-use the cached version if it is available. +- **Improve build reliability:** As some providers are hundreds of megabytes in + size, interruptions in connectivity to the Terraform registry during a + workspace build can result in a failed build. If Coder is able to re-use a + cached provider version, the likelihood of this is greatly reduced. 
+ +## Lock your provider and module versions + +If you add a Terraform provider to `required_providers` without specifying a +version requirement, Terraform will always fetch the latest version on each +invocation: + +```terraform +terraform { + required_providers { + coder = { + source = "coder/coder" + } + frobnicate = { + source = "acme/frobnicate" + } + } +} +``` + +Any new releases of the `coder` or `frobnicate` providers will be picked up upon +the next time a workspace is built using this template. This may include +breaking changes. + +To prevent this, add a +[version constraint](https://developer.hashicorp.com/terraform/language/expressions/version-constraints) +to each provider in the `required_providers` block: + +```terraform +terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">= 0.2, < 0.3" + } + frobnicate = { + source = "acme/frobnicate" + version = "~> 1.0.0" + } + } +} +``` + +In the above example, the `coder/coder` provider will be limited to all versions +above or equal to `0.2.0` and below `0.3.0`, while the `acme/frobnicate` +provider will be limited to all versions matching `1.0.x`. + +The above also applies to Terraform modules. In the below example, the module +`razzledazzle` is locked to version `1.2.3`. + +```terraform +module "razzledazzle" { + source = "registry.example.com/modules/razzle/dazzle" + version = "1.2.3" + foo = "bar" +} +``` + +## Use a Dependency Lock File + +Terraform allows creating a +[dependency lock file](https://developer.hashicorp.com/terraform/language/files/dependency-lock) +to track which provider versions were selected previously. This allows you to +ensure that the next workspace build uses the same provider versions as with the +last build. + +To create a new Terraform lock file, run the +[`terraform init` command](https://developer.hashicorp.com/terraform/cli/commands/init) +inside a folder containing the Terraform source code for a given template. 
+ +This will create a new file named `.terraform.lock.hcl` in the current +directory. When you next run [`coder templates push`](../cli/templates_push.md), +the lock file will be stored alongside with the other template source code. + +> Note: Terraform best practices also recommend checking in your +> `.terraform.lock.hcl` into Git or other VCS. + +The next time a workspace is built from that template, Coder will make sure to +use the same versions of those providers as specified in the lock file. + +If, at some point in future, you need to update the providers and versions you +specified within the version constraints of the template, run + +```console +terraform init -upgrade +``` + +This will check each provider, check the newest satisfiable version based on the +version constraints you specified, and update the `.terraform.lock.hcl` with +those new versions. When you next run `coder templates push`, again, the updated +lock file will be stored and used to determine the provider versions to use for +subsequent workspace builds. From e176867d772b2bdf7fdf5342961c2161b6b5d583 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Thu, 30 May 2024 11:31:51 +0100 Subject: [PATCH 138/149] chore: update deprecated usage of coder_workspace.owner* fields (#13390) Per https://github.com/coder/terraform-provider-coder/releases/tag/v0.23.0 Performs a mechanical rename of existing usage deprecated fields in the latest version of the coder/coder provider. 
Closes #13382 --- .github/pr-deployments/template/main.tf | 21 +++++++------ docs/admin/quotas.md | 2 +- docs/faqs.md | 2 +- dogfood/main.tf | 13 ++++---- examples/jfrog/docker/main.tf | 9 +++--- examples/parameters-dynamic-options/main.tf | 21 +++++++------ examples/parameters/main.tf | 13 ++++---- examples/templates/aws-devcontainer/main.tf | 3 +- examples/templates/aws-linux/main.tf | 3 +- examples/templates/aws-windows/main.tf | 3 +- examples/templates/azure-linux/main.tf | 3 +- .../templates/devcontainer-docker/main.tf | 19 ++++++------ .../templates/devcontainer-kubernetes/main.tf | 21 +++++++------ examples/templates/do-linux/main.tf | 7 +++-- examples/templates/docker/main.tf | 21 +++++++------ examples/templates/envbox/main.tf | 5 +-- examples/templates/gcp-devcontainer/main.tf | 9 +++--- examples/templates/gcp-linux/main.tf | 5 +-- examples/templates/gcp-vm-container/main.tf | 3 +- examples/templates/gcp-windows/main.tf | 3 +- examples/templates/incus/main.tf | 7 +++-- examples/templates/kubernetes/main.tf | 21 +++++++------ examples/templates/nomad-docker/main.tf | 7 +++-- examples/workspace-tags/main.tf | 21 +++++++------ provisioner/terraform/diagnostic_test.go | 4 +-- scaletest/templates/kubernetes-large/main.tf | 9 +++--- .../kubernetes-medium-greedy/main.tf | 9 +++--- scaletest/templates/kubernetes-medium/main.tf | 9 +++--- .../templates/kubernetes-minimal/main.tf | 17 +++++----- scaletest/templates/kubernetes-small/main.tf | 9 +++--- scaletest/templates/scaletest-runner/main.tf | 31 ++++++++++--------- scaletest/terraform/k8s/coder.tf | 9 +++--- .../TemplateFiles/TemplateFiles.stories.tsx | 2 +- .../AccountPage/AccountForm.tsx | 2 +- 34 files changed, 186 insertions(+), 157 deletions(-) diff --git a/.github/pr-deployments/template/main.tf b/.github/pr-deployments/template/main.tf index 978209a4e16d1..2bd941dd7cc3d 100644 --- a/.github/pr-deployments/template/main.tf +++ b/.github/pr-deployments/template/main.tf @@ -86,6 +86,7 @@ provider 
"kubernetes" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -175,21 +176,21 @@ resource "coder_app" "code-server" { resource "kubernetes_persistent_volume_claim" "home" { metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}-home" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}-home" namespace = var.namespace labels = { "app.kubernetes.io/name" = "coder-pvc" - "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" //Coder-specific labels. "com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } wait_until_bound = false @@ -210,20 +211,20 @@ resource "kubernetes_deployment" "main" { ] wait_for_rollout = false metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = var.namespace labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = 
"coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" "com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } diff --git a/docs/admin/quotas.md b/docs/admin/quotas.md index aa12cf328c4d1..88ca4b27860dc 100644 --- a/docs/admin/quotas.md +++ b/docs/admin/quotas.md @@ -30,7 +30,7 @@ compute: ```hcl resource "docker_volume" "home_volume" { - name = "coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}-root" + name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}-root" } resource "coder_metadata" "home_volume" { diff --git a/docs/faqs.md b/docs/faqs.md index 66c0e98a76dfc..9ee9d30ef26e1 100644 --- a/docs/faqs.md +++ b/docs/faqs.md @@ -315,7 +315,7 @@ This code produces a hashed value that will be difficult to replicate. ```hcl locals { - concatenated_string = "${data.coder_workspace.me.name}+${data.coder_workspace.me.owner}" + concatenated_string = "${data.coder_workspace.me.name}+${data.coder_workspace_owner.me.name}" hashed_string = md5(local.concatenated_string) truncated_hash = substr(local.hashed_string, 0, 16) } diff --git a/dogfood/main.tf b/dogfood/main.tf index 1656a223ae2a3..a2c1528ecaffa 100644 --- a/dogfood/main.tf +++ b/dogfood/main.tf @@ -24,7 +24,7 @@ locals { repo_base_dir = data.coder_parameter.repo_base_dir.value == "~" ? 
"/home/coder" : replace(data.coder_parameter.repo_base_dir.value, "/^~\\//", "/home/coder/") repo_dir = replace(module.git-clone.repo_dir, "/^~\\//", "/home/coder/") - container_name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" } data "coder_parameter" "repo_base_dir" { @@ -95,6 +95,7 @@ data "coder_external_auth" "github" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} module "slackme" { source = "registry.coder.com/modules/slackme/coder" @@ -267,11 +268,11 @@ resource "docker_volume" "home_volume" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -309,7 +310,7 @@ resource "docker_container" "workspace" { # Use the docker gateway if the access URL is 127.0.0.1 entrypoint = ["sh", "-c", coder_agent.dev.init_script] # CPU limits are unnecessary since Docker will load balance automatically - memory = data.coder_workspace.me.owner == "code-asher" ? 65536 : 32768 + memory = data.coder_workspace_owner.me.name == "code-asher" ? 65536 : 32768 runtime = "sysbox-runc" env = [ "CODER_AGENT_TOKEN=${coder_agent.dev.token}", @@ -330,11 +331,11 @@ resource "docker_container" "workspace" { # Add labels in Docker to keep track of orphan resources. 
labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/examples/jfrog/docker/main.tf b/examples/jfrog/docker/main.tf index 4bb807403657f..d7f1d57980906 100644 --- a/examples/jfrog/docker/main.tf +++ b/examples/jfrog/docker/main.tf @@ -15,13 +15,13 @@ terraform { locals { # Make sure to use the same field as the username field in the Artifactory # It can be either the username or the email address. - artifactory_username = data.coder_workspace.me.owner_email + artifactory_username = data.coder_workspace_owner.me.email artifactory_repository_keys = { "npm" = "npm" "python" = "python" "go" = "go" } - workspace_user = data.coder_workspace.me.owner + workspace_user = data.coder_workspace_owner.me.name jfrog_host = replace(var.jfrog_url, "^https://", "") } @@ -30,6 +30,7 @@ data "coder_provisioner" "me" {} provider "docker" {} data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} variable "jfrog_url" { type = string @@ -83,7 +84,7 @@ resource "coder_agent" "main" { # Configure the `npm` CLI to use the Artifactory "npm" repository. cat << EOF > ~/.npmrc - email = ${data.coder_workspace.me.owner_email} + email = ${data.coder_workspace_owner.me.email} registry = ${var.jfrog_url}/artifactory/api/npm/${local.artifactory_repository_keys["npm"]} EOF jf rt curl /api/npm/auth >> .npmrc @@ -149,7 +150,7 @@ resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = docker_image.main.name # Uses lower() to avoid Docker restriction on container names. 
- name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Hostname makes the shell more user friendly: coder@my-workspace:~$ hostname = data.coder_workspace.me.name entrypoint = ["sh", "-c", coder_agent.main.init_script] diff --git a/examples/parameters-dynamic-options/main.tf b/examples/parameters-dynamic-options/main.tf index d459a41f70461..19304e3b370dc 100644 --- a/examples/parameters-dynamic-options/main.tf +++ b/examples/parameters-dynamic-options/main.tf @@ -20,7 +20,7 @@ variable "java_image" { } locals { - username = data.coder_workspace.me.owner + username = data.coder_workspace_owner.me.name images = { "go" = var.go_image, @@ -33,6 +33,7 @@ data "coder_provisioner" "me" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "coder_parameter" "container_image" { name = "container_image" @@ -61,10 +62,10 @@ resource "coder_agent" "main" { EOF env = { - GIT_AUTHOR_NAME = "${data.coder_workspace.me.owner}" - GIT_COMMITTER_NAME = "${data.coder_workspace.me.owner}" - GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}" - GIT_COMMITTER_EMAIL = "${data.coder_workspace.me.owner_email}" + GIT_AUTHOR_NAME = "${data.coder_workspace_owner.me.name}" + GIT_COMMITTER_NAME = "${data.coder_workspace_owner.me.name}" + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" } } @@ -91,11 +92,11 @@ resource "docker_volume" "home_volume" { } labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -119,7 +120,7 @@ resource "coder_metadata" "home_info" { resource "docker_container" "workspace" { count = 
data.coder_workspace.me.start_count image = local.images[data.coder_parameter.container_image.value] - name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" hostname = data.coder_workspace.me.name entrypoint = ["sh", "-c", replace(coder_agent.main.init_script, "/localhost|127\\.0\\.0\\.1/", "host.docker.internal")] env = [ @@ -138,11 +139,11 @@ resource "docker_container" "workspace" { labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/examples/parameters/main.tf b/examples/parameters/main.tf index 407f09a208281..33d3e7f6aafcf 100644 --- a/examples/parameters/main.tf +++ b/examples/parameters/main.tf @@ -10,7 +10,7 @@ terraform { } locals { - username = data.coder_workspace.me.owner + username = data.coder_workspace_owner.me.name } data "coder_provisioner" "me" { @@ -21,6 +21,7 @@ provider "docker" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { arch = data.coder_provisioner.me.arch @@ -59,11 +60,11 @@ resource "docker_volume" "home_volume" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -94,7 +95,7 @@ resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = docker_image.main.name # Uses lower() to avoid Docker restriction on container names. 
- name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Hostname makes the shell more user friendly: coder@my-workspace:~$ hostname = data.coder_workspace.me.name # Use the docker gateway if the access URL is 127.0.0.1 @@ -112,11 +113,11 @@ resource "docker_container" "workspace" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/examples/templates/aws-devcontainer/main.tf b/examples/templates/aws-devcontainer/main.tf index 664ace2713798..343e7c584e09c 100644 --- a/examples/templates/aws-devcontainer/main.tf +++ b/examples/templates/aws-devcontainer/main.tf @@ -52,6 +52,7 @@ provider "aws" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "aws_ami" "ubuntu" { most_recent = true @@ -167,7 +168,7 @@ resource "aws_instance" "vm" { user_data = local.user_data tags = { - Name = "coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}" + Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" # Required if you are using our example policy, see template README Coder_Provisioned = "true" } diff --git a/examples/templates/aws-linux/main.tf b/examples/templates/aws-linux/main.tf index 5815f27ed7340..51d2f16701ee6 100644 --- a/examples/templates/aws-linux/main.tf +++ b/examples/templates/aws-linux/main.tf @@ -142,6 +142,7 @@ provider "aws" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "aws_ami" "ubuntu" { most_recent = true @@ -249,7 +250,7 @@ resource "aws_instance" "dev" { user_data = local.user_data tags = { - Name = 
"coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}" + Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" # Required if you are using our example policy, see template README Coder_Provisioned = "true" } diff --git a/examples/templates/aws-windows/main.tf b/examples/templates/aws-windows/main.tf index 1b9bb54021499..167b1b69ffc70 100644 --- a/examples/templates/aws-windows/main.tf +++ b/examples/templates/aws-windows/main.tf @@ -142,6 +142,7 @@ provider "aws" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "aws_ami" "windows" { most_recent = true @@ -187,7 +188,7 @@ resource "aws_instance" "dev" { user_data = data.coder_workspace.me.transition == "start" ? local.user_data_start : local.user_data_end tags = { - Name = "coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}" + Name = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" # Required if you are using our example policy, see template README Coder_Provisioned = "true" } diff --git a/examples/templates/azure-linux/main.tf b/examples/templates/azure-linux/main.tf index 9afb876a3c753..2fb0cd5876a7c 100644 --- a/examples/templates/azure-linux/main.tf +++ b/examples/templates/azure-linux/main.tf @@ -221,6 +221,7 @@ provider "azurerm" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { arch = "amd64" @@ -263,7 +264,7 @@ resource "coder_agent" "main" { } locals { - prefix = "coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}" + prefix = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" userdata = templatefile("cloud-config.yaml.tftpl", { username = "coder" # Ensure this user/group does not exist in your VM image diff --git a/examples/templates/devcontainer-docker/main.tf b/examples/templates/devcontainer-docker/main.tf index 349d6d41cd2ad..b400c1f0651d8 100644 --- 
a/examples/templates/devcontainer-docker/main.tf +++ b/examples/templates/devcontainer-docker/main.tf @@ -17,6 +17,7 @@ provider "docker" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { arch = data.coder_provisioner.me.arch @@ -35,10 +36,10 @@ resource "coder_agent" "main" { # You can remove this block if you'd prefer to configure Git manually or using # dotfiles. (see docs/dotfiles.md) env = { - GIT_AUTHOR_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}" - GIT_COMMITTER_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_COMMITTER_EMAIL = "${data.coder_workspace.me.owner_email}" + GIT_AUTHOR_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" } # The following metadata blocks are optional. They are used to display @@ -134,11 +135,11 @@ resource "docker_volume" "workspaces" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -207,7 +208,7 @@ resource "docker_container" "workspace" { # https://github.com/coder/envbuilder/tags image = "ghcr.io/coder/envbuilder:0.2.1" # Uses lower() to avoid Docker restriction on container names. 
- name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Hostname makes the shell more user friendly: coder@my-workspace:~$ hostname = data.coder_workspace.me.name # Use the docker gateway if the access URL is 127.0.0.1 @@ -230,11 +231,11 @@ resource "docker_container" "workspace" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/examples/templates/devcontainer-kubernetes/main.tf b/examples/templates/devcontainer-kubernetes/main.tf index f4a1c029336c8..b030c02a4a7ca 100644 --- a/examples/templates/devcontainer-kubernetes/main.tf +++ b/examples/templates/devcontainer-kubernetes/main.tf @@ -42,6 +42,7 @@ provider "kubernetes" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { arch = data.coder_provisioner.me.arch @@ -60,10 +61,10 @@ resource "coder_agent" "main" { # You can remove this block if you'd prefer to configure Git manually or using # dotfiles. 
(see docs/dotfiles.md) env = { - GIT_AUTHOR_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}" - GIT_COMMITTER_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_COMMITTER_EMAIL = "${data.coder_workspace.me.owner_email}" + GIT_AUTHOR_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" } } @@ -89,8 +90,8 @@ resource "kubernetes_persistent_volume_claim" "workspaces" { name = "coder-${data.coder_workspace.me.id}" namespace = var.namespace labels = { - "coder.owner" = data.coder_workspace.me.owner - "coder.owner_id" = data.coder_workspace.me.owner_id + "coder.owner" = data.coder_workspace_owner.me.name + "coder.owner_id" = data.coder_workspace_owner.me.id "coder.workspace_id" = data.coder_workspace.me.id "coder.workspace_name_at_creation" = data.coder_workspace.me.name } @@ -160,11 +161,11 @@ data "coder_parameter" "custom_repo_url" { resource "kubernetes_deployment" "workspace" { metadata { - name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" namespace = var.namespace labels = { - "coder.owner" = data.coder_workspace.me.owner - "coder.owner_id" = data.coder_workspace.me.owner_id + "coder.owner" = data.coder_workspace_owner.me.name + "coder.owner_id" = data.coder_workspace_owner.me.id "coder.workspace_id" = data.coder_workspace.me.id "coder.workspace_name" = data.coder_workspace.me.name } @@ -187,7 +188,7 @@ resource "kubernetes_deployment" "workspace" { } spec { container { - name = 
"coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Find the latest version here: # https://github.com/coder/envbuilder/tags image = "ghcr.io/coder/envbuilder:0.2.1" diff --git a/examples/templates/do-linux/main.tf b/examples/templates/do-linux/main.tf index 3afcaebc59806..ee83e361e5531 100644 --- a/examples/templates/do-linux/main.tf +++ b/examples/templates/do-linux/main.tf @@ -235,6 +235,7 @@ provider "digitalocean" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -259,7 +260,7 @@ resource "coder_agent" "main" { display_name = "Home Usage" interval = 600 # every 10 minutes timeout = 30 # df can take a while on large filesystems - script = "coder stat disk --path /home/${lower(data.coder_workspace.me.owner)}" + script = "coder stat disk --path /home/${lower(data.coder_workspace_owner.me.name)}" } } @@ -278,13 +279,13 @@ resource "digitalocean_volume" "home_volume" { resource "digitalocean_droplet" "workspace" { region = data.coder_parameter.region.value count = data.coder_workspace.me.start_count - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" image = data.coder_parameter.droplet_image.value size = data.coder_parameter.droplet_size.value volume_ids = [digitalocean_volume.home_volume.id] user_data = templatefile("cloud-config.yaml.tftpl", { - username = lower(data.coder_workspace.me.owner) + username = lower(data.coder_workspace_owner.me.name) home_volume_label = digitalocean_volume.home_volume.initial_filesystem_label init_script = base64encode(coder_agent.main.init_script) coder_agent_token = coder_agent.main.token diff --git a/examples/templates/docker/main.tf b/examples/templates/docker/main.tf index 
3d8bef5c594cc..6cc5344334905 100644 --- a/examples/templates/docker/main.tf +++ b/examples/templates/docker/main.tf @@ -10,7 +10,7 @@ terraform { } locals { - username = data.coder_workspace.me.owner + username = data.coder_workspace_owner.me.name } data "coder_provisioner" "me" { @@ -21,6 +21,7 @@ provider "docker" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { arch = data.coder_provisioner.me.arch @@ -44,10 +45,10 @@ resource "coder_agent" "main" { # You can remove this block if you'd prefer to configure Git manually or using # dotfiles. (see docs/dotfiles.md) env = { - GIT_AUTHOR_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}" - GIT_COMMITTER_NAME = coalesce(data.coder_workspace.me.owner_name, data.coder_workspace.me.owner) - GIT_COMMITTER_EMAIL = "${data.coder_workspace.me.owner_email}" + GIT_AUTHOR_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_NAME = coalesce(data.coder_workspace_owner.me.full_name, data.coder_workspace_owner.me.name) + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" } # The following metadata blocks are optional. They are used to display @@ -142,11 +143,11 @@ resource "docker_volume" "home_volume" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -177,7 +178,7 @@ resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = docker_image.main.name # Uses lower() to avoid Docker restriction on container names. 
- name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" # Hostname makes the shell more user friendly: coder@my-workspace:~$ hostname = data.coder_workspace.me.name # Use the docker gateway if the access URL is 127.0.0.1 @@ -196,11 +197,11 @@ resource "docker_container" "workspace" { # Add labels in Docker to keep track of orphan resources. labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/examples/templates/envbox/main.tf b/examples/templates/envbox/main.tf index e452ac450794f..3b52a65b4370d 100644 --- a/examples/templates/envbox/main.tf +++ b/examples/templates/envbox/main.tf @@ -88,6 +88,7 @@ provider "kubernetes" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -126,7 +127,7 @@ resource "coder_app" "code-server" { resource "kubernetes_persistent_volume_claim" "home" { metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}-home" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}-home" namespace = var.namespace } wait_until_bound = false @@ -144,7 +145,7 @@ resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = var.namespace } diff --git a/examples/templates/gcp-devcontainer/main.tf b/examples/templates/gcp-devcontainer/main.tf index 00323ec20a246..5beaefe82e2ee 100644 --- 
a/examples/templates/gcp-devcontainer/main.tf +++ b/examples/templates/gcp-devcontainer/main.tf @@ -61,6 +61,7 @@ data "google_compute_default_service_account" "default" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "google_compute_disk" "root" { name = "coder-${data.coder_workspace.me.id}-root" @@ -117,10 +118,10 @@ module "code-server" { } resource "google_compute_instance" "vm" { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}-root" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}-root" machine_type = "e2-medium" - # data.coder_workspace.me.owner == "default" is a workaround to suppress error in the terraform plan phase while creating a new workspace. - desired_status = (data.coder_workspace.me.owner == "default" || data.coder_workspace.me.start_count == 1) ? "RUNNING" : "TERMINATED" + # data.coder_workspace_owner.me.name == "default" is a workaround to suppress error in the terraform plan phase while creating a new workspace. + desired_status = (data.coder_workspace_owner.me.name == "default" || data.coder_workspace.me.start_count == 1) ? 
"RUNNING" : "TERMINATED" network_interface { network = "default" @@ -179,7 +180,7 @@ resource "google_compute_instance" "vm" { locals { # Ensure Coder username is a valid Linux username - linux_user = lower(substr(data.coder_workspace.me.owner, 0, 32)) + linux_user = lower(substr(data.coder_workspace_owner.me.name, 0, 32)) } resource "coder_metadata" "workspace_info" { diff --git a/examples/templates/gcp-linux/main.tf b/examples/templates/gcp-linux/main.tf index 42e3cdcd3262d..0caa01cd83bca 100644 --- a/examples/templates/gcp-linux/main.tf +++ b/examples/templates/gcp-linux/main.tf @@ -61,6 +61,7 @@ data "google_compute_default_service_account" "default" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "google_compute_disk" "root" { name = "coder-${data.coder_workspace.me.id}-root" @@ -139,7 +140,7 @@ resource "coder_app" "code-server" { resource "google_compute_instance" "dev" { zone = data.coder_parameter.zone.value count = data.coder_workspace.me.start_count - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}-root" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}-root" machine_type = "e2-medium" network_interface { network = "default" @@ -174,7 +175,7 @@ EOMETA locals { # Ensure Coder username is a valid Linux username - linux_user = lower(substr(data.coder_workspace.me.owner, 0, 32)) + linux_user = lower(substr(data.coder_workspace_owner.me.name, 0, 32)) } resource "coder_metadata" "workspace_info" { diff --git a/examples/templates/gcp-vm-container/main.tf b/examples/templates/gcp-vm-container/main.tf index d712859d222d5..a7ab81b4d2bac 100644 --- a/examples/templates/gcp-vm-container/main.tf +++ b/examples/templates/gcp-vm-container/main.tf @@ -61,6 +61,7 @@ data "google_compute_default_service_account" "default" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { auth = 
"google-instance-identity" @@ -109,7 +110,7 @@ module "gce-container" { resource "google_compute_instance" "dev" { zone = data.coder_parameter.zone.value count = data.coder_workspace.me.start_count - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" machine_type = "e2-medium" network_interface { network = "default" diff --git a/examples/templates/gcp-windows/main.tf b/examples/templates/gcp-windows/main.tf index ea9032eaf3772..74ff06109a83a 100644 --- a/examples/templates/gcp-windows/main.tf +++ b/examples/templates/gcp-windows/main.tf @@ -58,6 +58,7 @@ provider "google" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "google_compute_default_service_account" "default" { } @@ -82,7 +83,7 @@ resource "coder_agent" "main" { resource "google_compute_instance" "dev" { zone = data.coder_parameter.zone.value count = data.coder_workspace.me.start_count - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" machine_type = "e2-medium" network_interface { network = "default" diff --git a/examples/templates/incus/main.tf b/examples/templates/incus/main.tf index ec7445bcb59f6..c51d088cc152b 100644 --- a/examples/templates/incus/main.tf +++ b/examples/templates/incus/main.tf @@ -14,6 +14,7 @@ data "coder_provisioner" "me" {} provider "incus" {} data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} data "coder_parameter" "image" { name = "image" @@ -96,7 +97,7 @@ resource "coder_agent" "main" { metadata { display_name = "Home Disk" key = "3_home_disk" - script = "coder stat disk --path /home/${lower(data.coder_workspace.me.owner)}" + script = "coder stat disk --path /home/${lower(data.coder_workspace_owner.me.name)}" interval = 60 timeout = 1 } @@ -156,7 
+157,7 @@ EOF resource "incus_instance" "dev" { running = data.coder_workspace.me.start_count == 1 - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" image = incus_cached_image.image.fingerprint config = { @@ -273,7 +274,7 @@ EOF } locals { - workspace_user = lower(data.coder_workspace.me.owner) + workspace_user = lower(data.coder_workspace_owner.me.name) pool = "coder" repo_base_dir = data.coder_parameter.repo_base_dir.value == "~" ? "/home/${local.workspace_user}" : replace(data.coder_parameter.repo_base_dir.value, "/^~\\//", "/home/${local.workspace_user}/") repo_dir = module.git-clone.repo_dir diff --git a/examples/templates/kubernetes/main.tf b/examples/templates/kubernetes/main.tf index 19000d9e70d3c..649cc94c40a66 100644 --- a/examples/templates/kubernetes/main.tf +++ b/examples/templates/kubernetes/main.tf @@ -101,6 +101,7 @@ provider "kubernetes" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -189,21 +190,21 @@ resource "coder_app" "code-server" { resource "kubernetes_persistent_volume_claim" "home" { metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}-home" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}-home" namespace = var.namespace labels = { "app.kubernetes.io/name" = "coder-pvc" - "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" //Coder-specific labels. 
"com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } wait_until_bound = false @@ -224,20 +225,20 @@ resource "kubernetes_deployment" "main" { ] wait_for_rollout = false metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = var.namespace labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" "com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } diff --git a/examples/templates/nomad-docker/main.tf b/examples/templates/nomad-docker/main.tf index 28fd675b91933..97c1872f15e64 100644 --- a/examples/templates/nomad-docker/main.tf +++ b/examples/templates/nomad-docker/main.tf @@ 
-86,6 +86,7 @@ data "coder_parameter" "memory" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -127,7 +128,7 @@ resource "coder_app" "code-server" { } locals { - workspace_tag = "coder-${data.coder_workspace.me.owner}-${data.coder_workspace.me.name}" + workspace_tag = "coder-${data.coder_workspace_owner.me.name}-${data.coder_workspace.me.name}" home_volume_name = "coder_${data.coder_workspace.me.id}_home" } @@ -135,7 +136,7 @@ resource "nomad_namespace" "coder_workspace" { name = local.workspace_tag description = "Coder workspace" meta = { - owner = data.coder_workspace.me.owner + owner = data.coder_workspace_owner.me.name } } @@ -169,7 +170,7 @@ resource "nomad_job" "workspace" { count = data.coder_workspace.me.start_count depends_on = [nomad_csi_volume.home_volume] jobspec = templatefile("${path.module}/workspace.nomad.tpl", { - coder_workspace_owner = data.coder_workspace.me.owner + coder_workspace_owner = data.coder_workspace_owner.me.name coder_workspace_name = data.coder_workspace.me.name workspace_tag = local.workspace_tag cores = tonumber(data.coder_parameter.cpu.value) diff --git a/examples/workspace-tags/main.tf b/examples/workspace-tags/main.tf index f74286741cbb0..711fed869640f 100644 --- a/examples/workspace-tags/main.tf +++ b/examples/workspace-tags/main.tf @@ -10,7 +10,7 @@ terraform { } locals { - username = data.coder_workspace.me.owner + username = data.coder_workspace_owner.me.name } data "coder_provisioner" "me" { @@ -18,6 +18,7 @@ data "coder_provisioner" "me" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "coder_workspace_tags" "custom_workspace_tags" { tags = { @@ -77,10 +78,10 @@ resource "coder_agent" "main" { EOF env = { - GIT_AUTHOR_NAME = "${data.coder_workspace.me.owner}" - GIT_COMMITTER_NAME = "${data.coder_workspace.me.owner}" - GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}" - GIT_COMMITTER_EMAIL = 
"${data.coder_workspace.me.owner_email}" + GIT_AUTHOR_NAME = "${data.coder_workspace_owner.me.name}" + GIT_COMMITTER_NAME = "${data.coder_workspace_owner.me.name}" + GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}" + GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}" } } @@ -107,11 +108,11 @@ resource "docker_volume" "home_volume" { } labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" @@ -135,7 +136,7 @@ resource "coder_metadata" "home_info" { resource "docker_container" "workspace" { count = data.coder_workspace.me.start_count image = "ubuntu:22.04" - name = "coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.name)}" + name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" hostname = data.coder_workspace.me.name entrypoint = ["sh", "-c", replace(coder_agent.main.init_script, "/localhost|127\\.0\\.0\\.1/", "host.docker.internal")] env = [ @@ -153,11 +154,11 @@ resource "docker_container" "workspace" { labels { label = "coder.owner" - value = data.coder_workspace.me.owner + value = data.coder_workspace_owner.me.name } labels { label = "coder.owner_id" - value = data.coder_workspace.me.owner_id + value = data.coder_workspace_owner.me.id } labels { label = "coder.workspace_id" diff --git a/provisioner/terraform/diagnostic_test.go b/provisioner/terraform/diagnostic_test.go index 54b5b6c5c35d3..8727256b75376 100644 --- a/provisioner/terraform/diagnostic_test.go +++ b/provisioner/terraform/diagnostic_test.go @@ -23,10 +23,10 @@ func TestFormatDiagnostic(t *testing.T) { expected []string }{ "Expression": { - input: `{"@level":"error","@message":"Error: Unsupported 
attribute","@module":"terraform.ui","@timestamp":"2023-03-17T10:33:38.761493+01:00","diagnostic":{"severity":"error","summary":"Unsupported attribute","detail":"This object has no argument, nested block, or exported attribute named \"foobar\".","range":{"filename":"main.tf","start":{"line":230,"column":81,"byte":5648},"end":{"line":230,"column":88,"byte":5655}},"snippet":{"context":"resource \"docker_container\" \"workspace\"","code":" name = \"coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.foobar)}\"","start_line":230,"highlight_start_offset":80,"highlight_end_offset":87,"values":[]}},"type":"diagnostic"}`, + input: `{"@level":"error","@message":"Error: Unsupported attribute","@module":"terraform.ui","@timestamp":"2023-03-17T10:33:38.761493+01:00","diagnostic":{"severity":"error","summary":"Unsupported attribute","detail":"This object has no argument, nested block, or exported attribute named \"foobar\".","range":{"filename":"main.tf","start":{"line":230,"column":81,"byte":5648},"end":{"line":230,"column":88,"byte":5655}},"snippet":{"context":"resource \"docker_container\" \"workspace\"","code":" name = \"coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.foobar)}\"","start_line":230,"highlight_start_offset":80,"highlight_end_offset":87,"values":[]}},"type":"diagnostic"}`, expected: []string{ "on main.tf line 230, in resource \"docker_container\" \"workspace\":", - " 230: name = \"coder-${data.coder_workspace.me.owner}-${lower(data.coder_workspace.me.foobar)}\"", + " 230: name = \"coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.foobar)}\"", "", "This object has no argument, nested block, or exported attribute named \"foobar\".", }, diff --git a/scaletest/templates/kubernetes-large/main.tf b/scaletest/templates/kubernetes-large/main.tf index 161d4448bab64..b195f3574666a 100644 --- a/scaletest/templates/kubernetes-large/main.tf +++ b/scaletest/templates/kubernetes-large/main.tf @@ -2,11 
+2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.7.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -24,6 +24,7 @@ variable "kubernetes_nodepool_workspaces" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -35,11 +36,11 @@ resource "coder_agent" "main" { resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = "coder-big" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/scaletest/templates/kubernetes-medium-greedy/main.tf b/scaletest/templates/kubernetes-medium-greedy/main.tf index 8a70eced34426..f1fa04b2d6c3f 100644 --- a/scaletest/templates/kubernetes-medium-greedy/main.tf +++ b/scaletest/templates/kubernetes-medium-greedy/main.tf @@ -2,11 +2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.7.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -24,6 +24,7 @@ variable "kubernetes_nodepool_workspaces" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -149,11 +150,11 @@ resource "coder_agent" "main" { resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = 
"coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = "coder-big" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/scaletest/templates/kubernetes-medium/main.tf b/scaletest/templates/kubernetes-medium/main.tf index 5e3980a0e252e..656e47dd44011 100644 --- a/scaletest/templates/kubernetes-medium/main.tf +++ b/scaletest/templates/kubernetes-medium/main.tf @@ -2,11 +2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.7.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -24,6 +24,7 @@ variable "kubernetes_nodepool_workspaces" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -35,11 +36,11 @@ resource "coder_agent" "main" { resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = "coder-big" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/scaletest/templates/kubernetes-minimal/main.tf 
b/scaletest/templates/kubernetes-minimal/main.tf index 7ad97f7a89e85..514e8bffd4c38 100644 --- a/scaletest/templates/kubernetes-minimal/main.tf +++ b/scaletest/templates/kubernetes-minimal/main.tf @@ -2,11 +2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.12.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -24,6 +24,7 @@ variable "kubernetes_nodepool_workspaces" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "m" { os = "linux" @@ -92,24 +93,24 @@ resource "coder_app" "ws_discard" { resource "kubernetes_deployment" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = "coder-big" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" "com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } } spec { replicas = 1 selector { match_labels = { - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = 
"coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } strategy { @@ -119,7 +120,7 @@ resource "kubernetes_deployment" "main" { metadata { labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/scaletest/templates/kubernetes-small/main.tf b/scaletest/templates/kubernetes-small/main.tf index 0c81ba245b1df..9da2d59a9c38e 100644 --- a/scaletest/templates/kubernetes-small/main.tf +++ b/scaletest/templates/kubernetes-small/main.tf @@ -2,11 +2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.7.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -24,6 +24,7 @@ variable "kubernetes_nodepool_workspaces" { } data "coder_workspace" "me" {} +data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -35,11 +36,11 @@ resource "coder_agent" "main" { resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + name = "coder-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" namespace = "coder-big" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/scaletest/templates/scaletest-runner/main.tf b/scaletest/templates/scaletest-runner/main.tf index 
2d17c66435f62..450fab44dce6c 100644 --- a/scaletest/templates/scaletest-runner/main.tf +++ b/scaletest/templates/scaletest-runner/main.tf @@ -2,11 +2,11 @@ terraform { required_providers { coder = { source = "coder/coder" - version = "~> 0.12" + version = "~> 0.23" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.22" + version = "~> 2.30" } } } @@ -17,7 +17,7 @@ resource "time_static" "start_time" { # the scaletest is restarted. triggers = { count : data.coder_workspace.me.start_count - token : data.coder_workspace.me.owner_session_token # Rely on this being re-generated every start. + token : data.coder_workspace_owner.me.session_token # Rely on this being re-generated every start. } } @@ -29,15 +29,15 @@ resource "null_resource" "permission_check" { # for the plan, and consequently, updating the template. lifecycle { precondition { - condition = can(regex("^(default/default|scaletest/runner)$", "${data.coder_workspace.me.owner}/${data.coder_workspace.me.name}")) + condition = can(regex("^(default/default|scaletest/runner)$", "${data.coder_workspace_owner.me.name}/${data.coder_workspace.me.name}")) error_message = "User and workspace name is not allowed, expected 'scaletest/runner'." } } } locals { - workspace_pod_name = "coder-scaletest-runner-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" - workspace_pod_instance = "coder-workspace-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + workspace_pod_name = "coder-scaletest-runner-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" + workspace_pod_instance = "coder-workspace-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" workspace_pod_termination_grace_period_seconds = 5 * 60 * 60 # 5 hours (cleanup timeout). 
service_account_name = "scaletest-sa" home_disk_size = 10 @@ -54,6 +54,7 @@ data "coder_provisioner" "me" { data "coder_workspace" "me" { } +data "coder_workspace_owner" "me" {} data "coder_parameter" "verbose" { order = 1 @@ -562,9 +563,9 @@ resource "coder_agent" "main" { VERBOSE : data.coder_parameter.verbose.value ? "1" : "0", DRY_RUN : data.coder_parameter.dry_run.value ? "1" : "0", CODER_CONFIG_DIR : "/home/coder/.config/coderv2", - CODER_USER_TOKEN : data.coder_workspace.me.owner_session_token, + CODER_USER_TOKEN : data.coder_workspace_owner.me.session_token, CODER_URL : data.coder_workspace.me.access_url, - CODER_USER : data.coder_workspace.me.owner, + CODER_USER : data.coder_workspace_owner.me.name, CODER_WORKSPACE : data.coder_workspace.me.name, # Global scaletest envs that may affect each `coder exp scaletest` invocation. @@ -756,17 +757,17 @@ resource "kubernetes_persistent_volume_claim" "home" { namespace = data.coder_parameter.namespace.value labels = { "app.kubernetes.io/name" = "coder-pvc" - "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace.me.owner)}-${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-pvc-${lower(data.coder_workspace_owner.me.name)}-${lower(data.coder_workspace.me.name)}" "app.kubernetes.io/part-of" = "coder" // Coder specific labels. 
"com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } wait_until_bound = false @@ -794,11 +795,11 @@ resource "kubernetes_pod" "main" { "com.coder.resource" = "true" "com.coder.workspace.id" = data.coder_workspace.me.id "com.coder.workspace.name" = data.coder_workspace.me.name - "com.coder.user.id" = data.coder_workspace.me.owner_id - "com.coder.user.username" = data.coder_workspace.me.owner + "com.coder.user.id" = data.coder_workspace_owner.me.id + "com.coder.user.username" = data.coder_workspace_owner.me.name } annotations = { - "com.coder.user.email" = data.coder_workspace.me.owner_email + "com.coder.user.email" = data.coder_workspace_owner.me.email } } # Set the pod delete timeout to termination_grace_period_seconds + 1m. 
diff --git a/scaletest/terraform/k8s/coder.tf b/scaletest/terraform/k8s/coder.tf index 7e8fcaa7128c4..ea83317127fd8 100644 --- a/scaletest/terraform/k8s/coder.tf +++ b/scaletest/terraform/k8s/coder.tf @@ -284,11 +284,11 @@ resource "local_file" "kubernetes_template" { required_providers { coder = { source = "coder/coder" - version = "~> 0.7.0" + version = "~> 0.23.0" } kubernetes = { source = "hashicorp/kubernetes" - version = "~> 2.18" + version = "~> 2.30" } } } @@ -300,6 +300,7 @@ resource "local_file" "kubernetes_template" { } data "coder_workspace" "me" {} + data "coder_workspace_owner" "me" {} resource "coder_agent" "main" { os = "linux" @@ -309,11 +310,11 @@ resource "local_file" "kubernetes_template" { resource "kubernetes_pod" "main" { count = data.coder_workspace.me.start_count metadata { - name = "coder-$${lower(data.coder_workspace.me.owner)}-$${lower(data.coder_workspace.me.name)}" + name = "coder-$${lower(data.coder_workspace_owner.me.name)}-$${lower(data.coder_workspace.me.name)}" namespace = "${local.coder_namespace}" labels = { "app.kubernetes.io/name" = "coder-workspace" - "app.kubernetes.io/instance" = "coder-workspace-$${lower(data.coder_workspace.me.owner)}-$${lower(data.coder_workspace.me.name)}" + "app.kubernetes.io/instance" = "coder-workspace-$${lower(data.coder_workspace_owner.me.name)}-$${lower(data.coder_workspace.me.name)}" } } spec { diff --git a/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx b/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx index e7f4e815b6003..8222a9bb4cca3 100644 --- a/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx +++ b/site/src/modules/templates/TemplateFiles/TemplateFiles.stories.tsx @@ -8,7 +8,7 @@ const exampleFiles = { "build/Dockerfile": 'FROM ubuntu\n\nRUN apt-get update \\\n\t&& apt-get install -y \\\n\tcurl \\\n\tgit \\\n\tgolang \\\n\tsudo \\\n\tvim \\\n\twget \\\n\t&& rm -rf /var/lib/apt/lists/*\n\nARG USER=coder\nRUN useradd --groups sudo 
--no-create-home --shell /bin/bash ${USER} \\\n\t&& echo "${USER} ALL=(ALL) NOPASSWD:ALL" >/etc/sudoers.d/${USER} \\\n\t&& chmod 0440 /etc/sudoers.d/${USER}\nUSER ${USER}\nWORKDIR /home/${USER}\n', "main.tf": - 'terraform {\n required_providers {\n coder = {\n source = "coder/coder"\n }\n docker = {\n source = "kreuzwerker/docker"\n }\n }\n}\n\nlocals {\n username = data.coder_workspace.me.owner\n}\n\ndata "coder_provisioner" "me" {\n}\n\nprovider "docker" {\n}\n\ndata "coder_workspace" "me" {\n}\n\nresource "coder_agent" "main" {\n arch = data.coder_provisioner.me.arch\n os = "linux"\n startup_script_timeout = 180\n startup_script = <<-EOT\n set -e\n\n # install and start code-server\n curl -fsSL https://code-server.dev/install.sh | sh -s -- --method=standalone --prefix=/tmp/code-server --version 4.11.0\n /tmp/code-server/bin/code-server --auth none --port 13337 >/tmp/code-server.log 2>&1 &\n EOT\n\n # These environment variables allow you to make Git commits right away after creating a\n # workspace. Note that they take precedence over configuration defined in ~/.gitconfig!\n # You can remove this block if you\'d prefer to configure Git manually or using\n # dotfiles. (see docs/dotfiles.md)\n env = {\n GIT_AUTHOR_NAME = "${data.coder_workspace.me.owner}"\n GIT_COMMITTER_NAME = "${data.coder_workspace.me.owner}"\n GIT_AUTHOR_EMAIL = "${data.coder_workspace.me.owner_email}"\n GIT_COMMITTER_EMAIL = "${data.coder_workspace.me.owner_email}"\n }\n\n # The following metadata blocks are optional. They are used to display\n # information about your workspace in the dashboard. 
You can remove them\n # if you don\'t want to display any information.\n # For basic resources, you can use the `coder stat` command.\n # If you need more control, you can write your own script.\n metadata {\n display_name = "CPU Usage"\n key = "0_cpu_usage"\n script = "coder stat cpu"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "RAM Usage"\n key = "1_ram_usage"\n script = "coder stat mem"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Home Disk"\n key = "3_home_disk"\n script = "coder stat disk --path $${HOME}"\n interval = 60\n timeout = 1\n }\n\n metadata {\n display_name = "CPU Usage (Host)"\n key = "4_cpu_usage_host"\n script = "coder stat cpu --host"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Memory Usage (Host)"\n key = "5_mem_usage_host"\n script = "coder stat mem --host"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Load Average (Host)"\n key = "6_load_host"\n # get load avg scaled by number of cores\n script = </tmp/code-server.log 2>&1 &\n EOT\n\n # These environment variables allow you to make Git commits right away after creating a\n # workspace. Note that they take precedence over configuration defined in ~/.gitconfig!\n # You can remove this block if you\'d prefer to configure Git manually or using\n # dotfiles. (see docs/dotfiles.md)\n env = {\n GIT_AUTHOR_NAME = "${data.coder_workspace_owner.me.name}"\n GIT_COMMITTER_NAME = "${data.coder_workspace_owner.me.name}"\n GIT_AUTHOR_EMAIL = "${data.coder_workspace_owner.me.email}"\n GIT_COMMITTER_EMAIL = "${data.coder_workspace_owner.me.email}"\n }\n\n # The following metadata blocks are optional. They are used to display\n # information about your workspace in the dashboard. 
You can remove them\n # if you don\'t want to display any information.\n # For basic resources, you can use the `coder stat` command.\n # If you need more control, you can write your own script.\n metadata {\n display_name = "CPU Usage"\n key = "0_cpu_usage"\n script = "coder stat cpu"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "RAM Usage"\n key = "1_ram_usage"\n script = "coder stat mem"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Home Disk"\n key = "3_home_disk"\n script = "coder stat disk --path $${HOME}"\n interval = 60\n timeout = 1\n }\n\n metadata {\n display_name = "CPU Usage (Host)"\n key = "4_cpu_usage_host"\n script = "coder stat cpu --host"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Memory Usage (Host)"\n key = "5_mem_usage_host"\n script = "coder stat mem --host"\n interval = 10\n timeout = 1\n }\n\n metadata {\n display_name = "Load Average (Host)"\n key = "6_load_host"\n # get load avg scaled by number of cores\n script = < = { diff --git a/site/src/pages/UserSettingsPage/AccountPage/AccountForm.tsx b/site/src/pages/UserSettingsPage/AccountPage/AccountForm.tsx index 28c772620e97e..84cdd04b39144 100644 --- a/site/src/pages/UserSettingsPage/AccountPage/AccountForm.tsx +++ b/site/src/pages/UserSettingsPage/AccountPage/AccountForm.tsx @@ -83,7 +83,7 @@ export const AccountForm: FC = ({ disabled={!editable} fullWidth label={Language.nameLabel} - helperText='The human-readable name is optional and can be accessed in a template via the "data.coder_workspace.me.owner_name" property.' + helperText='The human-readable name is optional and can be accessed in a template via the "data.coder_workspace_owner.me.full_name" property.' />
From 59ab5053b190b0982353a43cb3b9d7493ae8a4fe Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Thu, 30 May 2024 13:33:00 +0200 Subject: [PATCH 139/149] fix: return error if agent init script fails to download valid binary (#13280) --- provisionersdk/agent.go | 6 +- provisionersdk/agent_test.go | 137 ++++++++++++++++--- provisionersdk/scripts/bootstrap_darwin.sh | 12 +- provisionersdk/scripts/bootstrap_linux.sh | 12 +- provisionersdk/scripts/bootstrap_windows.ps1 | 13 ++ scripts/check_site_icons.sh | 2 +- 6 files changed, 153 insertions(+), 29 deletions(-) diff --git a/provisionersdk/agent.go b/provisionersdk/agent.go index 1a285577fabda..ce7abf1c0da67 100644 --- a/provisionersdk/agent.go +++ b/provisionersdk/agent.go @@ -39,9 +39,9 @@ var ( } ) -// AgentScriptEnv returns a key-pair of scripts that are consumed -// by the Coder Terraform Provider. See: -// https://github.com/coder/terraform-provider-coder/blob/main/internal/provider/provider.go#L97 +// AgentScriptEnv returns a key-pair of scripts that are consumed by the Coder Terraform Provider. +// https://github.com/coder/terraform-provider-coder/blob/main/provider/agent.go (updateInitScript) +// performs additional string substitutions. func AgentScriptEnv() map[string]string { env := map[string]string{} for operatingSystem, scripts := range agentScripts { diff --git a/provisionersdk/agent_test.go b/provisionersdk/agent_test.go index 96c0d531ad6f7..60a973c740340 100644 --- a/provisionersdk/agent_test.go +++ b/provisionersdk/agent_test.go @@ -7,6 +7,9 @@ package provisionersdk_test import ( + "bytes" + "context" + "errors" "fmt" "net/http" "net/http/httptest" @@ -14,50 +17,142 @@ import ( "os/exec" "runtime" "strings" + "sync" "testing" + "time" "github.com/go-chi/render" "github.com/stretchr/testify/require" + "github.com/coder/coder/v2/testutil" + "github.com/coder/coder/v2/provisionersdk" ) +// mimicking the --version output which we use to test the binary (see provisionersdk/scripts/bootstrap_*). 
+const versionOutput = `Coder v2.11.0+8979bfe Tue May 7 17:30:19 UTC 2024` + // bashEcho is a script that calls the local `echo` with the arguments. This is preferable to // sending the real `echo` binary since macOS 14.4+ immediately sigkills `echo` if it is copied to // another directory and run locally. const bashEcho = `#!/usr/bin/env bash -echo $@` +echo "` + versionOutput + `"` + +const unexpectedEcho = `#!/usr/bin/env bash +echo "this is not the agent you are looking for"` func TestAgentScript(t *testing.T) { t.Parallel() - t.Run("Run", func(t *testing.T) { + + t.Run("Valid", func(t *testing.T) { t.Parallel() - srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { - render.Status(r, http.StatusOK) - render.Data(rw, r, []byte(bashEcho)) - })) - defer srv.Close() - srvURL, err := url.Parse(srv.URL) - require.NoError(t, err) - script, exists := provisionersdk.AgentScriptEnv()[fmt.Sprintf("CODER_AGENT_SCRIPT_%s_%s", runtime.GOOS, runtime.GOARCH)] - if !exists { - t.Skip("Agent not supported...") - return - } - script = strings.ReplaceAll(script, "${ACCESS_URL}", srvURL.String()+"/") - script = strings.ReplaceAll(script, "${AUTH_TYPE}", "token") + script := serveScript(t, bashEcho) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + t.Cleanup(cancel) + + var output bytes.Buffer // This is intentionally ran in single quotes to mimic how a customer may // embed our script. Our scripts should not include any single quotes. 
// nolint:gosec - output, err := exec.Command("sh", "-c", "sh -c '"+script+"'").CombinedOutput() - t.Log(string(output)) + cmd := exec.CommandContext(ctx, "sh", "-c", "sh -c '"+script+"'") + cmd.Stdout = &output + cmd.Stderr = &output + require.NoError(t, cmd.Start()) + + err := cmd.Wait() + if err != nil { + var exitErr *exec.ExitError + if errors.As(err, &exitErr) { + require.Equal(t, 0, exitErr.ExitCode()) + } else { + t.Fatalf("unexpected err: %s", err) + } + } + + t.Log(output.String()) require.NoError(t, err) // Ignore debug output from `set -x`, we're only interested in the last line. - lines := strings.Split(strings.TrimSpace(string(output)), "\n") + lines := strings.Split(strings.TrimSpace(output.String()), "\n") lastLine := lines[len(lines)-1] - // Because we use the "echo" binary, we should expect the arguments provided + // When we use the "bashEcho" binary, we should expect the arguments provided // as the response to executing our script. - require.Equal(t, "agent", lastLine) + require.Equal(t, versionOutput, lastLine) }) + + t.Run("Invalid", func(t *testing.T) { + t.Parallel() + + script := serveScript(t, unexpectedEcho) + + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitShort) + t.Cleanup(cancel) + + var output bytes.Buffer + // This is intentionally ran in single quotes to mimic how a customer may + // embed our script. Our scripts should not include any single quotes. + // nolint:gosec + cmd := exec.CommandContext(ctx, "sh", "-c", "sh -c '"+script+"'") + cmd.WaitDelay = time.Second + cmd.Stdout = &output + cmd.Stderr = &output + require.NoError(t, cmd.Start()) + + done := make(chan error, 1) + var wg sync.WaitGroup + wg.Add(1) + go func() { + defer wg.Done() + + // The bootstrap scripts trap exit codes to allow operators to view the script logs and debug the process + // while it is still running. We do not expect Wait() to complete. + err := cmd.Wait() + done <- err + }() + + select { + case <-ctx.Done(): + // Timeout. 
+ break + case err := <-done: + // If done signals before context times out, script behaved in an unexpected way. + if err != nil { + t.Fatalf("unexpected err: %s", err) + } + } + + // Kill the command, wait for the command to yield. + require.NoError(t, cmd.Cancel()) + wg.Wait() + + t.Log(output.String()) + + require.Eventually(t, func() bool { + return bytes.Contains(output.Bytes(), []byte("ERROR: Downloaded agent binary returned unexpected version output")) + }, testutil.WaitShort, testutil.IntervalSlow) + }) +} + +// serveScript creates a fake HTTP server which serves a requested "agent binary" (which is actually the given input string) +// which will be attempted to run to verify that it is correct. +func serveScript(t *testing.T, in string) string { + t.Helper() + + srv := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + render.Status(r, http.StatusOK) + render.Data(rw, r, []byte(in)) + })) + t.Cleanup(srv.Close) + srvURL, err := url.Parse(srv.URL) + require.NoError(t, err) + + script, exists := provisionersdk.AgentScriptEnv()[fmt.Sprintf("CODER_AGENT_SCRIPT_%s_%s", runtime.GOOS, runtime.GOARCH)] + if !exists { + t.Skip("Agent not supported...") + return "" + } + script = strings.ReplaceAll(script, "${ACCESS_URL}", srvURL.String()+"/") + script = strings.ReplaceAll(script, "${AUTH_TYPE}", "token") + return script } diff --git a/provisionersdk/scripts/bootstrap_darwin.sh b/provisionersdk/scripts/bootstrap_darwin.sh index 70158594de7d6..501e43997619e 100644 --- a/provisionersdk/scripts/bootstrap_darwin.sh +++ b/provisionersdk/scripts/bootstrap_darwin.sh @@ -4,7 +4,7 @@ set -eux # This is to allow folks to exec into a failed workspace and poke around to # troubleshoot. waitonexit() { - echo "=== Agent script exited with non-zero code. Sleeping 24h to preserve logs..." + echo "=== Agent script exited with non-zero code ($?). Sleeping 24h to preserve logs..." 
sleep 86400 } trap waitonexit EXIT @@ -31,4 +31,12 @@ fi export CODER_AGENT_AUTH="${AUTH_TYPE}" export CODER_AGENT_URL="${ACCESS_URL}" -exec ./$BINARY_NAME agent + +output=$(./${BINARY_NAME} --version | head -n1) +if ! echo "${output}" | grep -q Coder; then + echo >&2 "ERROR: Downloaded agent binary returned unexpected version output" + echo >&2 "${BINARY_NAME} --version output: \"${output}\"" + exit 2 +fi + +exec ./${BINARY_NAME} agent diff --git a/provisionersdk/scripts/bootstrap_linux.sh b/provisionersdk/scripts/bootstrap_linux.sh index faf4b4a9bbfac..c07cbc3e01667 100755 --- a/provisionersdk/scripts/bootstrap_linux.sh +++ b/provisionersdk/scripts/bootstrap_linux.sh @@ -4,7 +4,7 @@ set -eux # This is to allow folks to exec into a failed workspace and poke around to # troubleshoot. waitonexit() { - echo "=== Agent script exited with non-zero code. Sleeping 24h to preserve logs..." + echo "=== Agent script exited with non-zero code ($?). Sleeping 24h to preserve logs..." sleep 86400 } trap waitonexit EXIT @@ -86,4 +86,12 @@ fi export CODER_AGENT_AUTH="${AUTH_TYPE}" export CODER_AGENT_URL="${ACCESS_URL}" -exec ./$BINARY_NAME agent + +output=$(./${BINARY_NAME} --version | head -n1) +if ! 
echo "${output}" | grep -q Coder; then + echo >&2 "ERROR: Downloaded agent binary returned unexpected version output" + echo >&2 "${BINARY_NAME} --version output: \"${output}\"" + exit 2 +fi + +exec ./${BINARY_NAME} agent diff --git a/provisionersdk/scripts/bootstrap_windows.ps1 b/provisionersdk/scripts/bootstrap_windows.ps1 index e51dd9415a790..0c8381ef936ca 100644 --- a/provisionersdk/scripts/bootstrap_windows.ps1 +++ b/provisionersdk/scripts/bootstrap_windows.ps1 @@ -35,6 +35,19 @@ if (-not (Get-Command 'Set-MpPreference' -ErrorAction SilentlyContinue)) { $env:CODER_AGENT_AUTH = "${AUTH_TYPE}" $env:CODER_AGENT_URL = "${ACCESS_URL}" +$psi = [System.Diagnostics.ProcessStartInfo]::new("$env:TEMP\sshd.exe", '--version') +$psi.UseShellExecute = $false +$psi.RedirectStandardOutput = $true +$p = [System.Diagnostics.Process]::Start($psi) +$output = $p.StandardOutput.ReadToEnd() +$p.WaitForExit() + +if ($output -notlike "*Coder*") { + Write-Output "$env:TEMP\sshd.exe --version output: `"$output"`" + Write-Error "ERROR: Downloaded agent binary returned unexpected version output" + Throw "unexpected binary" +} + # Check if we're running inside a Windows container! 
$inContainer = $false if ((Get-ItemProperty 'HKLM:\SYSTEM\CurrentControlSet\Control' -Name 'ContainerType' -ErrorAction SilentlyContinue) -ne $null) { diff --git a/scripts/check_site_icons.sh b/scripts/check_site_icons.sh index 3ccd6b02cac41..8b0c390a7b1e4 100755 --- a/scripts/check_site_icons.sh +++ b/scripts/check_site_icons.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail # shellcheck source=scripts/lib.sh From 7569cccc517751fceaaebefd74a76dcffbaf7e02 Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Thu, 30 May 2024 15:58:32 +0200 Subject: [PATCH 140/149] chore: remove git pinning (#13414) Alpine 3.20 includes 2.45.1 by default: https://git.alpinelinux.org/aports/tree/main/git/APKBUILD?h=3.20-stable#n56 Follow-up from https://github.com/coder/coder/pull/13411#issuecomment-2139028721 Signed-off-by: Danny Kopping --- scripts/Dockerfile.base | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index 9537c9ca1de78..df6cb4637a366 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -10,7 +10,7 @@ RUN apk add --no-cache \ curl \ wget \ bash \ - git=2.45.1-r0 \ + git \ openssl \ openssh-client && \ addgroup \ From bee4ece1b9cc091ce007c49ea396e056c45e534b Mon Sep 17 00:00:00 2001 From: Kira Pilot Date: Thu, 30 May 2024 10:39:17 -0400 Subject: [PATCH 141/149] fix: update install.sh to remove dead doc link (#13308) * chore(docs): update install.sh to remove dead doc link * Update install.sh Co-authored-by: Kyle Carberry * escaping script properly --------- Co-authored-by: Kyle Carberry --- install.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/install.sh b/install.sh index c5dd828b2652d..cabbdc685f2c6 100755 --- a/install.sh +++ b/install.sh @@ -240,9 +240,9 @@ There is another binary in your PATH that conflicts with the binary we've instal $1 -This is likely because of an existing installation of Coder. 
See our documentation for suggestions on how to resolve this. +This is likely because of an existing installation of Coder in your \$PATH. - https://coder.com/docs/v2/latest/install/install.sh#path-conflicts +Run \`which -a coder\` to view all installations. EOF } From 4758952ebc30c26f1bd6b420eb10b836ac8e10e9 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Thu, 30 May 2024 20:24:41 +0300 Subject: [PATCH 142/149] chore(scripts): fix expression interpreted as exit code on some Bash versions (#13417) --- scripts/release/check_commit_metadata.sh | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/scripts/release/check_commit_metadata.sh b/scripts/release/check_commit_metadata.sh index ff1d61b512ebe..def18071af019 100755 --- a/scripts/release/check_commit_metadata.sh +++ b/scripts/release/check_commit_metadata.sh @@ -122,7 +122,7 @@ main() { fi if [[ ${title1} != "${title2}" ]]; then - log "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}, attempting to check commit body for cherry-pick information..." + log "Invariant failed, cherry-picked commits have different titles: \"${title1%$'\n'}\" != \"${title2%$'\n'}\", attempting to check commit body for cherry-pick information..." renamed=$(git show "${commit1}" | sed -ne 's/.*cherry picked from commit \([0-9a-f]*\).*/\1/p') if [[ -n ${renamed} ]]; then @@ -130,12 +130,11 @@ main() { renamed_cherry_pick_commits[${commit1}]=${renamed} renamed_cherry_pick_commits[${renamed}]=${commit1} continue - else - log "Not a cherry-pick commit, adding ${commit1} to pending list..." - renamed_cherry_pick_commits_pending+=("${commit1}") fi - # error "Invariant failed, cherry-picked commits have different titles: ${title1} != ${title2}" - ((i--)) + + log "Not a cherry-pick commit, adding ${commit1} to pending list..." 
+ renamed_cherry_pick_commits_pending+=("${commit1}") + i=$((i - 1)) continue fi From 19530c6b44c69fd23dc213cde272e9dacabf3011 Mon Sep 17 00:00:00 2001 From: Michael Smith Date: Fri, 31 May 2024 10:23:59 -0400 Subject: [PATCH 143/149] fix: update `DeleteWorkspaceOptions` to pick properties correctly (#13423) * fix: update typo * fix: update typo in call site * fix: update type for deleteWorkspace mock * fix: update one more type mismatch --- site/src/api/api.ts | 2 +- site/src/pages/WorkspacePage/WorkspacePage.test.tsx | 10 +++++++--- site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx | 2 +- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/site/src/api/api.ts b/site/src/api/api.ts index 8baa6a5edfc1c..a7550f44fdb90 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -314,7 +314,7 @@ type RestartWorkspaceParameters = Readonly<{ export type DeleteWorkspaceOptions = Pick< TypesGen.CreateWorkspaceBuildRequest, - "log_level" & "orphan" + "log_level" | "orphan" >; export type DeploymentConfig = Readonly<{ diff --git a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx index 9766d76f692a3..a6bf1e2ca0bc2 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.test.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.test.tsx @@ -161,7 +161,9 @@ describe("WorkspacePage", () => { }); await user.click(confirmButton); // arguments are workspace.name, log level (undefined), and orphan - expect(deleteWorkspaceMock).toBeCalledWith(MockFailedWorkspace.id, { + expect(deleteWorkspaceMock).toBeCalledWith< + [string, apiModule.DeleteWorkspaceOptions] + >(MockFailedWorkspace.id, { log_level: undefined, orphan: true, }); @@ -425,8 +427,10 @@ describe("WorkspacePage", () => { test("Retry with debug logs", async () => { await testButton(failedDelete, retryDebugButtonRe, mockDelete); - expect(mockDelete).toBeCalledWith(failedDelete.id, { - logLevel: "debug", + expect(mockDelete).toBeCalledWith< + [string, 
apiModule.DeleteWorkspaceOptions] + >(failedDelete.id, { + log_level: "debug", }); }); }); diff --git a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx index f3750051823ff..f98edfc89409e 100644 --- a/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceReadyPage.tsx @@ -173,7 +173,7 @@ export const WorkspaceReadyPage: FC = ({ stopWorkspaceMutation.mutate({ logLevel }); break; case "delete": - deleteWorkspaceMutation.mutate({ logLevel }); + deleteWorkspaceMutation.mutate({ log_level: logLevel }); break; } }; From de8149fbfd904280275755282576a7feb6a449cd Mon Sep 17 00:00:00 2001 From: Garrett Delfosse Date: Fri, 31 May 2024 12:26:19 -0400 Subject: [PATCH 144/149] chore: move template meta last_used_at update to workspacestats (#13415) --- coderd/schedule/template.go | 22 +++++++++++----------- coderd/templates.go | 7 ++++++- coderd/workspacestats/reporter.go | 13 +++++++++++++ enterprise/coderd/schedule/template.go | 10 +++------- 4 files changed, 33 insertions(+), 19 deletions(-) diff --git a/coderd/schedule/template.go b/coderd/schedule/template.go index b326dd336a450..a68cebd1fac93 100644 --- a/coderd/schedule/template.go +++ b/coderd/schedule/template.go @@ -114,38 +114,38 @@ func VerifyTemplateAutostartRequirement(days uint8) error { } type TemplateScheduleOptions struct { - UserAutostartEnabled bool `json:"user_autostart_enabled"` - UserAutostopEnabled bool `json:"user_autostop_enabled"` - DefaultTTL time.Duration `json:"default_ttl"` + UserAutostartEnabled bool + UserAutostopEnabled bool + DefaultTTL time.Duration // ActivityBump dictates the duration to bump the workspace's deadline by if // Coder detects activity from the user. A value of 0 means no bumping. - ActivityBump time.Duration `json:"activity_bump"` + ActivityBump time.Duration // AutostopRequirement dictates when the workspace must be restarted. This // used to be handled by MaxTTL. 
- AutostopRequirement TemplateAutostopRequirement `json:"autostop_requirement"` + AutostopRequirement TemplateAutostopRequirement // AutostartRequirement dictates when the workspace can be auto started. - AutostartRequirement TemplateAutostartRequirement `json:"autostart_requirement"` + AutostartRequirement TemplateAutostartRequirement // FailureTTL dictates the duration after which failed workspaces will be // stopped automatically. - FailureTTL time.Duration `json:"failure_ttl"` + FailureTTL time.Duration // TimeTilDormant dictates the duration after which inactive workspaces will // go dormant. - TimeTilDormant time.Duration `json:"time_til_dormant"` + TimeTilDormant time.Duration // TimeTilDormantAutoDelete dictates the duration after which dormant workspaces will be // permanently deleted. - TimeTilDormantAutoDelete time.Duration `json:"time_til_dormant_autodelete"` + TimeTilDormantAutoDelete time.Duration // UpdateWorkspaceLastUsedAt updates the template's workspaces' // last_used_at field. This is useful for preventing updates to the // templates inactivity_ttl immediately triggering a dormant action against // workspaces whose last_used_at field violates the new template // inactivity_ttl threshold. - UpdateWorkspaceLastUsedAt bool `json:"update_workspace_last_used_at"` + UpdateWorkspaceLastUsedAt func(ctx context.Context, db database.Store, templateID uuid.UUID, lastUsedAt time.Time) error `json:"update_workspace_last_used_at"` // UpdateWorkspaceDormantAt updates the template's workspaces' // dormant_at field. This is useful for preventing updates to the // templates locked_ttl immediately triggering a delete action against // workspaces whose dormant_at field violates the new template time_til_dormant_autodelete // threshold. 
- UpdateWorkspaceDormantAt bool `json:"update_workspace_dormant_at"` + UpdateWorkspaceDormantAt bool } // TemplateScheduleStore provides an interface for retrieving template diff --git a/coderd/templates.go b/coderd/templates.go index 94601ba2cc35b..b4c546814737e 100644 --- a/coderd/templates.go +++ b/coderd/templates.go @@ -23,6 +23,7 @@ import ( "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/workspacestats" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/examples" ) @@ -726,6 +727,10 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { failureTTL := time.Duration(req.FailureTTLMillis) * time.Millisecond inactivityTTL := time.Duration(req.TimeTilDormantMillis) * time.Millisecond timeTilDormantAutoDelete := time.Duration(req.TimeTilDormantAutoDeleteMillis) * time.Millisecond + var updateWorkspaceLastUsedAt workspacestats.UpdateTemplateWorkspacesLastUsedAtFunc + if req.UpdateWorkspaceLastUsedAt { + updateWorkspaceLastUsedAt = workspacestats.UpdateTemplateWorkspacesLastUsedAt + } if defaultTTL != time.Duration(template.DefaultTTL) || activityBump != time.Duration(template.ActivityBump) || @@ -755,7 +760,7 @@ func (api *API) patchTemplateMeta(rw http.ResponseWriter, r *http.Request) { FailureTTL: failureTTL, TimeTilDormant: inactivityTTL, TimeTilDormantAutoDelete: timeTilDormantAutoDelete, - UpdateWorkspaceLastUsedAt: req.UpdateWorkspaceLastUsedAt, + UpdateWorkspaceLastUsedAt: updateWorkspaceLastUsedAt, UpdateWorkspaceDormantAt: req.UpdateWorkspaceDormantAt, }) if err != nil { diff --git a/coderd/workspacestats/reporter.go b/coderd/workspacestats/reporter.go index ec2c6a44fcb24..8ae4bdd827ac3 100644 --- a/coderd/workspacestats/reporter.go +++ b/coderd/workspacestats/reporter.go @@ -192,3 +192,16 @@ func (r *Reporter) ReportAgentStats(ctx context.Context, now time.Time, workspac return nil } + +type 
UpdateTemplateWorkspacesLastUsedAtFunc func(ctx context.Context, db database.Store, templateID uuid.UUID, lastUsedAt time.Time) error + +func UpdateTemplateWorkspacesLastUsedAt(ctx context.Context, db database.Store, templateID uuid.UUID, lastUsedAt time.Time) error { + err := db.UpdateTemplateWorkspacesLastUsedAt(ctx, database.UpdateTemplateWorkspacesLastUsedAtParams{ + TemplateID: templateID, + LastUsedAt: lastUsedAt, + }) + if err != nil { + return xerrors.Errorf("update template workspaces last used at: %w", err) + } + return nil +} diff --git a/enterprise/coderd/schedule/template.go b/enterprise/coderd/schedule/template.go index 824bcca6a1bcc..5d5a786020241 100644 --- a/enterprise/coderd/schedule/template.go +++ b/enterprise/coderd/schedule/template.go @@ -168,14 +168,10 @@ func (s *EnterpriseTemplateScheduleStore) Set(ctx context.Context, db database.S return xerrors.Errorf("update deleting_at of all workspaces for new time_til_dormant_autodelete %q: %w", opts.TimeTilDormantAutoDelete, err) } - if opts.UpdateWorkspaceLastUsedAt { - // nolint:gocritic // (#13146) Will be moved soon as part of refactor. 
- err = tx.UpdateTemplateWorkspacesLastUsedAt(ctx, database.UpdateTemplateWorkspacesLastUsedAtParams{ - TemplateID: tpl.ID, - LastUsedAt: dbtime.Now(), - }) + if opts.UpdateWorkspaceLastUsedAt != nil { + err = opts.UpdateWorkspaceLastUsedAt(ctx, tx, tpl.ID, s.now()) if err != nil { - return xerrors.Errorf("update template workspaces last_used_at: %w", err) + return xerrors.Errorf("update workspace last used at: %w", err) } } From b248f125e12b04efcbe07737f5a09ad663c98727 Mon Sep 17 00:00:00 2001 From: Kayla Washburn-Love Date: Fri, 31 May 2024 10:59:28 -0600 Subject: [PATCH 145/149] chore: rename notification banners to announcement banners (#13419) --- agent/agent.go | 28 +++---- agent/agent_test.go | 4 +- agent/agentssh/agentssh.go | 18 ++--- agent/agenttest/client.go | 20 ++--- agent/proto/agent.pb.go | 76 +++++++++---------- agent/proto/agent.proto | 8 +- agent/proto/agent_drpc.pb.go | 28 +++---- ...ion_banners.go => announcement_banners.go} | 16 ++-- ... => announcement_banners_internal_test.go} | 16 ++-- coderd/agentapi/api.go | 4 +- coderd/apidoc/docs.go | 28 +++---- coderd/apidoc/swagger.json | 28 +++---- coderd/appearance/appearance.go | 2 +- coderd/database/dbauthz/dbauthz.go | 24 +++--- coderd/database/dbauthz/dbauthz_test.go | 6 +- coderd/database/dbmem/dbmem.go | 40 +++++----- coderd/database/dbmetrics/dbmetrics.go | 28 +++---- coderd/database/dbmock/dbmock.go | 58 +++++++------- .../000213_announcement_banners.down.sql | 3 + .../000213_announcement_banners.up.sql | 3 + coderd/database/querier.go | 4 +- coderd/database/queries.sql.go | 42 +++++----- coderd/database/queries/siteconfig.sql | 10 +-- codersdk/deployment.go | 8 +- docs/api/enterprise.md | 18 ++--- docs/api/schemas.md | 20 ++--- enterprise/coderd/appearance.go | 30 ++++---- enterprise/coderd/appearance_test.go | 32 ++++---- site/src/api/api.ts | 2 +- site/src/api/typesGenerated.ts | 4 +- .../AnnouncementBannerView.stories.tsx} | 10 +-- .../AnnouncementBannerView.tsx} | 4 +- 
.../AnnouncementBanners.tsx} | 10 +-- .../src/modules/dashboard/DashboardLayout.tsx | 4 +- ...x => AnnouncementBannerDialog.stories.tsx} | 12 +-- ...ialog.tsx => AnnouncementBannerDialog.tsx} | 10 +-- ...nerItem.tsx => AnnouncementBannerItem.tsx} | 4 +- ...ngs.tsx => AnnouncementBannerSettings.tsx} | 24 +++--- .../AppearanceSettingsPageView.stories.tsx | 2 +- .../AppearanceSettingsPageView.tsx | 10 +-- .../src/pages/WorkspacePage/WorkspacePage.tsx | 4 +- site/src/testHelpers/entities.ts | 2 +- 42 files changed, 355 insertions(+), 349 deletions(-) rename coderd/agentapi/{notification_banners.go => announcement_banners.go} (63%) rename coderd/agentapi/{notification_banners_internal_test.go => announcement_banners_internal_test.go} (68%) create mode 100644 coderd/database/migrations/000213_announcement_banners.down.sql create mode 100644 coderd/database/migrations/000213_announcement_banners.up.sql rename site/src/modules/dashboard/{NotificationBanners/NotificationBannerView.stories.tsx => AnnouncementBanners/AnnouncementBannerView.stories.tsx} (60%) rename site/src/modules/dashboard/{NotificationBanners/NotificationBannerView.tsx => AnnouncementBanners/AnnouncementBannerView.tsx} (89%) rename site/src/modules/dashboard/{NotificationBanners/NotificationBanners.tsx => AnnouncementBanners/AnnouncementBanners.tsx} (69%) rename site/src/pages/DeploySettingsPage/AppearanceSettingsPage/{NotificationBannerDialog.stories.tsx => AnnouncementBannerDialog.stories.tsx} (57%) rename site/src/pages/DeploySettingsPage/AppearanceSettingsPage/{NotificationBannerDialog.tsx => AnnouncementBannerDialog.tsx} (92%) rename site/src/pages/DeploySettingsPage/AppearanceSettingsPage/{NotificationBannerItem.tsx => AnnouncementBannerItem.tsx} (94%) rename site/src/pages/DeploySettingsPage/AppearanceSettingsPage/{NotificationBannerSettings.tsx => AnnouncementBannerSettings.tsx} (91%) diff --git a/agent/agent.go b/agent/agent.go index e3bbe7f07c984..c7a785f8d5da1 100644 --- a/agent/agent.go +++ 
b/agent/agent.go @@ -176,7 +176,7 @@ func New(options Options) Agent { ignorePorts: options.IgnorePorts, portCacheDuration: options.PortCacheDuration, reportMetadataInterval: options.ReportMetadataInterval, - notificationBannersRefreshInterval: options.ServiceBannerRefreshInterval, + announcementBannersRefreshInterval: options.ServiceBannerRefreshInterval, sshMaxTimeout: options.SSHMaxTimeout, subsystems: options.Subsystems, addresses: options.Addresses, @@ -193,7 +193,7 @@ func New(options Options) Agent { // that gets closed on disconnection. This is used to wait for graceful disconnection from the // coordinator during shut down. close(a.coordDisconnected) - a.notificationBanners.Store(new([]codersdk.BannerConfig)) + a.announcementBanners.Store(new([]codersdk.BannerConfig)) a.sessionToken.Store(new(string)) a.init() return a @@ -234,8 +234,8 @@ type agent struct { manifest atomic.Pointer[agentsdk.Manifest] // manifest is atomic because values can change after reconnection. reportMetadataInterval time.Duration scriptRunner *agentscripts.Runner - notificationBanners atomic.Pointer[[]codersdk.BannerConfig] // notificationBanners is atomic because it is periodically updated. - notificationBannersRefreshInterval time.Duration + announcementBanners atomic.Pointer[[]codersdk.BannerConfig] // announcementBanners is atomic because it is periodically updated. 
+ announcementBannersRefreshInterval time.Duration sessionToken atomic.Pointer[string] sshServer *agentssh.Server sshMaxTimeout time.Duration @@ -274,7 +274,7 @@ func (a *agent) init() { sshSrv, err := agentssh.NewServer(a.hardCtx, a.logger.Named("ssh-server"), a.prometheusRegistry, a.filesystem, &agentssh.Config{ MaxTimeout: a.sshMaxTimeout, MOTDFile: func() string { return a.manifest.Load().MOTDFile }, - NotificationBanners: func() *[]codersdk.BannerConfig { return a.notificationBanners.Load() }, + AnnouncementBanners: func() *[]codersdk.BannerConfig { return a.announcementBanners.Load() }, UpdateEnv: a.updateCommandEnv, WorkingDirectory: func() string { return a.manifest.Load().Directory }, }) @@ -709,14 +709,14 @@ func (a *agent) setLifecycle(state codersdk.WorkspaceAgentLifecycle) { // (and must be done before the session actually starts). func (a *agent) fetchServiceBannerLoop(ctx context.Context, conn drpc.Conn) error { aAPI := proto.NewDRPCAgentClient(conn) - ticker := time.NewTicker(a.notificationBannersRefreshInterval) + ticker := time.NewTicker(a.announcementBannersRefreshInterval) defer ticker.Stop() for { select { case <-ctx.Done(): return ctx.Err() case <-ticker.C: - bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) + bannersProto, err := aAPI.GetAnnouncementBanners(ctx, &proto.GetAnnouncementBannersRequest{}) if err != nil { if ctx.Err() != nil { return ctx.Err() @@ -724,11 +724,11 @@ func (a *agent) fetchServiceBannerLoop(ctx context.Context, conn drpc.Conn) erro a.logger.Error(ctx, "failed to update notification banners", slog.Error(err)) return err } - banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) - for _, bannerProto := range bannersProto.NotificationBanners { + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.AnnouncementBanners)) + for _, bannerProto := range bannersProto.AnnouncementBanners { banners = append(banners, 
agentsdk.BannerConfigFromProto(bannerProto)) } - a.notificationBanners.Store(&banners) + a.announcementBanners.Store(&banners) } } } @@ -763,15 +763,15 @@ func (a *agent) run() (retErr error) { connMan.start("init notification banners", gracefulShutdownBehaviorStop, func(ctx context.Context, conn drpc.Conn) error { aAPI := proto.NewDRPCAgentClient(conn) - bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) + bannersProto, err := aAPI.GetAnnouncementBanners(ctx, &proto.GetAnnouncementBannersRequest{}) if err != nil { return xerrors.Errorf("fetch service banner: %w", err) } - banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) - for _, bannerProto := range bannersProto.NotificationBanners { + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.AnnouncementBanners)) + for _, bannerProto := range bannersProto.AnnouncementBanners { banners = append(banners, agentsdk.BannerConfigFromProto(bannerProto)) } - a.notificationBanners.Store(&banners) + a.announcementBanners.Store(&banners) return nil }, ) diff --git a/agent/agent_test.go b/agent/agent_test.go index c674a29ec35f6..a008a60a2362e 100644 --- a/agent/agent_test.go +++ b/agent/agent_test.go @@ -614,7 +614,7 @@ func TestAgent_Session_TTY_MOTD_Update(t *testing.T) { // Set new banner func and wait for the agent to call it to update the // banner. 
ready := make(chan struct{}, 2) - client.SetNotificationBannersFunc(func() ([]codersdk.BannerConfig, error) { + client.SetAnnouncementBannersFunc(func() ([]codersdk.BannerConfig, error) { select { case ready <- struct{}{}: default: @@ -2200,7 +2200,7 @@ func setupSSHSession( ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) defer cancel() opts = append(opts, func(c *agenttest.Client, o *agent.Options) { - c.SetNotificationBannersFunc(func() ([]codersdk.BannerConfig, error) { + c.SetAnnouncementBannersFunc(func() ([]codersdk.BannerConfig, error) { return []codersdk.BannerConfig{banner}, nil }) }) diff --git a/agent/agentssh/agentssh.go b/agent/agentssh/agentssh.go index 4fcc6ab869c5b..54e5a3f41223e 100644 --- a/agent/agentssh/agentssh.go +++ b/agent/agentssh/agentssh.go @@ -63,7 +63,7 @@ type Config struct { // file will be displayed to the user upon login. MOTDFile func() string // ServiceBanner returns the configuration for the Coder service banner. - NotificationBanners func() *[]codersdk.BannerConfig + AnnouncementBanners func() *[]codersdk.BannerConfig // UpdateEnv updates the environment variables for the command to be // executed. It can be used to add, modify or replace environment variables. 
UpdateEnv func(current []string) (updated []string, err error) @@ -123,8 +123,8 @@ func NewServer(ctx context.Context, logger slog.Logger, prometheusRegistry *prom if config.MOTDFile == nil { config.MOTDFile = func() string { return "" } } - if config.NotificationBanners == nil { - config.NotificationBanners = func() *[]codersdk.BannerConfig { return &[]codersdk.BannerConfig{} } + if config.AnnouncementBanners == nil { + config.AnnouncementBanners = func() *[]codersdk.BannerConfig { return &[]codersdk.BannerConfig{} } } if config.WorkingDirectory == nil { config.WorkingDirectory = func() string { @@ -441,13 +441,13 @@ func (s *Server) startPTYSession(logger slog.Logger, session ptySession, magicTy session.DisablePTYEmulation() if isLoginShell(session.RawCommand()) { - banners := s.config.NotificationBanners() + banners := s.config.AnnouncementBanners() if banners != nil { for _, banner := range *banners { - err := showNotificationBanner(session, banner) + err := showAnnouncementBanner(session, banner) if err != nil { - logger.Error(ctx, "agent failed to show service banner", slog.Error(err)) - s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, "yes", "notification_banner").Add(1) + logger.Error(ctx, "agent failed to show announcement banner", slog.Error(err)) + s.metrics.sessionErrors.WithLabelValues(magicTypeLabel, "yes", "announcement_banner").Add(1) break } } @@ -894,9 +894,9 @@ func isQuietLogin(fs afero.Fs, rawCommand string) bool { return err == nil } -// showNotificationBanner will write the service banner if enabled and not blank +// showAnnouncementBanner will write the service banner if enabled and not blank // along with a blank line for spacing. 
-func showNotificationBanner(session io.Writer, banner codersdk.BannerConfig) error { +func showAnnouncementBanner(session io.Writer, banner codersdk.BannerConfig) error { if banner.Enabled && banner.Message != "" { // The banner supports Markdown so we might want to parse it but Markdown is // still fairly readable in its raw form. diff --git a/agent/agenttest/client.go b/agent/agenttest/client.go index b21a7444c6084..3a4fa4de60b26 100644 --- a/agent/agenttest/client.go +++ b/agent/agenttest/client.go @@ -138,8 +138,8 @@ func (c *Client) GetStartupLogs() []agentsdk.Log { return c.logs } -func (c *Client) SetNotificationBannersFunc(f func() ([]codersdk.ServiceBannerConfig, error)) { - c.fakeAgentAPI.SetNotificationBannersFunc(f) +func (c *Client) SetAnnouncementBannersFunc(f func() ([]codersdk.BannerConfig, error)) { + c.fakeAgentAPI.SetAnnouncementBannersFunc(f) } func (c *Client) PushDERPMapUpdate(update *tailcfg.DERPMap) error { @@ -171,7 +171,7 @@ type FakeAgentAPI struct { lifecycleStates []codersdk.WorkspaceAgentLifecycle metadata map[string]agentsdk.Metadata - getNotificationBannersFunc func() ([]codersdk.BannerConfig, error) + getAnnouncementBannersFunc func() ([]codersdk.BannerConfig, error) } func (f *FakeAgentAPI) GetManifest(context.Context, *agentproto.GetManifestRequest) (*agentproto.Manifest, error) { @@ -182,20 +182,20 @@ func (*FakeAgentAPI) GetServiceBanner(context.Context, *agentproto.GetServiceBan return &agentproto.ServiceBanner{}, nil } -func (f *FakeAgentAPI) SetNotificationBannersFunc(fn func() ([]codersdk.BannerConfig, error)) { +func (f *FakeAgentAPI) SetAnnouncementBannersFunc(fn func() ([]codersdk.BannerConfig, error)) { f.Lock() defer f.Unlock() - f.getNotificationBannersFunc = fn + f.getAnnouncementBannersFunc = fn f.logger.Info(context.Background(), "updated notification banners") } -func (f *FakeAgentAPI) GetNotificationBanners(context.Context, *agentproto.GetNotificationBannersRequest) (*agentproto.GetNotificationBannersResponse, 
error) { +func (f *FakeAgentAPI) GetAnnouncementBanners(context.Context, *agentproto.GetAnnouncementBannersRequest) (*agentproto.GetAnnouncementBannersResponse, error) { f.Lock() defer f.Unlock() - if f.getNotificationBannersFunc == nil { - return &agentproto.GetNotificationBannersResponse{NotificationBanners: []*agentproto.BannerConfig{}}, nil + if f.getAnnouncementBannersFunc == nil { + return &agentproto.GetAnnouncementBannersResponse{AnnouncementBanners: []*agentproto.BannerConfig{}}, nil } - banners, err := f.getNotificationBannersFunc() + banners, err := f.getAnnouncementBannersFunc() if err != nil { return nil, err } @@ -203,7 +203,7 @@ func (f *FakeAgentAPI) GetNotificationBanners(context.Context, *agentproto.GetNo for _, banner := range banners { bannersProto = append(bannersProto, agentsdk.ProtoFromBannerConfig(banner)) } - return &agentproto.GetNotificationBannersResponse{NotificationBanners: bannersProto}, nil + return &agentproto.GetAnnouncementBannersResponse{AnnouncementBanners: bannersProto}, nil } func (f *FakeAgentAPI) UpdateStats(ctx context.Context, req *agentproto.UpdateStatsRequest) (*agentproto.UpdateStatsResponse, error) { diff --git a/agent/proto/agent.pb.go b/agent/proto/agent.pb.go index 41e8d061054a5..35e62ace80ce5 100644 --- a/agent/proto/agent.pb.go +++ b/agent/proto/agent.pb.go @@ -1859,14 +1859,14 @@ func (x *BatchCreateLogsResponse) GetLogLimitExceeded() bool { return false } -type GetNotificationBannersRequest struct { +type GetAnnouncementBannersRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields } -func (x *GetNotificationBannersRequest) Reset() { - *x = GetNotificationBannersRequest{} +func (x *GetAnnouncementBannersRequest) Reset() { + *x = GetAnnouncementBannersRequest{} if protoimpl.UnsafeEnabled { mi := &file_agent_proto_agent_proto_msgTypes[22] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -1874,13 +1874,13 @@ func (x *GetNotificationBannersRequest) 
Reset() { } } -func (x *GetNotificationBannersRequest) String() string { +func (x *GetAnnouncementBannersRequest) String() string { return protoimpl.X.MessageStringOf(x) } -func (*GetNotificationBannersRequest) ProtoMessage() {} +func (*GetAnnouncementBannersRequest) ProtoMessage() {} -func (x *GetNotificationBannersRequest) ProtoReflect() protoreflect.Message { +func (x *GetAnnouncementBannersRequest) ProtoReflect() protoreflect.Message { mi := &file_agent_proto_agent_proto_msgTypes[22] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -1892,21 +1892,21 @@ func (x *GetNotificationBannersRequest) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use GetNotificationBannersRequest.ProtoReflect.Descriptor instead. -func (*GetNotificationBannersRequest) Descriptor() ([]byte, []int) { +// Deprecated: Use GetAnnouncementBannersRequest.ProtoReflect.Descriptor instead. +func (*GetAnnouncementBannersRequest) Descriptor() ([]byte, []int) { return file_agent_proto_agent_proto_rawDescGZIP(), []int{22} } -type GetNotificationBannersResponse struct { +type GetAnnouncementBannersResponse struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - NotificationBanners []*BannerConfig `protobuf:"bytes,1,rep,name=notification_banners,json=notificationBanners,proto3" json:"notification_banners,omitempty"` + AnnouncementBanners []*BannerConfig `protobuf:"bytes,1,rep,name=announcement_banners,json=announcementBanners,proto3" json:"announcement_banners,omitempty"` } -func (x *GetNotificationBannersResponse) Reset() { - *x = GetNotificationBannersResponse{} +func (x *GetAnnouncementBannersResponse) Reset() { + *x = GetAnnouncementBannersResponse{} if protoimpl.UnsafeEnabled { mi := &file_agent_proto_agent_proto_msgTypes[23] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -1914,13 +1914,13 @@ func (x *GetNotificationBannersResponse) Reset() { } } -func (x 
*GetNotificationBannersResponse) String() string { +func (x *GetAnnouncementBannersResponse) String() string { return protoimpl.X.MessageStringOf(x) } -func (*GetNotificationBannersResponse) ProtoMessage() {} +func (*GetAnnouncementBannersResponse) ProtoMessage() {} -func (x *GetNotificationBannersResponse) ProtoReflect() protoreflect.Message { +func (x *GetAnnouncementBannersResponse) ProtoReflect() protoreflect.Message { mi := &file_agent_proto_agent_proto_msgTypes[23] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -1932,14 +1932,14 @@ func (x *GetNotificationBannersResponse) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use GetNotificationBannersResponse.ProtoReflect.Descriptor instead. -func (*GetNotificationBannersResponse) Descriptor() ([]byte, []int) { +// Deprecated: Use GetAnnouncementBannersResponse.ProtoReflect.Descriptor instead. +func (*GetAnnouncementBannersResponse) Descriptor() ([]byte, []int) { return file_agent_proto_agent_proto_rawDescGZIP(), []int{23} } -func (x *GetNotificationBannersResponse) GetNotificationBanners() []*BannerConfig { +func (x *GetAnnouncementBannersResponse) GetAnnouncementBanners() []*BannerConfig { if x != nil { - return x.NotificationBanners + return x.AnnouncementBanners } return nil } @@ -2742,16 +2742,16 @@ var file_agent_proto_agent_proto_rawDesc = []byte{ 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x2c, 0x0a, 0x12, 0x6c, 0x6f, 0x67, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x5f, 0x65, 0x78, 0x63, 0x65, 0x65, 0x64, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x10, 0x6c, 0x6f, 0x67, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x45, 0x78, 0x63, 0x65, - 0x65, 0x64, 0x65, 0x64, 0x22, 0x1f, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x71, 0x0a, 0x1e, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, - 0x66, 
0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x14, 0x6e, 0x6f, 0x74, 0x69, 0x66, - 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x18, + 0x65, 0x64, 0x65, 0x64, 0x22, 0x1f, 0x0a, 0x1d, 0x47, 0x65, 0x74, 0x41, 0x6e, 0x6e, 0x6f, 0x75, + 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x22, 0x71, 0x0a, 0x1e, 0x47, 0x65, 0x74, 0x41, 0x6e, 0x6e, 0x6f, + 0x75, 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4f, 0x0a, 0x14, 0x61, 0x6e, 0x6e, 0x6f, 0x75, + 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x62, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, - 0x66, 0x69, 0x67, 0x52, 0x13, 0x6e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x22, 0x6d, 0x0a, 0x0c, 0x42, 0x61, 0x6e, 0x6e, + 0x66, 0x69, 0x67, 0x52, 0x13, 0x61, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, + 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x22, 0x6d, 0x0a, 0x0c, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x6e, 0x61, 0x62, 0x6c, 0x65, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, @@ -2812,13 +2812,13 @@ var file_agent_proto_agent_proto_rawDesc = []byte{ 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x27, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 
0x2e, 0x42, 0x61, 0x74, 0x63, 0x68, 0x43, 0x72, 0x65, 0x61, 0x74, 0x65, 0x4c, 0x6f, 0x67, 0x73, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x77, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, - 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x12, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x77, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x41, 0x6e, 0x6e, 0x6f, + 0x75, 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x12, 0x2d, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, - 0x2e, 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x47, 0x65, 0x74, 0x41, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x2e, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, - 0x47, 0x65, 0x74, 0x4e, 0x6f, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, + 0x47, 0x65, 0x74, 0x41, 0x6e, 0x6e, 0x6f, 0x75, 0x6e, 0x63, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x6e, 0x6e, 0x65, 0x72, 0x73, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x27, 0x5a, 0x25, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x76, 0x32, 0x2f, 0x61, 0x67, 0x65, 0x6e, @@ -2869,8 +2869,8 @@ var file_agent_proto_agent_proto_goTypes = []interface{}{ (*Log)(nil), // 26: coder.agent.v2.Log (*BatchCreateLogsRequest)(nil), // 27: coder.agent.v2.BatchCreateLogsRequest (*BatchCreateLogsResponse)(nil), // 28: coder.agent.v2.BatchCreateLogsResponse - (*GetNotificationBannersRequest)(nil), // 29: coder.agent.v2.GetNotificationBannersRequest - (*GetNotificationBannersResponse)(nil), // 30: coder.agent.v2.GetNotificationBannersResponse + (*GetAnnouncementBannersRequest)(nil), // 29: 
coder.agent.v2.GetAnnouncementBannersRequest + (*GetAnnouncementBannersResponse)(nil), // 30: coder.agent.v2.GetAnnouncementBannersResponse (*BannerConfig)(nil), // 31: coder.agent.v2.BannerConfig (*WorkspaceApp_Healthcheck)(nil), // 32: coder.agent.v2.WorkspaceApp.Healthcheck (*WorkspaceAgentMetadata_Result)(nil), // 33: coder.agent.v2.WorkspaceAgentMetadata.Result @@ -2911,7 +2911,7 @@ var file_agent_proto_agent_proto_depIdxs = []int32{ 42, // 23: coder.agent.v2.Log.created_at:type_name -> google.protobuf.Timestamp 6, // 24: coder.agent.v2.Log.level:type_name -> coder.agent.v2.Log.Level 26, // 25: coder.agent.v2.BatchCreateLogsRequest.logs:type_name -> coder.agent.v2.Log - 31, // 26: coder.agent.v2.GetNotificationBannersResponse.notification_banners:type_name -> coder.agent.v2.BannerConfig + 31, // 26: coder.agent.v2.GetAnnouncementBannersResponse.announcement_banners:type_name -> coder.agent.v2.BannerConfig 40, // 27: coder.agent.v2.WorkspaceApp.Healthcheck.interval:type_name -> google.protobuf.Duration 42, // 28: coder.agent.v2.WorkspaceAgentMetadata.Result.collected_at:type_name -> google.protobuf.Timestamp 40, // 29: coder.agent.v2.WorkspaceAgentMetadata.Description.interval:type_name -> google.protobuf.Duration @@ -2927,7 +2927,7 @@ var file_agent_proto_agent_proto_depIdxs = []int32{ 22, // 39: coder.agent.v2.Agent.UpdateStartup:input_type -> coder.agent.v2.UpdateStartupRequest 24, // 40: coder.agent.v2.Agent.BatchUpdateMetadata:input_type -> coder.agent.v2.BatchUpdateMetadataRequest 27, // 41: coder.agent.v2.Agent.BatchCreateLogs:input_type -> coder.agent.v2.BatchCreateLogsRequest - 29, // 42: coder.agent.v2.Agent.GetNotificationBanners:input_type -> coder.agent.v2.GetNotificationBannersRequest + 29, // 42: coder.agent.v2.Agent.GetAnnouncementBanners:input_type -> coder.agent.v2.GetAnnouncementBannersRequest 10, // 43: coder.agent.v2.Agent.GetManifest:output_type -> coder.agent.v2.Manifest 12, // 44: coder.agent.v2.Agent.GetServiceBanner:output_type -> 
coder.agent.v2.ServiceBanner 16, // 45: coder.agent.v2.Agent.UpdateStats:output_type -> coder.agent.v2.UpdateStatsResponse @@ -2936,7 +2936,7 @@ var file_agent_proto_agent_proto_depIdxs = []int32{ 21, // 48: coder.agent.v2.Agent.UpdateStartup:output_type -> coder.agent.v2.Startup 25, // 49: coder.agent.v2.Agent.BatchUpdateMetadata:output_type -> coder.agent.v2.BatchUpdateMetadataResponse 28, // 50: coder.agent.v2.Agent.BatchCreateLogs:output_type -> coder.agent.v2.BatchCreateLogsResponse - 30, // 51: coder.agent.v2.Agent.GetNotificationBanners:output_type -> coder.agent.v2.GetNotificationBannersResponse + 30, // 51: coder.agent.v2.Agent.GetAnnouncementBanners:output_type -> coder.agent.v2.GetAnnouncementBannersResponse 43, // [43:52] is the sub-list for method output_type 34, // [34:43] is the sub-list for method input_type 34, // [34:34] is the sub-list for extension type_name @@ -3215,7 +3215,7 @@ func file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetNotificationBannersRequest); i { + switch v := v.(*GetAnnouncementBannersRequest); i { case 0: return &v.state case 1: @@ -3227,7 +3227,7 @@ func file_agent_proto_agent_proto_init() { } } file_agent_proto_agent_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetNotificationBannersResponse); i { + switch v := v.(*GetAnnouncementBannersResponse); i { case 0: return &v.state case 1: diff --git a/agent/proto/agent.proto b/agent/proto/agent.proto index 8432fe8ef7f2b..4548ed8e7f2de 100644 --- a/agent/proto/agent.proto +++ b/agent/proto/agent.proto @@ -251,10 +251,10 @@ message BatchCreateLogsResponse { bool log_limit_exceeded = 1; } -message GetNotificationBannersRequest {} +message GetAnnouncementBannersRequest {} -message GetNotificationBannersResponse { - repeated BannerConfig notification_banners = 1; +message GetAnnouncementBannersResponse { + repeated BannerConfig 
announcement_banners = 1; } message BannerConfig { @@ -272,5 +272,5 @@ service Agent { rpc UpdateStartup(UpdateStartupRequest) returns (Startup); rpc BatchUpdateMetadata(BatchUpdateMetadataRequest) returns (BatchUpdateMetadataResponse); rpc BatchCreateLogs(BatchCreateLogsRequest) returns (BatchCreateLogsResponse); - rpc GetNotificationBanners(GetNotificationBannersRequest) returns (GetNotificationBannersResponse); + rpc GetAnnouncementBanners(GetAnnouncementBannersRequest) returns (GetAnnouncementBannersResponse); } diff --git a/agent/proto/agent_drpc.pb.go b/agent/proto/agent_drpc.pb.go index 0003a1fa4568a..09b3c972c2ce6 100644 --- a/agent/proto/agent_drpc.pb.go +++ b/agent/proto/agent_drpc.pb.go @@ -46,7 +46,7 @@ type DRPCAgentClient interface { UpdateStartup(ctx context.Context, in *UpdateStartupRequest) (*Startup, error) BatchUpdateMetadata(ctx context.Context, in *BatchUpdateMetadataRequest) (*BatchUpdateMetadataResponse, error) BatchCreateLogs(ctx context.Context, in *BatchCreateLogsRequest) (*BatchCreateLogsResponse, error) - GetNotificationBanners(ctx context.Context, in *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) + GetAnnouncementBanners(ctx context.Context, in *GetAnnouncementBannersRequest) (*GetAnnouncementBannersResponse, error) } type drpcAgentClient struct { @@ -131,9 +131,9 @@ func (c *drpcAgentClient) BatchCreateLogs(ctx context.Context, in *BatchCreateLo return out, nil } -func (c *drpcAgentClient) GetNotificationBanners(ctx context.Context, in *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) { - out := new(GetNotificationBannersResponse) - err := c.cc.Invoke(ctx, "/coder.agent.v2.Agent/GetNotificationBanners", drpcEncoding_File_agent_proto_agent_proto{}, in, out) +func (c *drpcAgentClient) GetAnnouncementBanners(ctx context.Context, in *GetAnnouncementBannersRequest) (*GetAnnouncementBannersResponse, error) { + out := new(GetAnnouncementBannersResponse) + err := c.cc.Invoke(ctx, 
"/coder.agent.v2.Agent/GetAnnouncementBanners", drpcEncoding_File_agent_proto_agent_proto{}, in, out) if err != nil { return nil, err } @@ -149,7 +149,7 @@ type DRPCAgentServer interface { UpdateStartup(context.Context, *UpdateStartupRequest) (*Startup, error) BatchUpdateMetadata(context.Context, *BatchUpdateMetadataRequest) (*BatchUpdateMetadataResponse, error) BatchCreateLogs(context.Context, *BatchCreateLogsRequest) (*BatchCreateLogsResponse, error) - GetNotificationBanners(context.Context, *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) + GetAnnouncementBanners(context.Context, *GetAnnouncementBannersRequest) (*GetAnnouncementBannersResponse, error) } type DRPCAgentUnimplementedServer struct{} @@ -186,7 +186,7 @@ func (s *DRPCAgentUnimplementedServer) BatchCreateLogs(context.Context, *BatchCr return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } -func (s *DRPCAgentUnimplementedServer) GetNotificationBanners(context.Context, *GetNotificationBannersRequest) (*GetNotificationBannersResponse, error) { +func (s *DRPCAgentUnimplementedServer) GetAnnouncementBanners(context.Context, *GetAnnouncementBannersRequest) (*GetAnnouncementBannersResponse, error) { return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } @@ -269,14 +269,14 @@ func (DRPCAgentDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, ) }, DRPCAgentServer.BatchCreateLogs, true case 8: - return "/coder.agent.v2.Agent/GetNotificationBanners", drpcEncoding_File_agent_proto_agent_proto{}, + return "/coder.agent.v2.Agent/GetAnnouncementBanners", drpcEncoding_File_agent_proto_agent_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCAgentServer). 
- GetNotificationBanners( + GetAnnouncementBanners( ctx, - in1.(*GetNotificationBannersRequest), + in1.(*GetAnnouncementBannersRequest), ) - }, DRPCAgentServer.GetNotificationBanners, true + }, DRPCAgentServer.GetAnnouncementBanners, true default: return "", nil, nil, nil, false } @@ -414,16 +414,16 @@ func (x *drpcAgent_BatchCreateLogsStream) SendAndClose(m *BatchCreateLogsRespons return x.CloseSend() } -type DRPCAgent_GetNotificationBannersStream interface { +type DRPCAgent_GetAnnouncementBannersStream interface { drpc.Stream - SendAndClose(*GetNotificationBannersResponse) error + SendAndClose(*GetAnnouncementBannersResponse) error } -type drpcAgent_GetNotificationBannersStream struct { +type drpcAgent_GetAnnouncementBannersStream struct { drpc.Stream } -func (x *drpcAgent_GetNotificationBannersStream) SendAndClose(m *GetNotificationBannersResponse) error { +func (x *drpcAgent_GetAnnouncementBannersStream) SendAndClose(m *GetAnnouncementBannersResponse) error { if err := x.MsgSend(m, drpcEncoding_File_agent_proto_agent_proto{}); err != nil { return err } diff --git a/coderd/agentapi/notification_banners.go b/coderd/agentapi/announcement_banners.go similarity index 63% rename from coderd/agentapi/notification_banners.go rename to coderd/agentapi/announcement_banners.go index ab4e7dda96741..8eebb9ae0c9ea 100644 --- a/coderd/agentapi/notification_banners.go +++ b/coderd/agentapi/announcement_banners.go @@ -11,12 +11,12 @@ import ( "github.com/coder/coder/v2/codersdk/agentsdk" ) -type NotificationBannerAPI struct { +type AnnouncementBannerAPI struct { appearanceFetcher *atomic.Pointer[appearance.Fetcher] } -// Deprecated: GetServiceBanner has been deprecated in favor of GetNotificationBanners. -func (a *NotificationBannerAPI) GetServiceBanner(ctx context.Context, _ *proto.GetServiceBannerRequest) (*proto.ServiceBanner, error) { +// Deprecated: GetServiceBanner has been deprecated in favor of GetAnnouncementBanners. 
+func (a *AnnouncementBannerAPI) GetServiceBanner(ctx context.Context, _ *proto.GetServiceBannerRequest) (*proto.ServiceBanner, error) { cfg, err := (*a.appearanceFetcher.Load()).Fetch(ctx) if err != nil { return nil, xerrors.Errorf("fetch appearance: %w", err) @@ -24,16 +24,16 @@ func (a *NotificationBannerAPI) GetServiceBanner(ctx context.Context, _ *proto.G return agentsdk.ProtoFromServiceBanner(cfg.ServiceBanner), nil } -func (a *NotificationBannerAPI) GetNotificationBanners(ctx context.Context, _ *proto.GetNotificationBannersRequest) (*proto.GetNotificationBannersResponse, error) { +func (a *AnnouncementBannerAPI) GetAnnouncementBanners(ctx context.Context, _ *proto.GetAnnouncementBannersRequest) (*proto.GetAnnouncementBannersResponse, error) { cfg, err := (*a.appearanceFetcher.Load()).Fetch(ctx) if err != nil { return nil, xerrors.Errorf("fetch appearance: %w", err) } - banners := make([]*proto.BannerConfig, 0, len(cfg.NotificationBanners)) - for _, banner := range cfg.NotificationBanners { + banners := make([]*proto.BannerConfig, 0, len(cfg.AnnouncementBanners)) + for _, banner := range cfg.AnnouncementBanners { banners = append(banners, agentsdk.ProtoFromBannerConfig(banner)) } - return &proto.GetNotificationBannersResponse{ - NotificationBanners: banners, + return &proto.GetAnnouncementBannersResponse{ + AnnouncementBanners: banners, }, nil } diff --git a/coderd/agentapi/notification_banners_internal_test.go b/coderd/agentapi/announcement_banners_internal_test.go similarity index 68% rename from coderd/agentapi/notification_banners_internal_test.go rename to coderd/agentapi/announcement_banners_internal_test.go index 87f4df2d21764..145459a7c636e 100644 --- a/coderd/agentapi/notification_banners_internal_test.go +++ b/coderd/agentapi/announcement_banners_internal_test.go @@ -14,7 +14,7 @@ import ( "github.com/coder/coder/v2/codersdk/agentsdk" ) -func TestGetNotificationBanners(t *testing.T) { +func TestGetAnnouncementBanners(t *testing.T) { t.Parallel() 
t.Run("OK", func(t *testing.T) { @@ -26,15 +26,15 @@ func TestGetNotificationBanners(t *testing.T) { BackgroundColor: "#00FF00", }} - var ff appearance.Fetcher = fakeFetcher{cfg: codersdk.AppearanceConfig{NotificationBanners: cfg}} + var ff appearance.Fetcher = fakeFetcher{cfg: codersdk.AppearanceConfig{AnnouncementBanners: cfg}} ptr := atomic.Pointer[appearance.Fetcher]{} ptr.Store(&ff) - api := &NotificationBannerAPI{appearanceFetcher: &ptr} - resp, err := api.GetNotificationBanners(context.Background(), &agentproto.GetNotificationBannersRequest{}) + api := &AnnouncementBannerAPI{appearanceFetcher: &ptr} + resp, err := api.GetAnnouncementBanners(context.Background(), &agentproto.GetAnnouncementBannersRequest{}) require.NoError(t, err) - require.Len(t, resp.NotificationBanners, 1) - require.Equal(t, cfg[0], agentsdk.BannerConfigFromProto(resp.NotificationBanners[0])) + require.Len(t, resp.AnnouncementBanners, 1) + require.Equal(t, cfg[0], agentsdk.BannerConfigFromProto(resp.AnnouncementBanners[0])) }) t.Run("FetchError", func(t *testing.T) { @@ -45,8 +45,8 @@ func TestGetNotificationBanners(t *testing.T) { ptr := atomic.Pointer[appearance.Fetcher]{} ptr.Store(&ff) - api := &NotificationBannerAPI{appearanceFetcher: &ptr} - resp, err := api.GetNotificationBanners(context.Background(), &agentproto.GetNotificationBannersRequest{}) + api := &AnnouncementBannerAPI{appearanceFetcher: &ptr} + resp, err := api.GetAnnouncementBanners(context.Background(), &agentproto.GetAnnouncementBannersRequest{}) require.Error(t, err) require.ErrorIs(t, err, expectedErr) require.Nil(t, resp) diff --git a/coderd/agentapi/api.go b/coderd/agentapi/api.go index b8b07672d6aa2..ae0d594314e66 100644 --- a/coderd/agentapi/api.go +++ b/coderd/agentapi/api.go @@ -36,7 +36,7 @@ import ( type API struct { opts Options *ManifestAPI - *NotificationBannerAPI + *AnnouncementBannerAPI *StatsAPI *LifecycleAPI *AppsAPI @@ -108,7 +108,7 @@ func New(opts Options) *API { }, } - api.NotificationBannerAPI = 
&NotificationBannerAPI{ + api.AnnouncementBannerAPI = &AnnouncementBannerAPI{ appearanceFetcher: opts.AppearanceFetcher, } diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index f373e0079a780..a284e46d0a0bb 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8378,20 +8378,20 @@ const docTemplate = `{ "codersdk.AppearanceConfig": { "type": "object", "properties": { + "announcement_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "application_name": { "type": "string" }, "logo_url": { "type": "string" }, - "notification_banners": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.BannerConfig" - } - }, "service_banner": { - "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "description": "Deprecated: ServiceBanner has been replaced by AnnouncementBanners.", "allOf": [ { "$ref": "#/definitions/codersdk.BannerConfig" @@ -12148,20 +12148,20 @@ const docTemplate = `{ "codersdk.UpdateAppearanceConfig": { "type": "object", "properties": { + "announcement_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "application_name": { "type": "string" }, "logo_url": { "type": "string" }, - "notification_banners": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.BannerConfig" - } - }, "service_banner": { - "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "description": "Deprecated: ServiceBanner has been replaced by AnnouncementBanners.", "allOf": [ { "$ref": "#/definitions/codersdk.BannerConfig" diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 84bb41c44fcdd..28212bdaa8342 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7433,20 +7433,20 @@ "codersdk.AppearanceConfig": { "type": "object", "properties": { + "announcement_banners": { + "type": "array", + "items": { + "$ref": 
"#/definitions/codersdk.BannerConfig" + } + }, "application_name": { "type": "string" }, "logo_url": { "type": "string" }, - "notification_banners": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.BannerConfig" - } - }, "service_banner": { - "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "description": "Deprecated: ServiceBanner has been replaced by AnnouncementBanners.", "allOf": [ { "$ref": "#/definitions/codersdk.BannerConfig" @@ -10997,20 +10997,20 @@ "codersdk.UpdateAppearanceConfig": { "type": "object", "properties": { + "announcement_banners": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.BannerConfig" + } + }, "application_name": { "type": "string" }, "logo_url": { "type": "string" }, - "notification_banners": { - "type": "array", - "items": { - "$ref": "#/definitions/codersdk.BannerConfig" - } - }, "service_banner": { - "description": "Deprecated: ServiceBanner has been replaced by NotificationBanners.", + "description": "Deprecated: ServiceBanner has been replaced by AnnouncementBanners.", "allOf": [ { "$ref": "#/definitions/codersdk.BannerConfig" diff --git a/coderd/appearance/appearance.go b/coderd/appearance/appearance.go index f9809036ec84b..9b45884ce115e 100644 --- a/coderd/appearance/appearance.go +++ b/coderd/appearance/appearance.go @@ -32,7 +32,7 @@ type AGPLFetcher struct{} func (AGPLFetcher) Fetch(context.Context) (codersdk.AppearanceConfig, error) { return codersdk.AppearanceConfig{ - NotificationBanners: []codersdk.BannerConfig{}, + AnnouncementBanners: []codersdk.BannerConfig{}, SupportLinks: DefaultSupportLinks, }, nil } diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index ec9d14bb57de6..3a814cfed88d2 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -1142,6 +1142,11 @@ func (q *querier) GetAllTailnetTunnels(ctx context.Context) ([]database.TailnetT return 
q.db.GetAllTailnetTunnels(ctx) } +func (q *querier) GetAnnouncementBanners(ctx context.Context) (string, error) { + // No authz checks + return q.db.GetAnnouncementBanners(ctx) +} + func (q *querier) GetAppSecurityKey(ctx context.Context) (string, error) { // No authz checks return q.db.GetAppSecurityKey(ctx) @@ -1359,11 +1364,6 @@ func (q *querier) GetLogoURL(ctx context.Context) (string, error) { return q.db.GetLogoURL(ctx) } -func (q *querier) GetNotificationBanners(ctx context.Context) (string, error) { - // No authz checks - return q.db.GetNotificationBanners(ctx) -} - func (q *querier) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceOauth2App); err != nil { return database.OAuth2ProviderApp{}, err @@ -3405,6 +3405,13 @@ func (q *querier) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Cont return fetchAndExec(q.log, q.auth, policy.ActionUpdate, fetch, q.db.UpdateWorkspacesDormantDeletingAtByTemplateID)(ctx, arg) } +func (q *querier) UpsertAnnouncementBanners(ctx context.Context, value string) error { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { + return err + } + return q.db.UpsertAnnouncementBanners(ctx, value) +} + func (q *querier) UpsertAppSecurityKey(ctx context.Context, data string) error { // No authz checks as this is done during startup return q.db.UpsertAppSecurityKey(ctx, data) @@ -3538,13 +3545,6 @@ func (q *querier) UpsertLogoURL(ctx context.Context, value string) error { return q.db.UpsertLogoURL(ctx, value) } -func (q *querier) UpsertNotificationBanners(ctx context.Context, value string) error { - if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceDeploymentConfig); err != nil { - return err - } - return q.db.UpsertNotificationBanners(ctx, value) -} - func (q *querier) UpsertOAuthSigningKey(ctx context.Context, value string) error { if err := 
q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 8e84f4644b91e..9507e1b83c00e 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -528,7 +528,7 @@ func (s *MethodTestSuite) TestLicense() { s.Run("UpsertLogoURL", s.Subtest(func(db database.Store, check *expects) { check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) - s.Run("UpsertNotificationBanners", s.Subtest(func(db database.Store, check *expects) { + s.Run("UpsertAnnouncementBanners", s.Subtest(func(db database.Store, check *expects) { check.Args("value").Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) s.Run("GetLicenseByID", s.Subtest(func(db database.Store, check *expects) { @@ -559,8 +559,8 @@ func (s *MethodTestSuite) TestLicense() { require.NoError(s.T(), err) check.Args().Asserts().Returns("value") })) - s.Run("GetNotificationBanners", s.Subtest(func(db database.Store, check *expects) { - err := db.UpsertNotificationBanners(context.Background(), "value") + s.Run("GetAnnouncementBanners", s.Subtest(func(db database.Store, check *expects) { + err := db.UpsertAnnouncementBanners(context.Background(), "value") require.NoError(s.T(), err) check.Args().Asserts().Returns("value") })) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index e9497880b274c..fe9b56e35ebdb 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -191,7 +191,7 @@ type data struct { deploymentID string derpMeshKey string lastUpdateCheck []byte - notificationBanners []byte + announcementBanners []byte healthSettings []byte applicationName string logoURL string @@ -1857,6 +1857,17 @@ func (*FakeQuerier) GetAllTailnetTunnels(context.Context) ([]database.TailnetTun return nil, ErrUnimplemented } +func (q *FakeQuerier) GetAnnouncementBanners(_ 
context.Context) (string, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + if q.announcementBanners == nil { + return "", sql.ErrNoRows + } + + return string(q.announcementBanners), nil +} + func (q *FakeQuerier) GetAppSecurityKey(_ context.Context) (string, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -2540,17 +2551,6 @@ func (q *FakeQuerier) GetLogoURL(_ context.Context) (string, error) { return q.logoURL, nil } -func (q *FakeQuerier) GetNotificationBanners(_ context.Context) (string, error) { - q.mutex.RLock() - defer q.mutex.RUnlock() - - if q.notificationBanners == nil { - return "", sql.ErrNoRows - } - - return string(q.notificationBanners), nil -} - func (q *FakeQuerier) GetOAuth2ProviderAppByID(_ context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { q.mutex.Lock() defer q.mutex.Unlock() @@ -8358,6 +8358,14 @@ func (q *FakeQuerier) UpdateWorkspacesDormantDeletingAtByTemplateID(_ context.Co return nil } +func (q *FakeQuerier) UpsertAnnouncementBanners(_ context.Context, data string) error { + q.mutex.RLock() + defer q.mutex.RUnlock() + + q.announcementBanners = []byte(data) + return nil +} + func (q *FakeQuerier) UpsertAppSecurityKey(_ context.Context, data string) error { q.mutex.Lock() defer q.mutex.Unlock() @@ -8472,14 +8480,6 @@ func (q *FakeQuerier) UpsertLogoURL(_ context.Context, data string) error { return nil } -func (q *FakeQuerier) UpsertNotificationBanners(_ context.Context, data string) error { - q.mutex.RLock() - defer q.mutex.RUnlock() - - q.notificationBanners = []byte(data) - return nil -} - func (q *FakeQuerier) UpsertOAuthSigningKey(_ context.Context, value string) error { q.mutex.Lock() defer q.mutex.Unlock() diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index bb5a38ef82c61..aff562fcdb89f 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -431,6 +431,13 @@ func (m metricsStore) GetAllTailnetTunnels(ctx 
context.Context) ([]database.Tail return r0, r1 } +func (m metricsStore) GetAnnouncementBanners(ctx context.Context) (string, error) { + start := time.Now() + r0, r1 := m.s.GetAnnouncementBanners(ctx) + m.queryLatencies.WithLabelValues("GetAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) GetAppSecurityKey(ctx context.Context) (string, error) { start := time.Now() key, err := m.s.GetAppSecurityKey(ctx) @@ -662,13 +669,6 @@ func (m metricsStore) GetLogoURL(ctx context.Context) (string, error) { return url, err } -func (m metricsStore) GetNotificationBanners(ctx context.Context) (string, error) { - start := time.Now() - r0, r1 := m.s.GetNotificationBanners(ctx) - m.queryLatencies.WithLabelValues("GetNotificationBanners").Observe(time.Since(start).Seconds()) - return r0, r1 -} - func (m metricsStore) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (database.OAuth2ProviderApp, error) { start := time.Now() r0, r1 := m.s.GetOAuth2ProviderAppByID(ctx, id) @@ -2174,6 +2174,13 @@ func (m metricsStore) UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context. 
return r0 } +func (m metricsStore) UpsertAnnouncementBanners(ctx context.Context, value string) error { + start := time.Now() + r0 := m.s.UpsertAnnouncementBanners(ctx, value) + m.queryLatencies.WithLabelValues("UpsertAnnouncementBanners").Observe(time.Since(start).Seconds()) + return r0 +} + func (m metricsStore) UpsertAppSecurityKey(ctx context.Context, value string) error { start := time.Now() r0 := m.s.UpsertAppSecurityKey(ctx, value) @@ -2230,13 +2237,6 @@ func (m metricsStore) UpsertLogoURL(ctx context.Context, value string) error { return r0 } -func (m metricsStore) UpsertNotificationBanners(ctx context.Context, value string) error { - start := time.Now() - r0 := m.s.UpsertNotificationBanners(ctx, value) - m.queryLatencies.WithLabelValues("UpsertNotificationBanners").Observe(time.Since(start).Seconds()) - return r0 -} - func (m metricsStore) UpsertOAuthSigningKey(ctx context.Context, value string) error { start := time.Now() r0 := m.s.UpsertOAuthSigningKey(ctx, value) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 90d7a20eb6ff8..3ef96d13f8b33 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -764,6 +764,21 @@ func (mr *MockStoreMockRecorder) GetAllTailnetTunnels(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAllTailnetTunnels", reflect.TypeOf((*MockStore)(nil).GetAllTailnetTunnels), arg0) } +// GetAnnouncementBanners mocks base method. +func (m *MockStore) GetAnnouncementBanners(arg0 context.Context) (string, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetAnnouncementBanners", arg0) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetAnnouncementBanners indicates an expected call of GetAnnouncementBanners. 
+func (mr *MockStoreMockRecorder) GetAnnouncementBanners(arg0 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetAnnouncementBanners", reflect.TypeOf((*MockStore)(nil).GetAnnouncementBanners), arg0) +} + // GetAppSecurityKey mocks base method. func (m *MockStore) GetAppSecurityKey(arg0 context.Context) (string, error) { m.ctrl.T.Helper() @@ -1304,21 +1319,6 @@ func (mr *MockStoreMockRecorder) GetLogoURL(arg0 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetLogoURL", reflect.TypeOf((*MockStore)(nil).GetLogoURL), arg0) } -// GetNotificationBanners mocks base method. -func (m *MockStore) GetNotificationBanners(arg0 context.Context) (string, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetNotificationBanners", arg0) - ret0, _ := ret[0].(string) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetNotificationBanners indicates an expected call of GetNotificationBanners. -func (mr *MockStoreMockRecorder) GetNotificationBanners(arg0 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetNotificationBanners", reflect.TypeOf((*MockStore)(nil).GetNotificationBanners), arg0) -} - // GetOAuth2ProviderAppByID mocks base method. func (m *MockStore) GetOAuth2ProviderAppByID(arg0 context.Context, arg1 uuid.UUID) (database.OAuth2ProviderApp, error) { m.ctrl.T.Helper() @@ -4553,6 +4553,20 @@ func (mr *MockStoreMockRecorder) UpdateWorkspacesDormantDeletingAtByTemplateID(a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateWorkspacesDormantDeletingAtByTemplateID", reflect.TypeOf((*MockStore)(nil).UpdateWorkspacesDormantDeletingAtByTemplateID), arg0, arg1) } +// UpsertAnnouncementBanners mocks base method. 
+func (m *MockStore) UpsertAnnouncementBanners(arg0 context.Context, arg1 string) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpsertAnnouncementBanners", arg0, arg1) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpsertAnnouncementBanners indicates an expected call of UpsertAnnouncementBanners. +func (mr *MockStoreMockRecorder) UpsertAnnouncementBanners(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertAnnouncementBanners", reflect.TypeOf((*MockStore)(nil).UpsertAnnouncementBanners), arg0, arg1) +} + // UpsertAppSecurityKey mocks base method. func (m *MockStore) UpsertAppSecurityKey(arg0 context.Context, arg1 string) error { m.ctrl.T.Helper() @@ -4666,20 +4680,6 @@ func (mr *MockStoreMockRecorder) UpsertLogoURL(arg0, arg1 any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertLogoURL", reflect.TypeOf((*MockStore)(nil).UpsertLogoURL), arg0, arg1) } -// UpsertNotificationBanners mocks base method. -func (m *MockStore) UpsertNotificationBanners(arg0 context.Context, arg1 string) error { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "UpsertNotificationBanners", arg0, arg1) - ret0, _ := ret[0].(error) - return ret0 -} - -// UpsertNotificationBanners indicates an expected call of UpsertNotificationBanners. -func (mr *MockStoreMockRecorder) UpsertNotificationBanners(arg0, arg1 any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpsertNotificationBanners", reflect.TypeOf((*MockStore)(nil).UpsertNotificationBanners), arg0, arg1) -} - // UpsertOAuthSigningKey mocks base method. 
func (m *MockStore) UpsertOAuthSigningKey(arg0 context.Context, arg1 string) error { m.ctrl.T.Helper() diff --git a/coderd/database/migrations/000213_announcement_banners.down.sql b/coderd/database/migrations/000213_announcement_banners.down.sql new file mode 100644 index 0000000000000..0ec90c4a9e05a --- /dev/null +++ b/coderd/database/migrations/000213_announcement_banners.down.sql @@ -0,0 +1,3 @@ +update site_configs SET + key = 'notification_banners' + where key = 'announcement_banners'; diff --git a/coderd/database/migrations/000213_announcement_banners.up.sql b/coderd/database/migrations/000213_announcement_banners.up.sql new file mode 100644 index 0000000000000..a76e4b6f25629 --- /dev/null +++ b/coderd/database/migrations/000213_announcement_banners.up.sql @@ -0,0 +1,3 @@ +update site_configs SET + key = 'announcement_banners' + where key = 'notification_banners'; diff --git a/coderd/database/querier.go b/coderd/database/querier.go index a590ae87bc8fd..6e2b1ff60cfdf 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -97,6 +97,7 @@ type sqlcQuerier interface { GetAllTailnetCoordinators(ctx context.Context) ([]TailnetCoordinator, error) GetAllTailnetPeers(ctx context.Context) ([]TailnetPeer, error) GetAllTailnetTunnels(ctx context.Context) ([]TailnetTunnel, error) + GetAnnouncementBanners(ctx context.Context) (string, error) GetAppSecurityKey(ctx context.Context) (string, error) GetApplicationName(ctx context.Context) (string, error) // GetAuditLogsBefore retrieves `row_limit` number of audit logs before the provided @@ -137,7 +138,6 @@ type sqlcQuerier interface { GetLicenseByID(ctx context.Context, id int32) (License, error) GetLicenses(ctx context.Context) ([]License, error) GetLogoURL(ctx context.Context) (string, error) - GetNotificationBanners(ctx context.Context) (string, error) GetOAuth2ProviderAppByID(ctx context.Context, id uuid.UUID) (OAuth2ProviderApp, error) GetOAuth2ProviderAppCodeByID(ctx context.Context, id uuid.UUID) 
(OAuth2ProviderAppCode, error) GetOAuth2ProviderAppCodeByPrefix(ctx context.Context, secretPrefix []byte) (OAuth2ProviderAppCode, error) @@ -416,6 +416,7 @@ type sqlcQuerier interface { UpdateWorkspaceProxyDeleted(ctx context.Context, arg UpdateWorkspaceProxyDeletedParams) error UpdateWorkspaceTTL(ctx context.Context, arg UpdateWorkspaceTTLParams) error UpdateWorkspacesDormantDeletingAtByTemplateID(ctx context.Context, arg UpdateWorkspacesDormantDeletingAtByTemplateIDParams) error + UpsertAnnouncementBanners(ctx context.Context, value string) error UpsertAppSecurityKey(ctx context.Context, value string) error UpsertApplicationName(ctx context.Context, value string) error UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleParams) (CustomRole, error) @@ -427,7 +428,6 @@ type sqlcQuerier interface { UpsertJFrogXrayScanByWorkspaceAndAgentID(ctx context.Context, arg UpsertJFrogXrayScanByWorkspaceAndAgentIDParams) error UpsertLastUpdateCheck(ctx context.Context, value string) error UpsertLogoURL(ctx context.Context, value string) error - UpsertNotificationBanners(ctx context.Context, value string) error UpsertOAuthSigningKey(ctx context.Context, value string) error UpsertProvisionerDaemon(ctx context.Context, arg UpsertProvisionerDaemonParams) (ProvisionerDaemon, error) UpsertTailnetAgent(ctx context.Context, arg UpsertTailnetAgentParams) (TailnetAgent, error) diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index bcc961c88e048..56fcfaf998e4f 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5727,6 +5727,17 @@ func (q *sqlQuerier) UpsertCustomRole(ctx context.Context, arg UpsertCustomRoleP return i, err } +const getAnnouncementBanners = `-- name: GetAnnouncementBanners :one +SELECT value FROM site_configs WHERE key = 'announcement_banners' +` + +func (q *sqlQuerier) GetAnnouncementBanners(ctx context.Context) (string, error) { + row := q.db.QueryRowContext(ctx, getAnnouncementBanners) + var value 
string + err := row.Scan(&value) + return value, err +} + const getAppSecurityKey = `-- name: GetAppSecurityKey :one SELECT value FROM site_configs WHERE key = 'app_signing_key' ` @@ -5823,17 +5834,6 @@ func (q *sqlQuerier) GetLogoURL(ctx context.Context) (string, error) { return value, err } -const getNotificationBanners = `-- name: GetNotificationBanners :one -SELECT value FROM site_configs WHERE key = 'notification_banners' -` - -func (q *sqlQuerier) GetNotificationBanners(ctx context.Context) (string, error) { - row := q.db.QueryRowContext(ctx, getNotificationBanners) - var value string - err := row.Scan(&value) - return value, err -} - const getOAuthSigningKey = `-- name: GetOAuthSigningKey :one SELECT value FROM site_configs WHERE key = 'oauth_signing_key' ` @@ -5863,6 +5863,16 @@ func (q *sqlQuerier) InsertDeploymentID(ctx context.Context, value string) error return err } +const upsertAnnouncementBanners = `-- name: UpsertAnnouncementBanners :exec +INSERT INTO site_configs (key, value) VALUES ('announcement_banners', $1) +ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'announcement_banners' +` + +func (q *sqlQuerier) UpsertAnnouncementBanners(ctx context.Context, value string) error { + _, err := q.db.ExecContext(ctx, upsertAnnouncementBanners, value) + return err +} + const upsertAppSecurityKey = `-- name: UpsertAppSecurityKey :exec INSERT INTO site_configs (key, value) VALUES ('app_signing_key', $1) ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'app_signing_key' @@ -5936,16 +5946,6 @@ func (q *sqlQuerier) UpsertLogoURL(ctx context.Context, value string) error { return err } -const upsertNotificationBanners = `-- name: UpsertNotificationBanners :exec -INSERT INTO site_configs (key, value) VALUES ('notification_banners', $1) -ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'notification_banners' -` - -func (q *sqlQuerier) UpsertNotificationBanners(ctx context.Context, value string) error { - _, 
err := q.db.ExecContext(ctx, upsertNotificationBanners, value) - return err -} - const upsertOAuthSigningKey = `-- name: UpsertOAuthSigningKey :exec INSERT INTO site_configs (key, value) VALUES ('oauth_signing_key', $1) ON CONFLICT (key) DO UPDATE set value = $1 WHERE site_configs.key = 'oauth_signing_key' diff --git a/coderd/database/queries/siteconfig.sql b/coderd/database/queries/siteconfig.sql index b827c6e19e959..2b56a6d1455af 100644 --- a/coderd/database/queries/siteconfig.sql +++ b/coderd/database/queries/siteconfig.sql @@ -36,12 +36,12 @@ ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'last_update -- name: GetLastUpdateCheck :one SELECT value FROM site_configs WHERE key = 'last_update_check'; --- name: UpsertNotificationBanners :exec -INSERT INTO site_configs (key, value) VALUES ('notification_banners', $1) -ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'notification_banners'; +-- name: UpsertAnnouncementBanners :exec +INSERT INTO site_configs (key, value) VALUES ('announcement_banners', $1) +ON CONFLICT (key) DO UPDATE SET value = $1 WHERE site_configs.key = 'announcement_banners'; --- name: GetNotificationBanners :one -SELECT value FROM site_configs WHERE key = 'notification_banners'; +-- name: GetAnnouncementBanners :one +SELECT value FROM site_configs WHERE key = 'announcement_banners'; -- name: UpsertLogoURL :exec INSERT INTO site_configs (key, value) VALUES ('logo_url', $1) diff --git a/codersdk/deployment.go b/codersdk/deployment.go index dd52cae77d1b4..c89a78668637d 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -2105,18 +2105,18 @@ func (c *Client) DeploymentStats(ctx context.Context) (DeploymentStats, error) { type AppearanceConfig struct { ApplicationName string `json:"application_name"` LogoURL string `json:"logo_url"` - // Deprecated: ServiceBanner has been replaced by NotificationBanners. + // Deprecated: ServiceBanner has been replaced by AnnouncementBanners. 
ServiceBanner BannerConfig `json:"service_banner"` - NotificationBanners []BannerConfig `json:"notification_banners"` + AnnouncementBanners []BannerConfig `json:"announcement_banners"` SupportLinks []LinkConfig `json:"support_links,omitempty"` } type UpdateAppearanceConfig struct { ApplicationName string `json:"application_name"` LogoURL string `json:"logo_url"` - // Deprecated: ServiceBanner has been replaced by NotificationBanners. + // Deprecated: ServiceBanner has been replaced by AnnouncementBanners. ServiceBanner BannerConfig `json:"service_banner"` - NotificationBanners []BannerConfig `json:"notification_banners"` + AnnouncementBanners []BannerConfig `json:"announcement_banners"` } // Deprecated: ServiceBannerConfig has been renamed to BannerConfig. diff --git a/docs/api/enterprise.md b/docs/api/enterprise.md index 800e9e517196d..3cf43102e7c77 100644 --- a/docs/api/enterprise.md +++ b/docs/api/enterprise.md @@ -19,15 +19,15 @@ curl -X GET http://coder-server:8080/api/v2/appearance \ ```json { - "application_name": "string", - "logo_url": "string", - "notification_banners": [ + "announcement_banners": [ { "background_color": "string", "enabled": true, "message": "string" } ], + "application_name": "string", + "logo_url": "string", "service_banner": { "background_color": "string", "enabled": true, @@ -69,15 +69,15 @@ curl -X PUT http://coder-server:8080/api/v2/appearance \ ```json { - "application_name": "string", - "logo_url": "string", - "notification_banners": [ + "announcement_banners": [ { "background_color": "string", "enabled": true, "message": "string" } ], + "application_name": "string", + "logo_url": "string", "service_banner": { "background_color": "string", "enabled": true, @@ -98,15 +98,15 @@ curl -X PUT http://coder-server:8080/api/v2/appearance \ ```json { - "application_name": "string", - "logo_url": "string", - "notification_banners": [ + "announcement_banners": [ { "background_color": "string", "enabled": true, "message": "string" } ], + 
"application_name": "string", + "logo_url": "string", "service_banner": { "background_color": "string", "enabled": true, diff --git a/docs/api/schemas.md b/docs/api/schemas.md index 978da35a58d02..82804508b0e96 100644 --- a/docs/api/schemas.md +++ b/docs/api/schemas.md @@ -749,15 +749,15 @@ ```json { - "application_name": "string", - "logo_url": "string", - "notification_banners": [ + "announcement_banners": [ { "background_color": "string", "enabled": true, "message": "string" } ], + "application_name": "string", + "logo_url": "string", "service_banner": { "background_color": "string", "enabled": true, @@ -777,10 +777,10 @@ | Name | Type | Required | Restrictions | Description | | ---------------------- | ------------------------------------------------------- | -------- | ------------ | ------------------------------------------------------------------- | +| `announcement_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | | `application_name` | string | false | | | | `logo_url` | string | false | | | -| `notification_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | -| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by NotificationBanners. | +| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by AnnouncementBanners. | | `support_links` | array of [codersdk.LinkConfig](#codersdklinkconfig) | false | | | ## codersdk.ArchiveTemplateVersionsRequest @@ -5301,15 +5301,15 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o ```json { - "application_name": "string", - "logo_url": "string", - "notification_banners": [ + "announcement_banners": [ { "background_color": "string", "enabled": true, "message": "string" } ], + "application_name": "string", + "logo_url": "string", "service_banner": { "background_color": "string", "enabled": true, @@ -5322,10 +5322,10 @@ CreateWorkspaceRequest provides options for creating a new workspace. Only one o | Name | Type | Required | Restrictions | Description | | ---------------------- | ------------------------------------------------------- | -------- | ------------ | ------------------------------------------------------------------- | +| `announcement_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | | `application_name` | string | false | | | | `logo_url` | string | false | | | -| `notification_banners` | array of [codersdk.BannerConfig](#codersdkbannerconfig) | false | | | -| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by NotificationBanners. | +| `service_banner` | [codersdk.BannerConfig](#codersdkbannerconfig) | false | | Deprecated: ServiceBanner has been replaced by AnnouncementBanners. 
| ## codersdk.UpdateCheckResponse diff --git a/enterprise/coderd/appearance.go b/enterprise/coderd/appearance.go index 8a9d51cdb9070..b53c812c3e748 100644 --- a/enterprise/coderd/appearance.go +++ b/enterprise/coderd/appearance.go @@ -58,7 +58,7 @@ func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfi var ( applicationName string logoURL string - notificationBannersJSON string + announcementBannersJSON string ) eg.Go(func() (err error) { applicationName, err = f.database.GetApplicationName(ctx) @@ -75,7 +75,7 @@ func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfi return nil }) eg.Go(func() (err error) { - notificationBannersJSON, err = f.database.GetNotificationBanners(ctx) + announcementBannersJSON, err = f.database.GetAnnouncementBanners(ctx) if err != nil && !errors.Is(err, sql.ErrNoRows) { return xerrors.Errorf("get notification banners: %w", err) } @@ -89,22 +89,22 @@ func (f *appearanceFetcher) Fetch(ctx context.Context) (codersdk.AppearanceConfi cfg := codersdk.AppearanceConfig{ ApplicationName: applicationName, LogoURL: logoURL, - NotificationBanners: []codersdk.BannerConfig{}, + AnnouncementBanners: []codersdk.BannerConfig{}, SupportLinks: agpl.DefaultSupportLinks, } - if notificationBannersJSON != "" { - err = json.Unmarshal([]byte(notificationBannersJSON), &cfg.NotificationBanners) + if announcementBannersJSON != "" { + err = json.Unmarshal([]byte(announcementBannersJSON), &cfg.AnnouncementBanners) if err != nil { return codersdk.AppearanceConfig{}, xerrors.Errorf( - "unmarshal notification banners json: %w, raw: %s", err, notificationBannersJSON, + "unmarshal announcement banners json: %w, raw: %s", err, announcementBannersJSON, ) } // Redundant, but improves compatibility with slightly mismatched agent versions. // Maybe we can remove this after a grace period? 
-Kayla, May 6th 2024 - if len(cfg.NotificationBanners) > 0 { - cfg.ServiceBanner = cfg.NotificationBanners[0] + if len(cfg.AnnouncementBanners) > 0 { + cfg.ServiceBanner = cfg.AnnouncementBanners[0] } } if len(f.supportLinks) > 0 { @@ -149,7 +149,7 @@ func (api *API) putAppearance(rw http.ResponseWriter, r *http.Request) { return } - for _, banner := range appearance.NotificationBanners { + for _, banner := range appearance.AnnouncementBanners { if err := validateHexColor(banner.BackgroundColor); err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ Message: fmt.Sprintf("Invalid color format: %q", banner.BackgroundColor), @@ -159,22 +159,22 @@ func (api *API) putAppearance(rw http.ResponseWriter, r *http.Request) { } } - if appearance.NotificationBanners == nil { - appearance.NotificationBanners = []codersdk.BannerConfig{} + if appearance.AnnouncementBanners == nil { + appearance.AnnouncementBanners = []codersdk.BannerConfig{} } - notificationBannersJSON, err := json.Marshal(appearance.NotificationBanners) + announcementBannersJSON, err := json.Marshal(appearance.AnnouncementBanners) if err != nil { httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Unable to marshal notification banners", + Message: "Unable to marshal announcement banners", Detail: err.Error(), }) return } - err = api.Database.UpsertNotificationBanners(ctx, string(notificationBannersJSON)) + err = api.Database.UpsertAnnouncementBanners(ctx, string(announcementBannersJSON)) if err != nil { httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ - Message: "Unable to set notification banners", + Message: "Unable to set announcement banners", Detail: err.Error(), }) return diff --git a/enterprise/coderd/appearance_test.go b/enterprise/coderd/appearance_test.go index 745f90e00d03b..0e2358e1eef58 100644 --- a/enterprise/coderd/appearance_test.go +++ b/enterprise/coderd/appearance_test.go @@ -55,7 +55,7 @@ func 
TestCustomLogoAndCompanyName(t *testing.T) { require.Equal(t, uac.LogoURL, got.LogoURL) } -func TestNotificationBanners(t *testing.T) { +func TestAnnouncementBanners(t *testing.T) { t.Parallel() t.Run("User", func(t *testing.T) { @@ -70,7 +70,7 @@ func TestNotificationBanners(t *testing.T) { // Without a license, there should be no banners. sb, err := basicUserClient.Appearance(ctx) require.NoError(t, err) - require.Empty(t, sb.NotificationBanners) + require.Empty(t, sb.AnnouncementBanners) coderdenttest.AddLicense(t, adminClient, coderdenttest.LicenseOptions{ Features: license.Features{ @@ -81,11 +81,11 @@ func TestNotificationBanners(t *testing.T) { // Default state sb, err = basicUserClient.Appearance(ctx) require.NoError(t, err) - require.Empty(t, sb.NotificationBanners) + require.Empty(t, sb.AnnouncementBanners) // Regular user should be unable to set the banner uac := codersdk.UpdateAppearanceConfig{ - NotificationBanners: []codersdk.BannerConfig{{Enabled: true}}, + AnnouncementBanners: []codersdk.BannerConfig{{Enabled: true}}, } err = basicUserClient.UpdateAppearance(ctx, uac) require.Error(t, err) @@ -96,7 +96,7 @@ func TestNotificationBanners(t *testing.T) { // But an admin can wantBanner := codersdk.UpdateAppearanceConfig{ - NotificationBanners: []codersdk.BannerConfig{{ + AnnouncementBanners: []codersdk.BannerConfig{{ Enabled: true, Message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", BackgroundColor: "#00FF00", @@ -106,10 +106,10 @@ func TestNotificationBanners(t *testing.T) { require.NoError(t, err) gotBanner, err := adminClient.Appearance(ctx) //nolint:gocritic // we should assert at least once that the owner can get the banner require.NoError(t, err) - require.Equal(t, wantBanner.NotificationBanners, gotBanner.NotificationBanners) + require.Equal(t, wantBanner.AnnouncementBanners, gotBanner.AnnouncementBanners) // But even an admin can't give a bad color - wantBanner.NotificationBanners[0].BackgroundColor = "#bad color" + 
wantBanner.AnnouncementBanners[0].BackgroundColor = "#bad color" err = adminClient.UpdateAppearance(ctx, wantBanner) require.Error(t, err) var sdkErr *codersdk.Error @@ -139,7 +139,7 @@ func TestNotificationBanners(t *testing.T) { }, }) cfg := codersdk.UpdateAppearanceConfig{ - NotificationBanners: []codersdk.BannerConfig{{ + AnnouncementBanners: []codersdk.BannerConfig{{ Enabled: true, Message: "The beep-bop will be boop-beeped on Saturday at 12AM PST.", BackgroundColor: "#00FF00", @@ -155,35 +155,35 @@ func TestNotificationBanners(t *testing.T) { agentClient := agentsdk.New(client.URL) agentClient.SetSessionToken(r.AgentToken) - banners := requireGetNotificationBanners(ctx, t, agentClient) - require.Equal(t, cfg.NotificationBanners, banners) + banners := requireGetAnnouncementBanners(ctx, t, agentClient) + require.Equal(t, cfg.AnnouncementBanners, banners) // Create an AGPL Coderd against the same database agplClient := coderdtest.New(t, &coderdtest.Options{Database: store, Pubsub: ps}) agplAgentClient := agentsdk.New(agplClient.URL) agplAgentClient.SetSessionToken(r.AgentToken) - banners = requireGetNotificationBanners(ctx, t, agplAgentClient) + banners = requireGetAnnouncementBanners(ctx, t, agplAgentClient) require.Equal(t, []codersdk.BannerConfig{}, banners) // No license means no banner. 
err = client.DeleteLicense(ctx, lic.ID) require.NoError(t, err) - banners = requireGetNotificationBanners(ctx, t, agentClient) + banners = requireGetAnnouncementBanners(ctx, t, agentClient) require.Equal(t, []codersdk.BannerConfig{}, banners) }) } -func requireGetNotificationBanners(ctx context.Context, t *testing.T, client *agentsdk.Client) []codersdk.BannerConfig { +func requireGetAnnouncementBanners(ctx context.Context, t *testing.T, client *agentsdk.Client) []codersdk.BannerConfig { cc, err := client.ConnectRPC(ctx) require.NoError(t, err) defer func() { _ = cc.Close() }() aAPI := proto.NewDRPCAgentClient(cc) - bannersProto, err := aAPI.GetNotificationBanners(ctx, &proto.GetNotificationBannersRequest{}) + bannersProto, err := aAPI.GetAnnouncementBanners(ctx, &proto.GetAnnouncementBannersRequest{}) require.NoError(t, err) - banners := make([]codersdk.BannerConfig, 0, len(bannersProto.NotificationBanners)) - for _, bannerProto := range bannersProto.NotificationBanners { + banners := make([]codersdk.BannerConfig, 0, len(bannersProto.AnnouncementBanners)) + for _, bannerProto := range bannersProto.AnnouncementBanners { banners = append(banners, agentsdk.BannerConfigFromProto(bannerProto)) } return banners diff --git a/site/src/api/api.ts b/site/src/api/api.ts index a7550f44fdb90..7e8829201dc3a 100644 --- a/site/src/api/api.ts +++ b/site/src/api/api.ts @@ -1581,7 +1581,7 @@ class ApiMethods { return { application_name: "", logo_url: "", - notification_banners: [], + announcement_banners: [], service_banner: { enabled: false, }, diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 171f6744680cb..88e5c7e508f67 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -49,7 +49,7 @@ export interface AppearanceConfig { readonly application_name: string; readonly logo_url: string; readonly service_banner: BannerConfig; - readonly notification_banners: readonly BannerConfig[]; + readonly announcement_banners: 
readonly BannerConfig[]; readonly support_links?: readonly LinkConfig[]; } @@ -1309,7 +1309,7 @@ export interface UpdateAppearanceConfig { readonly application_name: string; readonly logo_url: string; readonly service_banner: BannerConfig; - readonly notification_banners: readonly BannerConfig[]; + readonly announcement_banners: readonly BannerConfig[]; } // From codersdk/updatecheck.go diff --git a/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.stories.tsx b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.stories.tsx similarity index 60% rename from site/src/modules/dashboard/NotificationBanners/NotificationBannerView.stories.tsx rename to site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.stories.tsx index ee5f8dece47ca..9515bd9f1cd46 100644 --- a/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.stories.tsx +++ b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.stories.tsx @@ -1,13 +1,13 @@ import type { Meta, StoryObj } from "@storybook/react"; -import { NotificationBannerView } from "./NotificationBannerView"; +import { AnnouncementBannerView } from "./AnnouncementBannerView"; -const meta: Meta = { - title: "modules/dashboard/NotificationBannerView", - component: NotificationBannerView, +const meta: Meta = { + title: "modules/dashboard/AnnouncementBannerView", + component: AnnouncementBannerView, }; export default meta; -type Story = StoryObj; +type Story = StoryObj; export const Production: Story = { args: { diff --git a/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx similarity index 89% rename from site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx rename to site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx index 4832ea93f6065..91fe2276b6664 100644 --- 
a/site/src/modules/dashboard/NotificationBanners/NotificationBannerView.tsx +++ b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBannerView.tsx @@ -3,12 +3,12 @@ import type { FC } from "react"; import { InlineMarkdown } from "components/Markdown/Markdown"; import { readableForegroundColor } from "utils/colors"; -export interface NotificationBannerViewProps { +export interface AnnouncementBannerViewProps { message?: string; backgroundColor?: string; } -export const NotificationBannerView: FC = ({ +export const AnnouncementBannerView: FC = ({ message, backgroundColor, }) => { diff --git a/site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx similarity index 69% rename from site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx rename to site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx index a8ab663721a46..5f1e32e3fd016 100644 --- a/site/src/modules/dashboard/NotificationBanners/NotificationBanners.tsx +++ b/site/src/modules/dashboard/AnnouncementBanners/AnnouncementBanners.tsx @@ -1,10 +1,10 @@ import type { FC } from "react"; import { useDashboard } from "modules/dashboard/useDashboard"; -import { NotificationBannerView } from "./NotificationBannerView"; +import { AnnouncementBannerView } from "./AnnouncementBannerView"; -export const NotificationBanners: FC = () => { +export const AnnouncementBanners: FC = () => { const { appearance, entitlements } = useDashboard(); - const notificationBanners = appearance.notification_banners; + const announcementBanners = appearance.announcement_banners; const isEntitled = entitlements.features.appearance.entitlement !== "not_entitled"; @@ -14,10 +14,10 @@ export const NotificationBanners: FC = () => { return ( <> - {notificationBanners + {announcementBanners .filter((banner) => banner.enabled) .map((banner) => ( - { return ( <> {canViewDeployment && } - +
= { - title: "pages/DeploySettingsPage/NotificationBannerDialog", - component: NotificationBannerDialog, +const meta: Meta = { + title: "pages/DeploySettingsPage/AnnouncementBannerDialog", + component: AnnouncementBannerDialog, args: { banner: { enabled: true, @@ -17,8 +17,8 @@ const meta: Meta = { }; export default meta; -type Story = StoryObj; +type Story = StoryObj; const Example: Story = {}; -export { Example as NotificationBannerDialog }; +export { Example as AnnouncementBannerDialog }; diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.tsx similarity index 92% rename from site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx rename to site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.tsx index 6b5ffaf6fc27b..4664a5365fa44 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerDialog.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerDialog.tsx @@ -7,16 +7,16 @@ import { BlockPicker } from "react-color"; import type { BannerConfig } from "api/typesGenerated"; import { Dialog, DialogActionButtons } from "components/Dialogs/Dialog"; import { Stack } from "components/Stack/Stack"; -import { NotificationBannerView } from "modules/dashboard/NotificationBanners/NotificationBannerView"; +import { AnnouncementBannerView } from "modules/dashboard/AnnouncementBanners/AnnouncementBannerView"; import { getFormHelpers } from "utils/formUtils"; -interface NotificationBannerDialogProps { +interface AnnouncementBannerDialogProps { banner: BannerConfig; onCancel: () => void; onUpdate: (banner: Partial) => Promise; } -export const NotificationBannerDialog: FC = ({ +export const AnnouncementBannerDialog: FC = ({ banner, onCancel, onUpdate, @@ -39,14 +39,14 @@ export const NotificationBannerDialog: FC = ({ {/* Banner 
preview */}
-
-

Notification banner

+

Announcement banner

Message

diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerItem.tsx similarity index 94% rename from site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx rename to site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerItem.tsx index 76636a30c4492..7cd35969340b8 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerItem.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerItem.tsx @@ -12,7 +12,7 @@ import { ThreeDotsButton, } from "components/MoreMenu/MoreMenu"; -interface NotificationBannerItemProps { +interface AnnouncementBannerItemProps { enabled: boolean; backgroundColor?: string; message?: string; @@ -21,7 +21,7 @@ interface NotificationBannerItemProps { onDelete: () => void; } -export const NotificationBannerItem: FC = ({ +export const AnnouncementBannerItem: FC = ({ enabled, backgroundColor = "#004852", message, diff --git a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx similarity index 91% rename from site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx rename to site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx index d5611af119614..6d9b871ee24dd 100644 --- a/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/NotificationBannerSettings.tsx +++ b/site/src/pages/DeploySettingsPage/AppearanceSettingsPage/AnnouncementBannerSettings.tsx @@ -13,20 +13,20 @@ import type { BannerConfig } from "api/typesGenerated"; import { ConfirmDialog } from "components/Dialogs/ConfirmDialog/ConfirmDialog"; import { EmptyState } from "components/EmptyState/EmptyState"; import { Stack } from "components/Stack/Stack"; -import { 
NotificationBannerDialog } from "./NotificationBannerDialog"; -import { NotificationBannerItem } from "./NotificationBannerItem"; +import { AnnouncementBannerDialog } from "./AnnouncementBannerDialog"; +import { AnnouncementBannerItem } from "./AnnouncementBannerItem"; -interface NotificationBannerSettingsProps { +interface AnnouncementBannersettingsProps { isEntitled: boolean; - notificationBanners: readonly BannerConfig[]; + announcementBanners: readonly BannerConfig[]; onSubmit: (banners: readonly BannerConfig[]) => Promise; } -export const NotificationBannerSettings: FC< - NotificationBannerSettingsProps -> = ({ isEntitled, notificationBanners, onSubmit }) => { +export const AnnouncementBannerSettings: FC< + AnnouncementBannersettingsProps +> = ({ isEntitled, announcementBanners, onSubmit }) => { const theme = useTheme(); - const [banners, setBanners] = useState(notificationBanners); + const [banners, setBanners] = useState(announcementBanners); const [editingBannerId, setEditingBannerId] = useState(null); const [deletingBannerId, setDeletingBannerId] = useState(null); @@ -84,7 +84,7 @@ export const NotificationBannerSettings: FC< fontWeight: 600, }} > - Notification Banners + Announcement Banners
- - onSaveAppearance({ notification_banners: notificationBanners }) + announcementBanners={appearance.announcement_banners || []} + onSubmit={(announcementBanners) => + onSaveAppearance({ announcement_banners: announcementBanners }) } /> diff --git a/site/src/pages/WorkspacePage/WorkspacePage.tsx b/site/src/pages/WorkspacePage/WorkspacePage.tsx index 11869d6254f82..a058d852f056b 100644 --- a/site/src/pages/WorkspacePage/WorkspacePage.tsx +++ b/site/src/pages/WorkspacePage/WorkspacePage.tsx @@ -11,8 +11,8 @@ import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Loader } from "components/Loader/Loader"; import { Margins } from "components/Margins/Margins"; import { useEffectEvent } from "hooks/hookPolyfills"; +import { AnnouncementBanners } from "modules/dashboard/AnnouncementBanners/AnnouncementBanners"; import { Navbar } from "modules/dashboard/Navbar/Navbar"; -import { NotificationBanners } from "modules/dashboard/NotificationBanners/NotificationBanners"; import { useDashboard } from "modules/dashboard/useDashboard"; import { workspaceChecks, type WorkspacePermissions } from "./permissions"; import { WorkspaceReadyPage } from "./WorkspaceReadyPage"; @@ -106,7 +106,7 @@ export const WorkspacePage: FC = () => { return ( <> - +
{pageError ? ( diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 5ff5fa6cd84c7..1e2cf21e23383 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -2372,7 +2372,7 @@ export const MockAppearanceConfig: TypesGen.AppearanceConfig = { service_banner: { enabled: false, }, - notification_banners: [], + announcement_banners: [], }; export const MockWorkspaceBuildParameter1: TypesGen.WorkspaceBuildParameter = { From 40af6206ccdb855d25c3e938d3ad8b7a3a0f6879 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Sun, 2 Jun 2024 12:10:28 -0500 Subject: [PATCH 146/149] chore: upgrade terraform to `v1.8.5` (#13429) (cherry picked from commit b723da9e918ad05f5eb3132387832ecc29f59483) --- .github/actions/setup-tf/action.yaml | 2 +- .github/workflows/typos.toml | 1 + docs/install/offline.md | 2 +- dogfood/Dockerfile | 2 +- install.sh | 2 +- provisioner/terraform/install.go | 4 +-- .../calling-module/calling-module.tfplan.json | 9 ++++-- .../calling-module.tfstate.json | 10 +++--- .../chaining-resources.tfplan.json | 9 ++++-- .../chaining-resources.tfstate.json | 10 +++--- .../conflicting-resources.tfplan.json | 9 ++++-- .../conflicting-resources.tfstate.json | 10 +++--- .../display-apps-disabled.tfplan.json | 9 ++++-- .../display-apps-disabled.tfstate.json | 8 ++--- .../display-apps/display-apps.tfplan.json | 9 ++++-- .../display-apps/display-apps.tfstate.json | 8 ++--- .../external-auth-providers.tfplan.json | 11 ++++--- .../external-auth-providers.tfstate.json | 8 ++--- .../git-auth-providers.tfplan.json | 11 ++++--- .../git-auth-providers.tfstate.json | 8 ++--- .../instance-id/instance-id.tfplan.json | 9 ++++-- .../instance-id/instance-id.tfstate.json | 12 +++---- .../mapped-apps/mapped-apps.tfplan.json | 9 ++++-- .../mapped-apps/mapped-apps.tfstate.json | 16 +++++----- .../multiple-agents.tfplan.json | 18 +++++++---- .../multiple-agents.tfstate.json | 20 ++++++------ .../multiple-apps/multiple-apps.tfplan.json | 
9 ++++-- .../multiple-apps/multiple-apps.tfstate.json | 20 ++++++------ .../resource-metadata-duplicate.tfplan.json | 9 ++++-- .../resource-metadata-duplicate.tfstate.json | 16 +++++----- .../resource-metadata.tfplan.json | 9 ++++-- .../resource-metadata.tfstate.json | 12 +++---- .../rich-parameters-order.tfplan.json | 15 +++++---- .../rich-parameters-order.tfstate.json | 12 +++---- .../rich-parameters-validation.tfplan.json | 23 ++++++++------ .../rich-parameters-validation.tfstate.json | 20 ++++++------ .../rich-parameters.tfplan.json | 31 ++++++++++--------- .../rich-parameters.tfstate.json | 28 ++++++++--------- provisioner/terraform/testdata/version.txt | 2 +- scripts/Dockerfile.base | 2 +- 40 files changed, 243 insertions(+), 191 deletions(-) diff --git a/.github/actions/setup-tf/action.yaml b/.github/actions/setup-tf/action.yaml index 0fa40bdbfdefc..e660e6f3c3f5f 100644 --- a/.github/actions/setup-tf/action.yaml +++ b/.github/actions/setup-tf/action.yaml @@ -7,5 +7,5 @@ runs: - name: Install Terraform uses: hashicorp/setup-terraform@v3 with: - terraform_version: 1.7.5 + terraform_version: 1.8.4 terraform_wrapper: false diff --git a/.github/workflows/typos.toml b/.github/workflows/typos.toml index 559260e0f7f32..7ee9554f0cdc3 100644 --- a/.github/workflows/typos.toml +++ b/.github/workflows/typos.toml @@ -33,4 +33,5 @@ extend-exclude = [ "**/pnpm-lock.yaml", "tailnet/testdata/**", "site/src/pages/SetupPage/countries.tsx", + "provisioner/terraform/testdata/**", ] diff --git a/docs/install/offline.md b/docs/install/offline.md index 120aa5c9f76b7..d4d8d24c0c111 100644 --- a/docs/install/offline.md +++ b/docs/install/offline.md @@ -54,7 +54,7 @@ RUN mkdir -p /opt/terraform # The below step is optional if you wish to keep the existing version. # See https://github.com/coder/coder/blob/main/provisioner/terraform/install.go#L23-L24 # for supported Terraform versions. 
-ARG TERRAFORM_VERSION=1.7.5 +ARG TERRAFORM_VERSION=1.8.4 RUN apk update && \ apk del terraform && \ curl -LOs https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip \ diff --git a/dogfood/Dockerfile b/dogfood/Dockerfile index 4aa46e83c8fd7..19723853aa7ac 100644 --- a/dogfood/Dockerfile +++ b/dogfood/Dockerfile @@ -171,7 +171,7 @@ RUN apt-get update --quiet && apt-get install --yes \ # NOTE: In scripts/Dockerfile.base we specifically install Terraform version 1.7.5. # Installing the same version here to match. -RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_amd64.zip" && \ +RUN wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.8.4/terraform_1.8.4_linux_amd64.zip" && \ unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ diff --git a/install.sh b/install.sh index cabbdc685f2c6..9b76d1b204b21 100755 --- a/install.sh +++ b/install.sh @@ -250,7 +250,7 @@ EOF main() { MAINLINE=1 STABLE=0 - TERRAFORM_VERSION="1.7.5" + TERRAFORM_VERSION="1.8.4" if [ "${TRACE-}" ]; then set -x diff --git a/provisioner/terraform/install.go b/provisioner/terraform/install.go index e3014fb8758be..7ebceb5820035 100644 --- a/provisioner/terraform/install.go +++ b/provisioner/terraform/install.go @@ -20,10 +20,10 @@ var ( // when Terraform is not available on the system. // NOTE: Keep this in sync with the version in scripts/Dockerfile.base. // NOTE: Keep this in sync with the version in install.sh. 
- TerraformVersion = version.Must(version.NewVersion("1.7.5")) + TerraformVersion = version.Must(version.NewVersion("1.8.4")) minTerraformVersion = version.Must(version.NewVersion("1.1.0")) - maxTerraformVersion = version.Must(version.NewVersion("1.7.9")) // use .9 to automatically allow patch releases + maxTerraformVersion = version.Must(version.NewVersion("1.8.9")) // use .9 to automatically allow patch releases terraformMinorVersionMismatch = xerrors.New("Terraform binary minor version mismatch.") ) diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json index 28a2b055ecf10..e4693c3057db2 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } } ], @@ -259,6 +260,8 @@ ] } ], - "timestamp": "2024-05-22T17:02:40Z", + "timestamp": "2024-05-31T22:25:19Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json index 5f8a795e2a894..eed7ec7b0fe61 100644 --- a/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json +++ b/provisioner/terraform/testdata/calling-module/calling-module.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "f26b1d53-799e-4fbb-9fd3-71e60b37eacd", + "id": "2941e1eb-40f5-41cf-9e08-8f0f1a80d430", "init_script": "", 
"login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "ce663074-ebea-44cb-b6d1-321f590f7982", + "token": "3105121f-9b54-4c91-b497-9da9bb05c5b6", "troubleshooting_url": null }, "sensitive_values": { @@ -69,7 +69,7 @@ "outputs": { "script": "" }, - "random": "8031375470547649400" + "random": "3895262600016319159" }, "sensitive_values": { "inputs": {}, @@ -84,7 +84,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3370916843136140681", + "id": "5027788252939043492", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json index 9717ddd34b128..8b02d13cdc75e 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -204,6 +205,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:43Z", + "timestamp": "2024-05-31T22:25:20Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json index 304e9703b9073..95db4fc47c82c 100644 --- a/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json +++ b/provisioner/terraform/testdata/chaining-resources/chaining-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + 
"terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "9d869fc3-c185-4278-a5d2-873f809a4449", + "id": "da093356-6550-4e76-bb9e-0269cede7e31", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "418bb1d6-49d8-4340-ac84-ed6991457ff9", + "token": "ebcb7f0e-4b80-4972-b434-1a42aa650d78", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3681188688307687011", + "id": "2686005653093770315", "triggers": null }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6055360096088266226", + "id": "1732714319726388691", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json index a62fa814bea53..948ce6580b63b 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -204,6 +205,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:45Z", + "timestamp": "2024-05-31T22:25:22Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json 
b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json index 4aa66de56d2c9..15bfeec63e134 100644 --- a/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json +++ b/provisioner/terraform/testdata/conflicting-resources/conflicting-resources.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d9c497fe-1dc4-4551-b46d-282f775e9509", + "id": "e56c4e1a-6b1a-4007-880c-875dc6400b73", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "6fa01f69-de93-4610-b942-b787118146f8", + "token": "b3666f42-cc88-454e-93bd-553f71306dbe", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2012753940926517215", + "id": "8818573993093135925", "triggers": null }, "sensitive_values": {}, @@ -73,7 +73,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2163283012438694669", + "id": "2487290649323445841", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json index de8d982bef577..e2bd6410a62c4 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -42,7 +42,8 @@ "display_apps": [ {} ], - "metadata": [] + 
"metadata": [], + "token": true } }, { @@ -203,6 +204,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:50Z", + "timestamp": "2024-05-31T22:25:26Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json index 3567c75133732..ce2facb3c5a1c 100644 --- a/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json +++ b/provisioner/terraform/testdata/display-apps-disabled/display-apps-disabled.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "c55cfcad-5422-46e5-a144-e933660bacd3", + "id": "cd49cbe2-97f4-4980-9b13-4e4008f4d594", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "e170615d-a3a2-4dc4-a65e-4990ceeb79e5", + "token": "4b1c44cb-d960-42ef-b19e-60d169085657", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3512108359019802900", + "id": "6613171819431602989", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json index d41c6e03541d0..c3fe9046116ae 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -42,7 +42,8 @@ "display_apps": [ {} ], - 
"metadata": [] + "metadata": [], + "token": true } }, { @@ -203,6 +204,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:48Z", + "timestamp": "2024-05-31T22:25:24Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json index 79b2e6dd6490f..3ce1d2d34a181 100644 --- a/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json +++ b/provisioner/terraform/testdata/display-apps/display-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "3fb63a4e-bb0e-4380-9ed9-8b1581943b1f", + "id": "dac3e164-c9d2-43e2-89ee-54ce5955e551", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "eb5720a7-91fd-4e37-8085-af3c8205702c", + "token": "99ccf297-47b1-4c7c-819e-0bac896b12bd", "troubleshooting_url": null }, "sensitive_values": { @@ -57,7 +57,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "2929624824161973000", + "id": "5268162908997861371", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json index 837d50255a3a1..77cac08ba071d 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 
@@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -118,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -227,6 +228,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:52Z", + "timestamp": "2024-05-31T22:25:28Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json index 125cea74bcc3c..481e197946226 100644 --- a/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/external-auth-providers/external-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -54,7 +54,7 @@ } ], "env": null, - "id": "923df4d0-cf96-4cf8-aaff-426e58927a81", + "id": "2fcac464-b22b-4567-8391-7cdf592dae14", "init_script": "", "login_before_ready": true, "metadata": [], @@ -66,7 +66,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "f5328221-90c7-4056-83b4-7b76d6f46580", + "token": "57bcc78a-ed9b-46f9-9901-ffbdfb325871", "troubleshooting_url": null }, "sensitive_values": { @@ -85,7 +85,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4621387386750422041", + "id": "7076770981685522602", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json index bd9286692d328..ca6e7765c7a5b 100644 --- 
a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -118,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -222,6 +223,8 @@ ] } }, - "timestamp": "2024-05-22T17:02:55Z", + "timestamp": "2024-05-31T22:25:30Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json index 509c6d5a9d7fc..ae548c8f97f82 100644 --- a/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json +++ b/provisioner/terraform/testdata/git-auth-providers/git-auth-providers.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -52,7 +52,7 @@ } ], "env": null, - "id": "48a24332-1a90-48d9-9e03-b4e9f09c6eab", + "id": "c924e5b7-e2cb-4eb5-993e-3cc489ed5213", "init_script": "", "login_before_ready": true, "metadata": [], @@ -64,7 +64,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "6a2ae93f-3f25-423d-aa97-b2f1c5d9c20b", + "token": "cc8ceb98-822f-4b8f-b645-2162fada1dfb", "troubleshooting_url": null }, "sensitive_values": { @@ -83,7 +83,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "8095584601893320918", + "id": "7049248910828562611", "triggers": null }, 
"sensitive_values": {}, diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json index fe875367359c0..2cdfdcf13345a 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -224,6 +225,8 @@ ] } ], - "timestamp": "2024-05-22T17:02:57Z", + "timestamp": "2024-05-31T22:25:32Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json index ef5346a2ac822..40519b8266850 100644 --- a/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json +++ b/provisioner/terraform/testdata/instance-id/instance-id.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "3bc8e20f-2024-4014-ac11-806e7e1a1e24", + "id": "b691d6a2-76de-4441-ac90-3260282dc1fb", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "6ef0492b-8dbe-4c61-8eb8-a37acb671278", + "token": "244bf23b-b483-46f9-b2ff-7a6e746c836f", "troubleshooting_url": null }, "sensitive_values": { @@ -57,8 +57,8 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "3bc8e20f-2024-4014-ac11-806e7e1a1e24", - "id": "7ba714fa-f2b8-4d33-8987-f67466505033", + "agent_id": 
"b691d6a2-76de-4441-ac90-3260282dc1fb", + "id": "66ce959f-b821-4657-9bdb-6290c3b3a0b9", "instance_id": "example" }, "sensitive_values": {}, @@ -74,7 +74,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4065206823139127011", + "id": "3867175311980978156", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json index 9fad4b322a02d..2d63b29fac5e4 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -326,6 +327,8 @@ ] } ], - "timestamp": "2024-05-22T17:02:59Z", + "timestamp": "2024-05-31T22:25:34Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json index e19a8b484bf6a..dc78ba27d9f46 100644 --- a/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json +++ b/provisioner/terraform/testdata/mapped-apps/mapped-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", + "id": "d3eece5c-3d36-4e77-a67c-284d6a665004", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "0555adfc-e969-4fd2-8cfd-47560bd1b5a3", + "token": 
"793d9e17-fe59-4e70-83ee-76397b81a5bd", "troubleshooting_url": null }, "sensitive_values": { @@ -58,13 +58,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", + "agent_id": "d3eece5c-3d36-4e77-a67c-284d6a665004", "command": null, "display_name": "app1", "external": false, "healthcheck": [], "icon": null, - "id": "11fa3ff2-d6ba-41ca-b1df-6c98d395c0b8", + "id": "02a5c323-badd-4a9d-bb5e-6926b8c3f317", "name": null, "order": null, "relative_path": null, @@ -89,13 +89,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "d8d2ed23-193d-4784-9ce5-7bc0d879bb14", + "agent_id": "d3eece5c-3d36-4e77-a67c-284d6a665004", "command": null, "display_name": "app2", "external": false, "healthcheck": [], "icon": null, - "id": "cd1a2e37-adbc-49f0-bd99-033c62a1533e", + "id": "3f9b0fb0-fc06-49ed-b869-27b570b86b47", "name": null, "order": null, "relative_path": null, @@ -119,7 +119,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "4490911212417021152", + "id": "6739553050203442390", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json index 7f44aa45ca7d9..8a27774498541 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -61,7 +62,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -91,7 
+93,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -121,7 +124,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -460,6 +464,8 @@ ] } }, - "timestamp": "2024-05-22T17:03:01Z", + "timestamp": "2024-05-31T22:25:36Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json index 0bbd45fa5a3df..023f6ab52f0fc 100644 --- a/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json +++ b/provisioner/terraform/testdata/multiple-agents/multiple-agents.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "0ffc6582-b017-404e-b83f-48e4a5ab38bc", + "id": "2cd8a28d-b73c-4801-8748-5681512b99ed", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "b7f0a913-ecb1-4c80-8559-fbcb435d53d0", + "token": "68c874c4-2f0d-4dff-9fd7-67209e9a08c7", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ } ], "env": null, - "id": "1780ae95-844c-4d5c-94fb-6ccfe4a7656d", + "id": "2e773a6e-0e57-428d-bdf8-414c2aaa55fc", "init_script": "", "login_before_ready": true, "metadata": [], @@ -83,7 +83,7 @@ "startup_script": null, "startup_script_behavior": "non-blocking", "startup_script_timeout": 30, - "token": "695f8765-3d3d-4da0-9a5a-bb7b1f568bde", + "token": "98944f07-1265-4329-8fd3-c92aac95855c", "troubleshooting_url": null }, "sensitive_values": { @@ -116,7 +116,7 @@ } ], "env": null, - "id": "333b7856-24ac-46be-9ae3-e4981b25481d", + "id": "9568f00b-0bd8-4982-a502-7b37562b1fa3", "init_script": "", 
"login_before_ready": true, "metadata": [], @@ -128,7 +128,7 @@ "startup_script": null, "startup_script_behavior": "blocking", "startup_script_timeout": 300, - "token": "50ddfb93-264f-4f64-8c8d-db7d8d37c0a1", + "token": "8bf8789b-9efc-4517-aa30-89b99c46dd75", "troubleshooting_url": "https://coder.com/troubleshoot" }, "sensitive_values": { @@ -161,7 +161,7 @@ } ], "env": null, - "id": "90736626-71c9-4b76-bdfc-f6ce9b3dda05", + "id": "403e5299-2f3e-499c-b90a-2fa6fc9e44e6", "init_script": "", "login_before_ready": false, "metadata": [], @@ -173,7 +173,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "8c4ae7b9-12b7-4a9c-a55a-a98cfb049103", + "token": "a10e5bfb-9756-4210-a112-877f2cfbdc0a", "troubleshooting_url": null }, "sensitive_values": { @@ -192,7 +192,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "6980014108785645805", + "id": "2053669122262711043", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json index eee1d09317ba1..4a07ac904a675 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -445,6 +446,8 @@ ] } ], - "timestamp": "2024-05-22T17:03:03Z", + "timestamp": "2024-05-31T22:25:38Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json index 
3ed04ae6ecab0..e5a64a6928388 100644 --- a/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json +++ b/provisioner/terraform/testdata/multiple-apps/multiple-apps.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "c950352c-7c4a-41cc-9049-ad07ded85c47", + "id": "26bc229a-d911-4d91-8b18-c59a2f2939f4", "init_script": "", "login_before_ready": true, "metadata": [], @@ -38,7 +38,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "143c3974-49f5-4898-815b-c4044283ebc8", + "token": "3be506a9-b085-4bd8-a6e9-ac1769aedac5", "troubleshooting_url": null }, "sensitive_values": { @@ -57,13 +57,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", + "agent_id": "26bc229a-d911-4d91-8b18-c59a2f2939f4", "command": null, "display_name": null, "external": false, "healthcheck": [], "icon": null, - "id": "23135384-0e9f-4efc-b74c-d3e5e878ed67", + "id": "cbfb480c-49f0-41dc-a5e5-fa8ab21514e7", "name": null, "order": null, "relative_path": null, @@ -87,7 +87,7 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", + "agent_id": "26bc229a-d911-4d91-8b18-c59a2f2939f4", "command": null, "display_name": null, "external": false, @@ -99,7 +99,7 @@ } ], "icon": null, - "id": "01e73639-0fd1-4bcb-bd88-d22eb8244627", + "id": "6cc74cc4-edd4-482a-be9c-46243008081d", "name": null, "order": null, "relative_path": null, @@ -125,13 +125,13 @@ "provider_name": "registry.terraform.io/coder/coder", "schema_version": 0, "values": { - "agent_id": "c950352c-7c4a-41cc-9049-ad07ded85c47", + "agent_id": "26bc229a-d911-4d91-8b18-c59a2f2939f4", "command": null, "display_name": null, "external": false, "healthcheck": 
[], "icon": null, - "id": "058c9054-9714-4a5f-9fde-8a451ab58620", + "id": "7b2131ed-3850-439e-8942-6c83fe02ce0c", "name": null, "order": null, "relative_path": null, @@ -155,7 +155,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "9051436019409847411", + "id": "6270198559972381862", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json index 6084ae4435990..70379dc90d732 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -42,7 +42,8 @@ "display_apps": [], "metadata": [ {} - ] + ], + "token": true } }, { @@ -431,6 +432,8 @@ ] } ], - "timestamp": "2024-05-22T17:03:06Z", + "timestamp": "2024-05-31T22:25:42Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json index e617f565156ab..264edcf513f81 100644 --- a/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata-duplicate/resource-metadata-duplicate.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "8352a117-1250-44ef-bba2-0abdb2a77665", + "id": 
"15b21cea-46cb-4e70-b648-56dceff97236", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "b46fd197-3be4-42f8-9c47-5a9e71a76ef6", + "token": "3308a570-7944-4238-aca8-fbc3644d7548", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "1f7911d4-5b64-4e20-af9b-b6ee2aff602b", + "id": "28db1106-e6f0-41ff-b707-3100a99cadff", "item": [ { "is_null": false, @@ -86,7 +86,7 @@ "value": "" } ], - "resource_id": "7229373774865666851" + "resource_id": "3221770356529482934" }, "sensitive_values": { "item": [ @@ -110,7 +110,7 @@ "daily_cost": 20, "hide": true, "icon": "/icon/server.svg", - "id": "34fe7a46-2a2f-4628-8946-ef80a7ffdb5e", + "id": "a30b56a6-c122-485a-a128-4210600ad17f", "item": [ { "is_null": false, @@ -119,7 +119,7 @@ "value": "world" } ], - "resource_id": "7229373774865666851" + "resource_id": "3221770356529482934" }, "sensitive_values": { "item": [ @@ -139,7 +139,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "7229373774865666851", + "id": "3221770356529482934", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json index a03346a724115..8e06a483749ac 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -42,7 +42,8 @@ "display_apps": [], "metadata": [ {} - ] + ], + "token": true } }, { @@ -383,6 +384,8 @@ ] } ], - "timestamp": "2024-05-22T17:03:05Z", + 
"timestamp": "2024-05-31T22:25:40Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json index f8abe064ec94b..80cb793a44704 100644 --- a/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json +++ b/provisioner/terraform/testdata/resource-metadata/resource-metadata.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -26,7 +26,7 @@ } ], "env": null, - "id": "847150eb-c3b6-497d-9dad-8e62d478cfff", + "id": "5d102462-7646-4aae-bdac-c8b9906fb5b3", "init_script": "", "login_before_ready": true, "metadata": [ @@ -47,7 +47,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "a0c4f2f5-cc40-4731-9028-636033229c9c", + "token": "1d1ccced-ce84-4cbf-a80f-f17a59e948a0", "troubleshooting_url": null }, "sensitive_values": { @@ -71,7 +71,7 @@ "daily_cost": 29, "hide": true, "icon": "/icon/server.svg", - "id": "3feec3a3-6f9e-4cfb-b122-2273e345def0", + "id": "35194a0a-0012-4da3-9e3a-a4d7bdcc9638", "item": [ { "is_null": false, @@ -98,7 +98,7 @@ "value": "squirrel" } ], - "resource_id": "160324296641913729" + "resource_id": "2094194534443319186" }, "sensitive_values": { "item": [ @@ -121,7 +121,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "160324296641913729", + "id": "2094194534443319186", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json index 12a6aaccdd7b7..240c9affe23e0 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json +++ 
b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -118,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -135,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "7fb346d2-b8c2-4f2a-99d1-a8fd54cc479e", + "id": "5f79d935-c5bc-47e4-8152-eed302afc455", "mutable": false, "name": "Example", "option": null, @@ -162,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "0581cc2a-9e6d-4f04-93a6-88fcbd0757f0", + "id": "e8af506e-91e7-457a-8e68-f33109f30e6a", "mutable": false, "name": "Sample", "option": null, @@ -268,6 +269,8 @@ ] } }, - "timestamp": "2024-05-22T17:03:11Z", + "timestamp": "2024-05-31T22:25:46Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json index ce08e87bce074..4505699adf299 100644 --- a/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-order/rich-parameters-order.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "5c9f037b-3cc1-4616-b4ba-9e7322856575", + "id": "487e2328-8fa1-472f-a35d-5c017f5a2621", "mutable": false, "name": "Example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, 
"ephemeral": false, "icon": null, - "id": "71a4bcc8-bbcb-4619-9641-df3bc296f58e", + "id": "c85ec281-458c-4932-a10d-049be7e1b8f8", "mutable": false, "name": "Sample", "option": null, @@ -80,7 +80,7 @@ } ], "env": null, - "id": "327e8ab1-90be-4c87-ac7d-09630ae46827", + "id": "3d98abaf-7a38-450f-9fc9-eaebbebb1f1f", "init_script": "", "login_before_ready": true, "metadata": [], @@ -92,7 +92,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "794a8a86-3bb9-4b3d-bbea-acff8b513964", + "token": "3000e759-60df-4470-8f51-50ea4bc6a1ad", "troubleshooting_url": null }, "sensitive_values": { @@ -111,7 +111,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": "3735840255017039964", + "id": "4580074114866058503", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json index d4f402ce40102..0535ccd50bb59 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -118,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -135,7 +136,7 @@ "display_name": null, "ephemeral": true, "icon": null, - "id": "1e85f9f5-54c2-4a6b-ba7f-8627386b94b7", + "id": "c2d5292e-1dea-434b-b5cc-dc288c2a512b", "mutable": true, "name": "number_example", "option": null, @@ 
-162,7 +163,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "9908f4c5-87f5-496c-9479-d0f7d49f0fdf", + "id": "689418c1-935c-40ad-aa9f-37ab4f8d9501", "mutable": false, "name": "number_example_max", "option": null, @@ -201,7 +202,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "3f2d0054-0440-4a00-98f6-befa9475a5f4", + "id": "bc7db79f-d6ef-45a2-9bbf-50710eb1db8c", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -240,7 +241,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "29abca17-5bd3-4ae3-9bd3-1e45301fc509", + "id": "5e88eade-4255-4693-86bf-2c0331ca2a06", "mutable": false, "name": "number_example_min", "option": null, @@ -279,7 +280,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "95630cc0-8040-4126-92bb-967dbf8eb2ed", + "id": "26c34bb9-535d-45d7-bebd-1dcb2300f242", "mutable": false, "name": "number_example_min_max", "option": null, @@ -318,7 +319,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "c256c60a-fdfe-42f1-bbaa-27880816a7bf", + "id": "3b55387f-0117-4d34-b585-14959f4a9267", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -550,6 +551,8 @@ ] } }, - "timestamp": "2024-05-22T17:03:12Z", + "timestamp": "2024-05-31T22:25:48Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json index a09880e54e903..e8415b0959bfa 100644 --- a/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters-validation/rich-parameters-validation.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -17,7 +17,7 @@ 
"display_name": null, "ephemeral": true, "icon": null, - "id": "f7cabe8c-f091-4ced-bc9b-873f54edf61b", + "id": "1f836366-337f-47a9-bc49-f4810b2f1078", "mutable": true, "name": "number_example", "option": null, @@ -44,7 +44,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "13b33312-d49b-4df3-af89-5d6ec840a6e4", + "id": "d58e721b-0134-42b6-b4b9-bb012f43a439", "mutable": false, "name": "number_example_max", "option": null, @@ -83,7 +83,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "d5ff002b-d039-42e6-b638-6bc2e3d54c2b", + "id": "4c3ff771-15ab-4a33-8067-45d5d44a5f7e", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -122,7 +122,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "f382fcba-2634-44e7-ab26-866228d0679a", + "id": "11f8f368-f829-403a-8ad9-3a10df1db0bf", "mutable": false, "name": "number_example_min", "option": null, @@ -161,7 +161,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "7f1c3032-1ed9-4602-80f8-cc84489bafc9", + "id": "9de03421-e747-4084-b808-90464beb8ab4", "mutable": false, "name": "number_example_min_max", "option": null, @@ -200,7 +200,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "c474219f-f1e7-4eca-921a-1ace9a8391ee", + "id": "eb75256a-66d6-45d6-a0f5-331a885742e4", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -248,7 +248,7 @@ } ], "env": null, - "id": "138f6db3-bd8d-4a9a-8e61-abc1fdf3c3af", + "id": "e6810890-032b-4a01-9562-b9a8428dcc97", "init_script": "", "login_before_ready": true, "metadata": [], @@ -260,7 +260,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "1ef5dec0-3339-4e24-b781-0166cc6a9820", + "token": "c162e35d-a066-472c-a469-91d6b116fa6f", "troubleshooting_url": null }, "sensitive_values": { @@ -279,7 +279,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": 
"5975950266738511043", + "id": "8464994280406150541", "triggers": null }, "sensitive_values": {}, diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json index a881255a41e12..393acb59fe5a2 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfplan.json @@ -1,6 +1,6 @@ { "format_version": "1.2", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "planned_values": { "root_module": { "resources": [ @@ -31,7 +31,8 @@ }, "sensitive_values": { "display_apps": [], - "metadata": [] + "metadata": [], + "token": true } }, { @@ -118,7 +119,7 @@ ], "prior_state": { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { "resources": [ @@ -135,7 +136,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "2be3cd75-c44b-482e-8f78-679067d8e0a4", + "id": "e5891365-ddf0-417c-a5d7-9ae7cdc76754", "mutable": false, "name": "Example", "option": [ @@ -179,7 +180,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "5a2f0407-8f11-4ac8-980d-75f919959f08", + "id": "b95cd221-cdca-4d6e-98d0-e4fb6d90dc32", "mutable": false, "name": "number_example", "option": null, @@ -206,7 +207,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "cf4b28cf-ec3c-4f53-ae27-4733a9f7d71a", + "id": "e1e5bce0-ea22-401d-8253-1b9175077abc", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -245,7 +246,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "70d63380-2020-4377-ae05-cecb12c0d709", + "id": "26a6eaca-c9ae-4130-a734-6c290637b250", "mutable": false, "name": "number_example_min_max", "option": null, @@ -284,7 +285,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "ec5827c2-2511-4f16-bd85-6249517c9e5b", + "id": 
"ad985f1d-21fe-4ce1-988d-903084016cb4", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -323,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "eec8845e-4316-450a-a5b7-eaa9567f469a", + "id": "9465cc3a-703a-4218-8fa4-d16a1631e648", "mutable": false, "name": "Sample", "option": null, @@ -354,7 +355,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "3b860d24-85ac-4540-b309-9321e732dfc4", + "id": "547f8420-0630-4c4d-9507-e2d63640d0d9", "mutable": true, "name": "First parameter from module", "option": null, @@ -381,7 +382,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "b36105e3-9bf1-43c7-a857-078ef1e8f95d", + "id": "5c32dcad-d54a-474f-97f0-fbcc8aaba9bd", "mutable": true, "name": "Second parameter from module", "option": null, @@ -413,7 +414,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "a2bee9f2-8a3c-404c-839b-01b6cd840707", + "id": "2362ba5e-0779-472c-bd3c-22446fd14075", "mutable": true, "name": "First parameter from child module", "option": null, @@ -440,7 +441,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "deb13c45-ed6d-45b6-b6eb-d319143fa8f2", + "id": "0a8f6df4-364f-4d5f-b935-7dee8c568e10", "mutable": true, "name": "Second parameter from child module", "option": null, @@ -793,6 +794,8 @@ } } }, - "timestamp": "2024-05-22T17:03:08Z", + "timestamp": "2024-05-31T22:25:44Z", + "applyable": true, + "complete": true, "errored": false } diff --git a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json index a82bb9ea1925c..eeec6ba4ea9c9 100644 --- a/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json +++ b/provisioner/terraform/testdata/rich-parameters/rich-parameters.tfstate.json @@ -1,6 +1,6 @@ { "format_version": "1.0", - "terraform_version": "1.7.5", + "terraform_version": "1.8.4", "values": { "root_module": { 
"resources": [ @@ -17,7 +17,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "7fa1e2f7-36a4-49cd-b92a-b3fc8732d359", + "id": "9f041124-ccf3-4b7b-9e0d-4d37335a6f98", "mutable": false, "name": "Example", "option": [ @@ -61,7 +61,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "86a60580-7221-4bab-b229-9cb61bdb56a0", + "id": "ab5035e4-8dab-453d-92bc-9b866af26c78", "mutable": false, "name": "number_example", "option": null, @@ -88,7 +88,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "ed6bc6e5-b4ff-48b9-88b0-df5faa74ae66", + "id": "bdf84ab6-1029-4645-a2df-cd897f30c145", "mutable": false, "name": "number_example_max_zero", "option": null, @@ -127,7 +127,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "340b19e1-f651-4321-96b1-7908c2c66914", + "id": "b283766e-7e58-459d-a81f-aa71a95bbc0b", "mutable": false, "name": "number_example_min_max", "option": null, @@ -166,7 +166,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "f19c6763-2e55-40dd-9b49-82e9181e5b1b", + "id": "7a4f8f6d-d81a-4b15-9d5b-6f221f2a6b07", "mutable": false, "name": "number_example_min_zero", "option": null, @@ -205,7 +205,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "02169810-8080-4dc6-a656-5fbda745659e", + "id": "fd12f0d0-87dc-4d88-bcdc-352c11bd2144", "mutable": false, "name": "Sample", "option": null, @@ -241,7 +241,7 @@ } ], "env": null, - "id": "42edc650-ddb6-4ed9-9624-7788d60d1507", + "id": "a20d4cf7-2d49-4ab8-8858-a9e1531e7033", "init_script": "", "login_before_ready": true, "metadata": [], @@ -253,7 +253,7 @@ "startup_script": null, "startup_script_behavior": null, "startup_script_timeout": 300, - "token": "c767a648-e670-4c6b-a28b-8559033e92a7", + "token": "0d8692b3-746f-4f2e-b0cc-7952ee240ba4", "troubleshooting_url": null }, "sensitive_values": { @@ -272,7 +272,7 @@ "provider_name": "registry.terraform.io/hashicorp/null", "schema_version": 0, "values": { - "id": 
"7506678111935039701", + "id": "9033341587141190203", "triggers": null }, "sensitive_values": {}, @@ -297,7 +297,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "11b1ae03-cf81-4f60-9be1-bd4c0586516d", + "id": "6be6ebff-574c-4ab6-b314-a65f4f20446e", "mutable": true, "name": "First parameter from module", "option": null, @@ -324,7 +324,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "79d87261-bfda-46ee-958d-7d62252101ad", + "id": "d7e3d42e-dc51-47f2-ae5f-1b1bdaa85e25", "mutable": true, "name": "Second parameter from module", "option": null, @@ -356,7 +356,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "30c4c518-116a-4591-a571-886101cfcdfa", + "id": "69f71896-5cc4-44d0-ae7a-b7a5514a07ae", "mutable": true, "name": "First parameter from child module", "option": null, @@ -383,7 +383,7 @@ "display_name": null, "ephemeral": false, "icon": null, - "id": "4c7d9f15-da45-453e-85eb-1d22c9baa54c", + "id": "9a2b177e-8f3c-4d6b-b302-3ba2f0e6c76b", "mutable": true, "name": "Second parameter from child module", "option": null, diff --git a/provisioner/terraform/testdata/version.txt b/provisioner/terraform/testdata/version.txt index 6a126f402d53d..bfa363e76ed71 100644 --- a/provisioner/terraform/testdata/version.txt +++ b/provisioner/terraform/testdata/version.txt @@ -1 +1 @@ -1.7.5 +1.8.4 diff --git a/scripts/Dockerfile.base b/scripts/Dockerfile.base index df6cb4637a366..1099e52e01a48 100644 --- a/scripts/Dockerfile.base +++ b/scripts/Dockerfile.base @@ -26,7 +26,7 @@ RUN apk add --no-cache \ # Terraform was disabled in the edge repo due to a build issue. # https://gitlab.alpinelinux.org/alpine/aports/-/commit/f3e263d94cfac02d594bef83790c280e045eba35 # Using wget for now. Note that busybox unzip doesn't support streaming. 
-RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.7.5/terraform_1.7.5_linux_${ARCH}.zip" && \ +RUN ARCH="$(arch)"; if [ "${ARCH}" == "x86_64" ]; then ARCH="amd64"; elif [ "${ARCH}" == "aarch64" ]; then ARCH="arm64"; fi; wget -O /tmp/terraform.zip "https://releases.hashicorp.com/terraform/1.8.4/terraform_1.8.4_linux_${ARCH}.zip" && \ busybox unzip /tmp/terraform.zip -d /usr/local/bin && \ rm -f /tmp/terraform.zip && \ chmod +x /usr/local/bin/terraform && \ From ba0bf43de4d9c7bc7179ceb2d460cee9c550b623 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Tue, 4 Jun 2024 00:29:24 +0300 Subject: [PATCH 147/149] chore(scripts): fix unbound variable in tag_version.sh (#13428) (cherry picked from commit a51076a4cdd3818c5697a902db78c1f0ad288ac0) --- scripts/release/tag_version.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/release/tag_version.sh b/scripts/release/tag_version.sh index 16a2011016047..2bf3e88646cc2 100755 --- a/scripts/release/tag_version.sh +++ b/scripts/release/tag_version.sh @@ -86,8 +86,8 @@ fi # shellcheck source=scripts/release/check_commit_metadata.sh source "$SCRIPT_DIR/check_commit_metadata.sh" "$old_version" "$ref" +prev_increment=$increment if ((COMMIT_METADATA_BREAKING == 1)); then - prev_increment=$increment if [[ $increment == patch ]]; then increment=minor fi From 5e69a9d18b599552691c3616b65e7922ffd828a1 Mon Sep 17 00:00:00 2001 From: Colin Adler Date: Wed, 5 Jun 2024 16:50:52 -0500 Subject: [PATCH 148/149] fix(site): show workspace start button when require active version is enabled (#13482) (cherry picked from commit f1b42a15fa1c10ee122a116cf4eb6b2f37f2f829) --- .../pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx 
b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx index ad79ce1be9c95..7e41b7662b20a 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx @@ -172,6 +172,10 @@ export const WorkspaceActions: FC = ({ )} + {!canBeUpdated && + workspace.template_require_active_version && + buttonMapping.start} + {isRestarting ? buttonMapping.restarting : actions.map((action) => ( From 3fbfb534d00144805cf527bff3869db7ffc45d30 Mon Sep 17 00:00:00 2001 From: Jon Ayers Date: Wed, 5 Jun 2024 21:52:49 -0500 Subject: [PATCH 149/149] fix: only render tooltip when require_active_version enabled (#13484) (cherry picked from commit 7995d7c3d6fa7dffd8ed420c3d0de43907d747db) --- .../WorkspacePage/WorkspaceActions/WorkspaceActions.tsx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx index 7e41b7662b20a..beab34de37633 100644 --- a/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx +++ b/site/src/pages/WorkspacePage/WorkspaceActions/WorkspaceActions.tsx @@ -244,7 +244,11 @@ function getTooltipText( return ""; } - if (!mustUpdate && canChangeVersions) { + if ( + !mustUpdate && + canChangeVersions && + workspace.template_require_active_version + ) { return "This template requires automatic updates on workspace startup, but template administrators can ignore this policy."; }