From ca5a78adbff302dd6dd121f456680fcc12d9ff2f Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Sat, 17 May 2025 17:02:37 -0500 Subject: [PATCH 01/42] chore: update preview to remove AsString panic on unknown fields (#17897) --- go.mod | 2 +- go.sum | 4 ++-- site/src/api/typesGenerated.ts | 8 ++++++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index cdbd61574066f..32b4257f082fe 100644 --- a/go.mod +++ b/go.mod @@ -485,7 +485,7 @@ require ( require ( github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3 - github.com/coder/preview v0.0.2-0.20250510235314-66630669ff6f + github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 github.com/fsnotify/fsnotify v1.9.0 github.com/kylecarbs/aisdk-go v0.0.8 github.com/mark3labs/mcp-go v0.27.0 diff --git a/go.sum b/go.sum index 3d635991b7abe..2310faffb41d9 100644 --- a/go.sum +++ b/go.sum @@ -911,8 +911,8 @@ github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048 h1:3jzYUlGH7ZELIH4XggX github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc= -github.com/coder/preview v0.0.2-0.20250510235314-66630669ff6f h1:Q1AcLBpRRR8rf/H+GxcJaZ5Ox4UeIRkDgc6wvvmOJAo= -github.com/coder/preview v0.0.2-0.20250510235314-66630669ff6f/go.mod h1:GfkwIv5gQLpL01qeGU1/YoxoFtt5trzCqnWZLo77clU= +github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 h1:flPwcvOZ9RwENDYcLOnfYEClbKWfFvpQCddODdSS6Co= +github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319/go.mod h1:GfkwIv5gQLpL01qeGU1/YoxoFtt5trzCqnWZLo77clU= github.com/coder/quartz v0.1.3 h1:hA2nI8uUA2fNN9uhXv2I4xZD4aHkA7oH3g2t03v4xf8= github.com/coder/quartz v0.1.3/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= github.com/coder/retry v1.5.1 
h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 6c09014c4ed6f..8017fef790dde 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -971,6 +971,7 @@ export interface FriendlyDiagnostic { readonly severity: PreviewDiagnosticSeverityString; readonly summary: string; readonly detail: string; + readonly extra: PreviewDiagnosticExtra; } // From codersdk/apikey.go @@ -1776,6 +1777,13 @@ export interface PresetParameter { readonly Value: string; } +// From types/diagnostics.go +export interface PreviewDiagnosticExtra { + readonly code: string; + // empty interface{} type, falling back to unknown + readonly Wrapped: unknown; +} + // From types/diagnostics.go export type PreviewDiagnosticSeverityString = string; From 1a4160803589034ce1518e24a78f232c8d08f996 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Mon, 19 May 2025 12:05:35 +0400 Subject: [PATCH 02/42] fix: stop extending API key access if OIDC refresh is available (#17878) fixes #17070 Cleans up our handling of APIKey expiration and OIDC to keep them separate concepts. For an OIDC-login APIKey, both the APIKey and OIDC link must be valid to log in. If the OIDC link is expired and we have a refresh token, we will attempt to refresh. OIDC refreshes do not have any effect on APIKey expiry. https://github.com/coder/coder/issues/17070#issuecomment-2886183613 explains why this is the correct behavior.
--- coderd/coderdtest/oidctest/idp.go | 5 +- coderd/httpmw/apikey.go | 94 +++++++++--------- coderd/httpmw/apikey_test.go | 158 +++++++++++++++++++++++++++++- coderd/oauthpki/okidcpki_test.go | 1 + 4 files changed, 210 insertions(+), 48 deletions(-) diff --git a/coderd/coderdtest/oidctest/idp.go b/coderd/coderdtest/oidctest/idp.go index b82f8a00dedb4..c7f7d35937198 100644 --- a/coderd/coderdtest/oidctest/idp.go +++ b/coderd/coderdtest/oidctest/idp.go @@ -307,7 +307,7 @@ func WithCustomClientAuth(hook func(t testing.TB, req *http.Request) (url.Values // WithLogging is optional, but will log some HTTP calls made to the IDP. func WithLogging(t testing.TB, options *slogtest.Options) func(*FakeIDP) { return func(f *FakeIDP) { - f.logger = slogtest.Make(t, options) + f.logger = slogtest.Make(t, options).Named("fakeidp") } } @@ -794,6 +794,7 @@ func (f *FakeIDP) newToken(t testing.TB, email string, expires time.Time) string func (f *FakeIDP) newRefreshTokens(email string) string { refreshToken := uuid.NewString() f.refreshTokens.Store(refreshToken, email) + f.logger.Info(context.Background(), "new refresh token", slog.F("email", email), slog.F("token", refreshToken)) return refreshToken } @@ -1003,6 +1004,7 @@ func (f *FakeIDP) httpHandler(t testing.TB) http.Handler { return } + f.logger.Info(r.Context(), "http idp call refresh_token", slog.F("token", refreshToken)) _, ok := f.refreshTokens.Load(refreshToken) if !assert.True(t, ok, "invalid refresh_token") { http.Error(rw, "invalid refresh_token", http.StatusBadRequest) @@ -1026,6 +1028,7 @@ func (f *FakeIDP) httpHandler(t testing.TB) http.Handler { f.refreshTokensUsed.Store(refreshToken, true) // Always invalidate the refresh token after it is used. 
f.refreshTokens.Delete(refreshToken) + f.logger.Info(r.Context(), "refresh token invalidated", slog.F("token", refreshToken)) case "urn:ietf:params:oauth:grant-type:device_code": // Device flow var resp externalauth.ExchangeDeviceCodeResponse diff --git a/coderd/httpmw/apikey.go b/coderd/httpmw/apikey.go index d614b37a3d897..4b92848b773e2 100644 --- a/coderd/httpmw/apikey.go +++ b/coderd/httpmw/apikey.go @@ -232,16 +232,21 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon return optionalWrite(http.StatusUnauthorized, resp) } - var ( - link database.UserLink - now = dbtime.Now() - // Tracks if the API key has properties updated - changed = false - ) + now := dbtime.Now() + if key.ExpiresAt.Before(now) { + return optionalWrite(http.StatusUnauthorized, codersdk.Response{ + Message: SignedOutErrorMessage, + Detail: fmt.Sprintf("API key expired at %q.", key.ExpiresAt.String()), + }) + } + + // We only check OIDC stuff if we have a valid APIKey. An expired key means we don't trust the requestor + // really is the user whose key they have, and so we shouldn't be doing anything on their behalf including possibly + // refreshing the OIDC token. if key.LoginType == database.LoginTypeGithub || key.LoginType == database.LoginTypeOIDC { var err error //nolint:gocritic // System needs to fetch UserLink to check if it's valid. 
- link, err = cfg.DB.GetUserLinkByUserIDLoginType(dbauthz.AsSystemRestricted(ctx), database.GetUserLinkByUserIDLoginTypeParams{ + link, err := cfg.DB.GetUserLinkByUserIDLoginType(dbauthz.AsSystemRestricted(ctx), database.GetUserLinkByUserIDLoginTypeParams{ UserID: key.UserID, LoginType: key.LoginType, }) @@ -258,7 +263,7 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon }) } // Check if the OAuth token is expired - if link.OAuthExpiry.Before(now) && !link.OAuthExpiry.IsZero() && link.OAuthRefreshToken != "" { + if !link.OAuthExpiry.IsZero() && link.OAuthExpiry.Before(now) { if cfg.OAuth2Configs.IsZero() { return write(http.StatusInternalServerError, codersdk.Response{ Message: internalErrorMessage, @@ -267,12 +272,15 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon }) } + var friendlyName string var oauthConfig promoauth.OAuth2Config switch key.LoginType { case database.LoginTypeGithub: oauthConfig = cfg.OAuth2Configs.Github + friendlyName = "GitHub" case database.LoginTypeOIDC: oauthConfig = cfg.OAuth2Configs.OIDC + friendlyName = "OpenID Connect" default: return write(http.StatusInternalServerError, codersdk.Response{ Message: internalErrorMessage, @@ -292,7 +300,13 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon }) } - // If it is, let's refresh it from the provided config + if link.OAuthRefreshToken == "" { + return optionalWrite(http.StatusUnauthorized, codersdk.Response{ + Message: SignedOutErrorMessage, + Detail: fmt.Sprintf("%s session expired at %q. 
Try signing in again.", friendlyName, link.OAuthExpiry.String()), + }) + } + // We have a refresh token, so let's try it token, err := oauthConfig.TokenSource(r.Context(), &oauth2.Token{ AccessToken: link.OAuthAccessToken, RefreshToken: link.OAuthRefreshToken, @@ -300,28 +314,39 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon }).Token() if err != nil { return write(http.StatusUnauthorized, codersdk.Response{ - Message: "Could not refresh expired Oauth token. Try re-authenticating to resolve this issue.", - Detail: err.Error(), + Message: fmt.Sprintf( + "Could not refresh expired %s token. Try re-authenticating to resolve this issue.", + friendlyName), + Detail: err.Error(), }) } link.OAuthAccessToken = token.AccessToken link.OAuthRefreshToken = token.RefreshToken link.OAuthExpiry = token.Expiry - key.ExpiresAt = token.Expiry - changed = true + //nolint:gocritic // system needs to update user link + link, err = cfg.DB.UpdateUserLink(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLinkParams{ + UserID: link.UserID, + LoginType: link.LoginType, + OAuthAccessToken: link.OAuthAccessToken, + OAuthAccessTokenKeyID: sql.NullString{}, // dbcrypt will update as required + OAuthRefreshToken: link.OAuthRefreshToken, + OAuthRefreshTokenKeyID: sql.NullString{}, // dbcrypt will update as required + OAuthExpiry: link.OAuthExpiry, + // Refresh should keep the same debug context because we use + // the original claims for the group/role sync. + Claims: link.Claims, + }) + if err != nil { + return write(http.StatusInternalServerError, codersdk.Response{ + Message: internalErrorMessage, + Detail: fmt.Sprintf("update user_link: %s.", err.Error()), + }) + } } } - // Checking if the key is expired. - // NOTE: The `RequireAuth` React component depends on this `Detail` to detect when - // the users token has expired. If you change the text here, make sure to update it - // in site/src/components/RequireAuth/RequireAuth.tsx as well. 
- if key.ExpiresAt.Before(now) { - return optionalWrite(http.StatusUnauthorized, codersdk.Response{ - Message: SignedOutErrorMessage, - Detail: fmt.Sprintf("API key expired at %q.", key.ExpiresAt.String()), - }) - } + // Tracks if the API key has properties updated + changed := false // Only update LastUsed once an hour to prevent database spam. if now.Sub(key.LastUsed) > time.Hour { @@ -363,29 +388,6 @@ func ExtractAPIKey(rw http.ResponseWriter, r *http.Request, cfg ExtractAPIKeyCon Detail: fmt.Sprintf("API key couldn't update: %s.", err.Error()), }) } - // If the API Key is associated with a user_link (e.g. Github/OIDC) - // then we want to update the relevant oauth fields. - if link.UserID != uuid.Nil { - //nolint:gocritic // system needs to update user link - link, err = cfg.DB.UpdateUserLink(dbauthz.AsSystemRestricted(ctx), database.UpdateUserLinkParams{ - UserID: link.UserID, - LoginType: link.LoginType, - OAuthAccessToken: link.OAuthAccessToken, - OAuthAccessTokenKeyID: sql.NullString{}, // dbcrypt will update as required - OAuthRefreshToken: link.OAuthRefreshToken, - OAuthRefreshTokenKeyID: sql.NullString{}, // dbcrypt will update as required - OAuthExpiry: link.OAuthExpiry, - // Refresh should keep the same debug context because we use - // the original claims for the group/role sync. - Claims: link.Claims, - }) - if err != nil { - return write(http.StatusInternalServerError, codersdk.Response{ - Message: internalErrorMessage, - Detail: fmt.Sprintf("update user_link: %s.", err.Error()), - }) - } - } // We only want to update this occasionally to reduce DB write // load. 
We update alongside the UserLink and APIKey since it's diff --git a/coderd/httpmw/apikey_test.go b/coderd/httpmw/apikey_test.go index bd979e88235ad..6e2e75ace9825 100644 --- a/coderd/httpmw/apikey_test.go +++ b/coderd/httpmw/apikey_test.go @@ -508,6 +508,102 @@ func TestAPIKey(t *testing.T) { require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt) }) + t.Run("APIKeyExpiredOAuthExpired", func(t *testing.T) { + t.Parallel() + var ( + db = dbmem.New() + user = dbgen.User(t, db, database.User{}) + sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{ + UserID: user.ID, + LastUsed: dbtime.Now().AddDate(0, 0, -1), + ExpiresAt: dbtime.Now().AddDate(0, 0, -1), + LoginType: database.LoginTypeOIDC, + }) + _ = dbgen.UserLink(t, db, database.UserLink{ + UserID: user.ID, + LoginType: database.LoginTypeOIDC, + OAuthExpiry: dbtime.Now().AddDate(0, 0, -1), + }) + + r = httptest.NewRequest("GET", "/", nil) + rw = httptest.NewRecorder() + ) + r.Header.Set(codersdk.SessionTokenHeader, token) + + // Include a valid oauth token for refreshing. If this token is invalid, + // it is difficult to tell an auth failure from an expired api key, or + // an expired oauth key. 
+ oauthToken := &oauth2.Token{ + AccessToken: "wow", + RefreshToken: "moo", + Expiry: dbtime.Now().AddDate(0, 0, 1), + } + httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ + DB: db, + OAuth2Configs: &httpmw.OAuth2Configs{ + OIDC: &testutil.OAuth2Config{ + Token: oauthToken, + }, + }, + RedirectToLogin: false, + })(successHandler).ServeHTTP(rw, r) + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusUnauthorized, res.StatusCode) + + gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID) + require.NoError(t, err) + + require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed) + require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt) + }) + + t.Run("APIKeyExpiredOAuthNotExpired", func(t *testing.T) { + t.Parallel() + var ( + db = dbmem.New() + user = dbgen.User(t, db, database.User{}) + sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{ + UserID: user.ID, + LastUsed: dbtime.Now().AddDate(0, 0, -1), + ExpiresAt: dbtime.Now().AddDate(0, 0, -1), + LoginType: database.LoginTypeOIDC, + }) + _ = dbgen.UserLink(t, db, database.UserLink{ + UserID: user.ID, + LoginType: database.LoginTypeOIDC, + }) + + r = httptest.NewRequest("GET", "/", nil) + rw = httptest.NewRecorder() + ) + r.Header.Set(codersdk.SessionTokenHeader, token) + + oauthToken := &oauth2.Token{ + AccessToken: "wow", + RefreshToken: "moo", + Expiry: dbtime.Now().AddDate(0, 0, 1), + } + httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ + DB: db, + OAuth2Configs: &httpmw.OAuth2Configs{ + OIDC: &testutil.OAuth2Config{ + Token: oauthToken, + }, + }, + RedirectToLogin: false, + })(successHandler).ServeHTTP(rw, r) + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusUnauthorized, res.StatusCode) + + gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID) + require.NoError(t, err) + + require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed) + require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt) + }) + t.Run("OAuthRefresh", func(t *testing.T) 
{ t.Parallel() var ( @@ -553,7 +649,67 @@ func TestAPIKey(t *testing.T) { require.NoError(t, err) require.Equal(t, sentAPIKey.LastUsed, gotAPIKey.LastUsed) - require.Equal(t, oauthToken.Expiry, gotAPIKey.ExpiresAt) + // Note that OAuth expiry is independent of APIKey expiry, so an OIDC refresh DOES NOT affect the expiry of the + // APIKey + require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt) + + gotLink, err := db.GetUserLinkByUserIDLoginType(r.Context(), database.GetUserLinkByUserIDLoginTypeParams{ + UserID: user.ID, + LoginType: database.LoginTypeGithub, + }) + require.NoError(t, err) + require.Equal(t, gotLink.OAuthRefreshToken, "moo") + }) + + t.Run("OAuthExpiredNoRefresh", func(t *testing.T) { + t.Parallel() + var ( + ctx = testutil.Context(t, testutil.WaitShort) + db = dbmem.New() + user = dbgen.User(t, db, database.User{}) + sentAPIKey, token = dbgen.APIKey(t, db, database.APIKey{ + UserID: user.ID, + LastUsed: dbtime.Now(), + ExpiresAt: dbtime.Now().AddDate(0, 0, 1), + LoginType: database.LoginTypeGithub, + }) + + r = httptest.NewRequest("GET", "/", nil) + rw = httptest.NewRecorder() + ) + _, err := db.InsertUserLink(ctx, database.InsertUserLinkParams{ + UserID: user.ID, + LoginType: database.LoginTypeGithub, + OAuthExpiry: dbtime.Now().AddDate(0, 0, -1), + OAuthAccessToken: "letmein", + }) + require.NoError(t, err) + + r.Header.Set(codersdk.SessionTokenHeader, token) + + oauthToken := &oauth2.Token{ + AccessToken: "wow", + RefreshToken: "moo", + Expiry: dbtime.Now().AddDate(0, 0, 1), + } + httpmw.ExtractAPIKeyMW(httpmw.ExtractAPIKeyConfig{ + DB: db, + OAuth2Configs: &httpmw.OAuth2Configs{ + Github: &testutil.OAuth2Config{ + Token: oauthToken, + }, + }, + RedirectToLogin: false, + })(successHandler).ServeHTTP(rw, r) + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusUnauthorized, res.StatusCode) + + gotAPIKey, err := db.GetAPIKeyByID(r.Context(), sentAPIKey.ID) + require.NoError(t, err) + + require.Equal(t, 
sentAPIKey.LastUsed, gotAPIKey.LastUsed) + require.Equal(t, sentAPIKey.ExpiresAt, gotAPIKey.ExpiresAt) }) t.Run("RemoteIPUpdates", func(t *testing.T) { diff --git a/coderd/oauthpki/okidcpki_test.go b/coderd/oauthpki/okidcpki_test.go index 509da563a9145..7f7dda17bcba8 100644 --- a/coderd/oauthpki/okidcpki_test.go +++ b/coderd/oauthpki/okidcpki_test.go @@ -144,6 +144,7 @@ func TestAzureAKPKIWithCoderd(t *testing.T) { return values, nil }), oidctest.WithServing(), + oidctest.WithLogging(t, nil), ) cfg := fake.OIDCConfig(t, scopes, func(cfg *coderd.OIDCConfig) { cfg.AllowSignups = true From c775ea84119efceee3b40f68f70be5f55de903f7 Mon Sep 17 00:00:00 2001 From: Sas Swart Date: Mon, 19 May 2025 11:37:54 +0200 Subject: [PATCH 03/42] test: fix a race in TestReinit (#17902) closes https://github.com/coder/internal/issues/632 `pubsubReinitSpy` used to signal that a subscription had happened before it actually had. This created a slight opportunity for the main goroutine to publish before the actual subscription was listening. The published event was then dropped, leading to a failed test. 
--- coderd/workspaceagents_test.go | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 10403f1ac00ae..9b12d15f3265b 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -2650,8 +2650,8 @@ func TestReinit(t *testing.T) { db, ps := dbtestutil.NewDB(t) pubsubSpy := pubsubReinitSpy{ - Pubsub: ps, - subscribed: make(chan string), + Pubsub: ps, + triedToSubscribe: make(chan string), } client := coderdtest.New(t, &coderdtest.Options{ Database: db, @@ -2664,9 +2664,9 @@ func TestReinit(t *testing.T) { OwnerID: user.UserID, }).WithAgent().Do() - pubsubSpy.Mutex.Lock() + pubsubSpy.Lock() pubsubSpy.expectedEvent = agentsdk.PrebuildClaimedChannel(r.Workspace.ID) - pubsubSpy.Mutex.Unlock() + pubsubSpy.Unlock() agentCtx := testutil.Context(t, testutil.WaitShort) agentClient := agentsdk.New(client.URL) @@ -2681,7 +2681,7 @@ func TestReinit(t *testing.T) { // We need to subscribe before we publish, lest we miss the event ctx := testutil.Context(t, testutil.WaitShort) - testutil.TryReceive(ctx, t, pubsubSpy.subscribed) // Wait for the appropriate subscription + testutil.TryReceive(ctx, t, pubsubSpy.triedToSubscribe) // Now that we're subscribed, publish the event err := prebuilds.NewPubsubWorkspaceClaimPublisher(ps).PublishWorkspaceClaim(agentsdk.ReinitializationEvent{ @@ -2699,15 +2699,16 @@ func TestReinit(t *testing.T) { type pubsubReinitSpy struct { pubsub.Pubsub sync.Mutex - subscribed chan string - expectedEvent string + triedToSubscribe chan string + expectedEvent string } func (p *pubsubReinitSpy) Subscribe(event string, listener pubsub.Listener) (cancel func(), err error) { + cancel, err = p.Pubsub.Subscribe(event, listener) p.Lock() if p.expectedEvent != "" && event == p.expectedEvent { - close(p.subscribed) + close(p.triedToSubscribe) } p.Unlock() - return p.Pubsub.Subscribe(event, listener) + return cancel, err } From 
98e2ec4417f93d7eb7485b0af431518659bdfa4b Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 19 May 2025 12:56:10 +0300 Subject: [PATCH 04/42] feat: show devcontainer dirty status and allow recreate (#17880) Updates #16424 --- agent/agentcontainers/acmock/acmock.go | 49 +++++- agent/agentcontainers/acmock/doc.go | 2 +- agent/agentcontainers/api.go | 56 ++++--- agent/agentcontainers/api_internal_test.go | 163 ------------------- agent/agentcontainers/api_test.go | 174 ++++++++++++++++++++- coderd/apidoc/docs.go | 40 +++++ coderd/apidoc/swagger.json | 38 +++++ coderd/coderd.go | 1 + coderd/workspaceagents.go | 86 ++++++++++ coderd/workspaceagents_test.go | 111 +++++++++++++ codersdk/workspaceagents.go | 17 ++ docs/reference/api/agents.md | 28 ++++ docs/reference/api/schemas.md | 29 ++-- site/src/api/typesGenerated.ts | 1 + site/src/testHelpers/entities.ts | 1 + 15 files changed, 598 insertions(+), 198 deletions(-) delete mode 100644 agent/agentcontainers/api_internal_test.go diff --git a/agent/agentcontainers/acmock/acmock.go b/agent/agentcontainers/acmock/acmock.go index 93c84e8c54fd3..869d2f7d0923b 100644 --- a/agent/agentcontainers/acmock/acmock.go +++ b/agent/agentcontainers/acmock/acmock.go @@ -1,9 +1,9 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: .. (interfaces: Lister) +// Source: .. (interfaces: Lister,DevcontainerCLI) // // Generated by this command: // -// mockgen -destination ./acmock.go -package acmock .. Lister +// mockgen -destination ./acmock.go -package acmock .. Lister,DevcontainerCLI // // Package acmock is a generated GoMock package. 
@@ -13,6 +13,7 @@ import ( context "context" reflect "reflect" + agentcontainers "github.com/coder/coder/v2/agent/agentcontainers" codersdk "github.com/coder/coder/v2/codersdk" gomock "go.uber.org/mock/gomock" ) @@ -55,3 +56,47 @@ func (mr *MockListerMockRecorder) List(ctx any) *gomock.Call { mr.mock.ctrl.T.Helper() return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "List", reflect.TypeOf((*MockLister)(nil).List), ctx) } + +// MockDevcontainerCLI is a mock of DevcontainerCLI interface. +type MockDevcontainerCLI struct { + ctrl *gomock.Controller + recorder *MockDevcontainerCLIMockRecorder + isgomock struct{} +} + +// MockDevcontainerCLIMockRecorder is the mock recorder for MockDevcontainerCLI. +type MockDevcontainerCLIMockRecorder struct { + mock *MockDevcontainerCLI +} + +// NewMockDevcontainerCLI creates a new mock instance. +func NewMockDevcontainerCLI(ctrl *gomock.Controller) *MockDevcontainerCLI { + mock := &MockDevcontainerCLI{ctrl: ctrl} + mock.recorder = &MockDevcontainerCLIMockRecorder{mock} + return mock +} + +// EXPECT returns an object that allows the caller to indicate expected use. +func (m *MockDevcontainerCLI) EXPECT() *MockDevcontainerCLIMockRecorder { + return m.recorder +} + +// Up mocks base method. +func (m *MockDevcontainerCLI) Up(ctx context.Context, workspaceFolder, configPath string, opts ...agentcontainers.DevcontainerCLIUpOptions) (string, error) { + m.ctrl.T.Helper() + varargs := []any{ctx, workspaceFolder, configPath} + for _, a := range opts { + varargs = append(varargs, a) + } + ret := m.ctrl.Call(m, "Up", varargs...) + ret0, _ := ret[0].(string) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// Up indicates an expected call of Up. +func (mr *MockDevcontainerCLIMockRecorder) Up(ctx, workspaceFolder, configPath any, opts ...any) *gomock.Call { + mr.mock.ctrl.T.Helper() + varargs := append([]any{ctx, workspaceFolder, configPath}, opts...) 
+ return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Up", reflect.TypeOf((*MockDevcontainerCLI)(nil).Up), varargs...) +} diff --git a/agent/agentcontainers/acmock/doc.go b/agent/agentcontainers/acmock/doc.go index 47679708b0fc8..b807efa253b75 100644 --- a/agent/agentcontainers/acmock/doc.go +++ b/agent/agentcontainers/acmock/doc.go @@ -1,4 +1,4 @@ // Package acmock contains a mock implementation of agentcontainers.Lister for use in tests. package acmock -//go:generate mockgen -destination ./acmock.go -package acmock .. Lister +//go:generate mockgen -destination ./acmock.go -package acmock .. Lister,DevcontainerCLI diff --git a/agent/agentcontainers/api.go b/agent/agentcontainers/api.go index c3393c3fdec9e..f2164c9a874ff 100644 --- a/agent/agentcontainers/api.go +++ b/agent/agentcontainers/api.go @@ -69,6 +69,15 @@ func WithClock(clock quartz.Clock) Option { } } +// WithCacheDuration sets the cache duration for the API. +// This is used to control how often the API refreshes the list of +// containers. The default is 10 seconds. +func WithCacheDuration(d time.Duration) Option { + return func(api *API) { + api.cacheDuration = d + } +} + // WithExecer sets the agentexec.Execer implementation to use. func WithExecer(execer agentexec.Execer) Option { return func(api *API) { @@ -336,7 +345,8 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC } // Check if the container is running and update the known devcontainers. 
- for _, container := range updated.Containers { + for i := range updated.Containers { + container := &updated.Containers[i] workspaceFolder := container.Labels[DevcontainerLocalFolderLabel] configFile := container.Labels[DevcontainerConfigFileLabel] @@ -344,6 +354,20 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC continue } + container.DevcontainerDirty = dirtyStates[workspaceFolder] + if container.DevcontainerDirty { + lastModified, hasModTime := api.configFileModifiedTimes[configFile] + if hasModTime && container.CreatedAt.After(lastModified) { + api.logger.Info(ctx, "new container created after config modification, not marking as dirty", + slog.F("container", container.ID), + slog.F("created_at", container.CreatedAt), + slog.F("config_modified_at", lastModified), + slog.F("file", configFile), + ) + container.DevcontainerDirty = false + } + } + // Check if this is already in our known list. if knownIndex := slices.IndexFunc(api.knownDevcontainers, func(dc codersdk.WorkspaceAgentDevcontainer) bool { return dc.WorkspaceFolder == workspaceFolder @@ -356,7 +380,7 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC } } api.knownDevcontainers[knownIndex].Running = container.Running - api.knownDevcontainers[knownIndex].Container = &container + api.knownDevcontainers[knownIndex].Container = container // Check if this container was created after the config // file was modified. 
@@ -395,28 +419,14 @@ func (api *API) getContainers(ctx context.Context) (codersdk.WorkspaceAgentListC } } - dirty := dirtyStates[workspaceFolder] - if dirty { - lastModified, hasModTime := api.configFileModifiedTimes[configFile] - if hasModTime && container.CreatedAt.After(lastModified) { - api.logger.Info(ctx, "new container created after config modification, not marking as dirty", - slog.F("container", container.ID), - slog.F("created_at", container.CreatedAt), - slog.F("config_modified_at", lastModified), - slog.F("file", configFile), - ) - dirty = false - } - } - api.knownDevcontainers = append(api.knownDevcontainers, codersdk.WorkspaceAgentDevcontainer{ ID: uuid.New(), Name: name, WorkspaceFolder: workspaceFolder, ConfigPath: configFile, Running: container.Running, - Dirty: dirty, - Container: &container, + Dirty: container.DevcontainerDirty, + Container: container, }) } @@ -510,6 +520,13 @@ func (api *API) handleDevcontainerRecreate(w http.ResponseWriter, r *http.Reques slog.F("name", api.knownDevcontainers[i].Name), ) api.knownDevcontainers[i].Dirty = false + // TODO(mafredri): This should be handled by a service that + // updates the devcontainer state periodically and on-demand. + api.knownDevcontainers[i].Container = nil + // Set the modified time to the zero value to indicate that + // the containers list must be refreshed. This will see to + // it that the new container is re-assigned. 
+ api.mtime = time.Time{} } return } @@ -579,6 +596,9 @@ func (api *API) markDevcontainerDirty(configPath string, modifiedAt time.Time) { slog.F("modified_at", modifiedAt), ) api.knownDevcontainers[i].Dirty = true + if api.knownDevcontainers[i].Container != nil { + api.knownDevcontainers[i].Container.DevcontainerDirty = true + } } } }) diff --git a/agent/agentcontainers/api_internal_test.go b/agent/agentcontainers/api_internal_test.go deleted file mode 100644 index 331c41e8df10b..0000000000000 --- a/agent/agentcontainers/api_internal_test.go +++ /dev/null @@ -1,163 +0,0 @@ -package agentcontainers - -import ( - "math/rand" - "strings" - "testing" - "time" - - "github.com/google/uuid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - "go.uber.org/mock/gomock" - - "cdr.dev/slog" - "cdr.dev/slog/sloggers/slogtest" - "github.com/coder/coder/v2/agent/agentcontainers/acmock" - "github.com/coder/coder/v2/codersdk" - "github.com/coder/coder/v2/testutil" - "github.com/coder/quartz" -) - -func TestAPI(t *testing.T) { - t.Parallel() - - // List tests the API.getContainers method using a mock - // implementation. It specifically tests caching behavior. 
- t.Run("List", func(t *testing.T) { - t.Parallel() - - fakeCt := fakeContainer(t) - fakeCt2 := fakeContainer(t) - makeResponse := func(cts ...codersdk.WorkspaceAgentContainer) codersdk.WorkspaceAgentListContainersResponse { - return codersdk.WorkspaceAgentListContainersResponse{Containers: cts} - } - - // Each test case is called multiple times to ensure idempotency - for _, tc := range []struct { - name string - // data to be stored in the handler - cacheData codersdk.WorkspaceAgentListContainersResponse - // duration of cache - cacheDur time.Duration - // relative age of the cached data - cacheAge time.Duration - // function to set up expectations for the mock - setupMock func(*acmock.MockLister) - // expected result - expected codersdk.WorkspaceAgentListContainersResponse - // expected error - expectedErr string - }{ - { - name: "no cache", - setupMock: func(mcl *acmock.MockLister) { - mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).AnyTimes() - }, - expected: makeResponse(fakeCt), - }, - { - name: "no data", - cacheData: makeResponse(), - cacheAge: 2 * time.Second, - cacheDur: time.Second, - setupMock: func(mcl *acmock.MockLister) { - mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).AnyTimes() - }, - expected: makeResponse(fakeCt), - }, - { - name: "cached data", - cacheAge: time.Second, - cacheData: makeResponse(fakeCt), - cacheDur: 2 * time.Second, - expected: makeResponse(fakeCt), - }, - { - name: "lister error", - setupMock: func(mcl *acmock.MockLister) { - mcl.EXPECT().List(gomock.Any()).Return(makeResponse(), assert.AnError).AnyTimes() - }, - expectedErr: assert.AnError.Error(), - }, - { - name: "stale cache", - cacheAge: 2 * time.Second, - cacheData: makeResponse(fakeCt), - cacheDur: time.Second, - setupMock: func(mcl *acmock.MockLister) { - mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt2), nil).AnyTimes() - }, - expected: makeResponse(fakeCt2), - }, - } { - tc := tc - t.Run(tc.name, func(t *testing.T) { - 
t.Parallel() - var ( - ctx = testutil.Context(t, testutil.WaitShort) - clk = quartz.NewMock(t) - ctrl = gomock.NewController(t) - mockLister = acmock.NewMockLister(ctrl) - now = time.Now().UTC() - logger = slogtest.Make(t, nil).Leveled(slog.LevelDebug) - api = NewAPI(logger, WithLister(mockLister)) - ) - defer api.Close() - - api.cacheDuration = tc.cacheDur - api.clock = clk - api.containers = tc.cacheData - if tc.cacheAge != 0 { - api.mtime = now.Add(-tc.cacheAge) - } - if tc.setupMock != nil { - tc.setupMock(mockLister) - } - - clk.Set(now).MustWait(ctx) - - // Repeat the test to ensure idempotency - for i := 0; i < 2; i++ { - actual, err := api.getContainers(ctx) - if tc.expectedErr != "" { - require.Empty(t, actual, "expected no data (attempt %d)", i) - require.ErrorContains(t, err, tc.expectedErr, "expected error (attempt %d)", i) - } else { - require.NoError(t, err, "expected no error (attempt %d)", i) - require.Equal(t, tc.expected, actual, "expected containers to be equal (attempt %d)", i) - } - } - }) - } - }) -} - -func fakeContainer(t *testing.T, mut ...func(*codersdk.WorkspaceAgentContainer)) codersdk.WorkspaceAgentContainer { - t.Helper() - ct := codersdk.WorkspaceAgentContainer{ - CreatedAt: time.Now().UTC(), - ID: uuid.New().String(), - FriendlyName: testutil.GetRandomName(t), - Image: testutil.GetRandomName(t) + ":" + strings.Split(uuid.New().String(), "-")[0], - Labels: map[string]string{ - testutil.GetRandomName(t): testutil.GetRandomName(t), - }, - Running: true, - Ports: []codersdk.WorkspaceAgentContainerPort{ - { - Network: "tcp", - Port: testutil.RandomPortNoListen(t), - HostPort: testutil.RandomPortNoListen(t), - //nolint:gosec // this is a test - HostIP: []string{"127.0.0.1", "[::1]", "localhost", "0.0.0.0", "[::]", testutil.GetRandomName(t)}[rand.Intn(6)], - }, - }, - Status: testutil.MustRandString(t, 10), - Volumes: map[string]string{testutil.GetRandomName(t): testutil.GetRandomName(t)}, - } - for _, m := range mut { - m(&ct) - } - return 
ct -} diff --git a/agent/agentcontainers/api_test.go b/agent/agentcontainers/api_test.go index 2c602de5cff3a..2e173b7d5a6b4 100644 --- a/agent/agentcontainers/api_test.go +++ b/agent/agentcontainers/api_test.go @@ -3,8 +3,10 @@ package agentcontainers_test import ( "context" "encoding/json" + "math/rand" "net/http" "net/http/httptest" + "strings" "testing" "time" @@ -13,11 +15,13 @@ import ( "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "go.uber.org/mock/gomock" "golang.org/x/xerrors" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/v2/agent/agentcontainers" + "github.com/coder/coder/v2/agent/agentcontainers/acmock" "github.com/coder/coder/v2/agent/agentcontainers/watcher" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/testutil" @@ -146,6 +150,136 @@ func (w *fakeWatcher) sendEventWaitNextCalled(ctx context.Context, event fsnotif func TestAPI(t *testing.T) { t.Parallel() + // List tests the API.getContainers method using a mock + // implementation. It specifically tests caching behavior. 
+ t.Run("List", func(t *testing.T) { + t.Parallel() + + fakeCt := fakeContainer(t) + fakeCt2 := fakeContainer(t) + makeResponse := func(cts ...codersdk.WorkspaceAgentContainer) codersdk.WorkspaceAgentListContainersResponse { + return codersdk.WorkspaceAgentListContainersResponse{Containers: cts} + } + + // Each test case is called multiple times to ensure idempotency + for _, tc := range []struct { + name string + // data to be stored in the handler + cacheData codersdk.WorkspaceAgentListContainersResponse + // duration of cache + cacheDur time.Duration + // relative age of the cached data + cacheAge time.Duration + // function to set up expectations for the mock + setupMock func(mcl *acmock.MockLister, preReq *gomock.Call) + // expected result + expected codersdk.WorkspaceAgentListContainersResponse + // expected error + expectedErr string + }{ + { + name: "no cache", + setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) { + mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).After(preReq).AnyTimes() + }, + expected: makeResponse(fakeCt), + }, + { + name: "no data", + cacheData: makeResponse(), + cacheAge: 2 * time.Second, + cacheDur: time.Second, + setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) { + mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt), nil).After(preReq).AnyTimes() + }, + expected: makeResponse(fakeCt), + }, + { + name: "cached data", + cacheAge: time.Second, + cacheData: makeResponse(fakeCt), + cacheDur: 2 * time.Second, + expected: makeResponse(fakeCt), + }, + { + name: "lister error", + setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) { + mcl.EXPECT().List(gomock.Any()).Return(makeResponse(), assert.AnError).After(preReq).AnyTimes() + }, + expectedErr: assert.AnError.Error(), + }, + { + name: "stale cache", + cacheAge: 2 * time.Second, + cacheData: makeResponse(fakeCt), + cacheDur: time.Second, + setupMock: func(mcl *acmock.MockLister, preReq *gomock.Call) { + 
mcl.EXPECT().List(gomock.Any()).Return(makeResponse(fakeCt2), nil).After(preReq).AnyTimes() + }, + expected: makeResponse(fakeCt2), + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + var ( + ctx = testutil.Context(t, testutil.WaitShort) + clk = quartz.NewMock(t) + ctrl = gomock.NewController(t) + mockLister = acmock.NewMockLister(ctrl) + now = time.Now().UTC() + logger = slogtest.Make(t, nil).Leveled(slog.LevelDebug) + r = chi.NewRouter() + api = agentcontainers.NewAPI(logger, + agentcontainers.WithCacheDuration(tc.cacheDur), + agentcontainers.WithClock(clk), + agentcontainers.WithLister(mockLister), + ) + ) + defer api.Close() + + r.Mount("/", api.Routes()) + + preReq := mockLister.EXPECT().List(gomock.Any()).Return(tc.cacheData, nil).Times(1) + if tc.setupMock != nil { + tc.setupMock(mockLister, preReq) + } + + if tc.cacheAge != 0 { + clk.Set(now.Add(-tc.cacheAge)).MustWait(ctx) + } else { + clk.Set(now).MustWait(ctx) + } + + // Prime the cache with the initial data. 
+ req := httptest.NewRequest(http.MethodGet, "/", nil) + rec := httptest.NewRecorder() + r.ServeHTTP(rec, req) + + clk.Set(now).MustWait(ctx) + + // Repeat the test to ensure idempotency + for i := 0; i < 2; i++ { + req = httptest.NewRequest(http.MethodGet, "/", nil) + rec = httptest.NewRecorder() + r.ServeHTTP(rec, req) + + if tc.expectedErr != "" { + got := &codersdk.Error{} + err := json.NewDecoder(rec.Body).Decode(got) + require.NoError(t, err, "unmarshal response failed") + require.ErrorContains(t, got, tc.expectedErr, "expected error (attempt %d)", i) + } else { + var got codersdk.WorkspaceAgentListContainersResponse + err := json.NewDecoder(rec.Body).Decode(&got) + require.NoError(t, err, "unmarshal response failed") + require.Equal(t, tc.expected, got, "expected containers to be equal (attempt %d)", i) + } + } + }) + } + }) + t.Run("Recreate", func(t *testing.T) { t.Parallel() @@ -660,6 +794,9 @@ func TestAPI(t *testing.T) { require.NoError(t, err) require.Len(t, response.Devcontainers, 1) assert.False(t, response.Devcontainers[0].Dirty, + "devcontainer should not be marked as dirty initially") + require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil") + assert.False(t, response.Devcontainers[0].Container.DevcontainerDirty, "container should not be marked as dirty initially") // Verify the watcher is watching the config file. 
@@ -689,6 +826,9 @@ func TestAPI(t *testing.T) { require.Len(t, response.Devcontainers, 1) assert.True(t, response.Devcontainers[0].Dirty, "container should be marked as dirty after config file was modified") + require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil") + assert.True(t, response.Devcontainers[0].Container.DevcontainerDirty, + "container should be marked as dirty after config file was modified") mClock.Advance(time.Minute).MustWait(ctx) @@ -707,7 +847,10 @@ func TestAPI(t *testing.T) { require.NoError(t, err) require.Len(t, response.Devcontainers, 1) assert.False(t, response.Devcontainers[0].Dirty, - "dirty flag should be cleared after container recreation") + "dirty flag should be cleared on the devcontainer after container recreation") + require.NotNil(t, response.Devcontainers[0].Container, "container should not be nil") + assert.False(t, response.Devcontainers[0].Container.DevcontainerDirty, + "dirty flag should be cleared on the container after container recreation") }) } @@ -725,3 +868,32 @@ func mustFindDevcontainerByPath(t *testing.T, devcontainers []codersdk.Workspace require.Failf(t, "no devcontainer found with workspace folder %q", path) return codersdk.WorkspaceAgentDevcontainer{} // Unreachable, but required for compilation } + +func fakeContainer(t *testing.T, mut ...func(*codersdk.WorkspaceAgentContainer)) codersdk.WorkspaceAgentContainer { + t.Helper() + ct := codersdk.WorkspaceAgentContainer{ + CreatedAt: time.Now().UTC(), + ID: uuid.New().String(), + FriendlyName: testutil.GetRandomName(t), + Image: testutil.GetRandomName(t) + ":" + strings.Split(uuid.New().String(), "-")[0], + Labels: map[string]string{ + testutil.GetRandomName(t): testutil.GetRandomName(t), + }, + Running: true, + Ports: []codersdk.WorkspaceAgentContainerPort{ + { + Network: "tcp", + Port: testutil.RandomPortNoListen(t), + HostPort: testutil.RandomPortNoListen(t), + //nolint:gosec // this is a test + HostIP: []string{"127.0.0.1", 
"[::1]", "localhost", "0.0.0.0", "[::]", testutil.GetRandomName(t)}[rand.Intn(6)], + }, + }, + Status: testutil.MustRandString(t, 10), + Volumes: map[string]string{testutil.GetRandomName(t): testutil.GetRandomName(t)}, + } + for _, m := range mut { + m(&ct) + } + return ct +} diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index f744b988956e9..d55582afbbe8b 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8606,6 +8606,42 @@ const docTemplate = `{ } } }, + "/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": [ + "Agents" + ], + "summary": "Recreate devcontainer for workspace agent", + "operationId": "recreate-devcontainer-for-workspace-agent", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace agent ID", + "name": "workspaceagent", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Container ID or name", + "name": "container", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "No Content" + } + } + } + }, "/workspaceagents/{workspaceagent}/coordinate": { "get": { "security": [ @@ -17134,6 +17170,10 @@ const docTemplate = `{ "type": "string", "format": "date-time" }, + "devcontainer_dirty": { + "description": "DevcontainerDirty is true if the devcontainer configuration has changed\nsince the container was created. 
This is used to determine if the\ncontainer needs to be rebuilt.", + "type": "boolean" + }, "id": { "description": "ID is the unique identifier of the container.", "type": "string" diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 1859a4f6f6214..00f940737a1d6 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7605,6 +7605,40 @@ } } }, + "/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate": { + "post": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "tags": ["Agents"], + "summary": "Recreate devcontainer for workspace agent", + "operationId": "recreate-devcontainer-for-workspace-agent", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace agent ID", + "name": "workspaceagent", + "in": "path", + "required": true + }, + { + "type": "string", + "description": "Container ID or name", + "name": "container", + "in": "path", + "required": true + } + ], + "responses": { + "204": { + "description": "No Content" + } + } + } + }, "/workspaceagents/{workspaceagent}/coordinate": { "get": { "security": [ @@ -15643,6 +15677,10 @@ "type": "string", "format": "date-time" }, + "devcontainer_dirty": { + "description": "DevcontainerDirty is true if the devcontainer configuration has changed\nsince the container was created. 
This is used to determine if the\ncontainer needs to be rebuilt.", + "type": "boolean" + }, "id": { "description": "ID is the unique identifier of the container.", "type": "string" diff --git a/coderd/coderd.go b/coderd/coderd.go index c3f45b15e4a30..3989f8a87ea1b 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -1326,6 +1326,7 @@ func New(options *Options) *API { r.Get("/listening-ports", api.workspaceAgentListeningPorts) r.Get("/connection", api.workspaceAgentConnection) r.Get("/containers", api.workspaceAgentListContainers) + r.Post("/containers/devcontainers/container/{container}/recreate", api.workspaceAgentRecreateDevcontainer) r.Get("/coordinate", api.workspaceAgentClientCoordinate) // PTY is part of workspaceAppServer. diff --git a/coderd/workspaceagents.go b/coderd/workspaceagents.go index 72a03580121af..8b94566e75715 100644 --- a/coderd/workspaceagents.go +++ b/coderd/workspaceagents.go @@ -15,6 +15,7 @@ import ( "strings" "time" + "github.com/go-chi/chi/v5" "github.com/google/uuid" "github.com/sqlc-dev/pqtype" "golang.org/x/exp/maps" @@ -893,6 +894,91 @@ func (api *API) workspaceAgentListContainers(rw http.ResponseWriter, r *http.Req httpapi.Write(ctx, rw, http.StatusOK, cts) } +// @Summary Recreate devcontainer for workspace agent +// @ID recreate-devcontainer-for-workspace-agent +// @Security CoderSessionToken +// @Tags Agents +// @Param workspaceagent path string true "Workspace agent ID" format(uuid) +// @Param container path string true "Container ID or name" +// @Success 204 +// @Router /workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate [post] +func (api *API) workspaceAgentRecreateDevcontainer(rw http.ResponseWriter, r *http.Request) { + ctx := r.Context() + workspaceAgent := httpmw.WorkspaceAgentParam(r) + + container := chi.URLParam(r, "container") + if container == "" { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: "Container ID or name is required.", + Validations: 
[]codersdk.ValidationError{ + {Field: "container", Detail: "Container ID or name is required."}, + }, + }) + return + } + + apiAgent, err := db2sdk.WorkspaceAgent( + api.DERPMap(), + *api.TailnetCoordinator.Load(), + workspaceAgent, + nil, + nil, + nil, + api.AgentInactiveDisconnectTimeout, + api.DeploymentValues.AgentFallbackTroubleshootingURL.String(), + ) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error reading workspace agent.", + Detail: err.Error(), + }) + return + } + if apiAgent.Status != codersdk.WorkspaceAgentConnected { + httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ + Message: fmt.Sprintf("Agent state is %q, it must be in the %q state.", apiAgent.Status, codersdk.WorkspaceAgentConnected), + }) + return + } + + // If the agent is unreachable, the request will hang. Assume that if we + // don't get a response after 30s that the agent is unreachable. + dialCtx, dialCancel := context.WithTimeout(ctx, 30*time.Second) + defer dialCancel() + agentConn, release, err := api.agentProvider.AgentConn(dialCtx, workspaceAgent.ID) + if err != nil { + httpapi.Write(dialCtx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error dialing workspace agent.", + Detail: err.Error(), + }) + return + } + defer release() + + err = agentConn.RecreateDevcontainer(ctx, container) + if err != nil { + if errors.Is(err, context.Canceled) { + httpapi.Write(ctx, rw, http.StatusRequestTimeout, codersdk.Response{ + Message: "Failed to recreate devcontainer from agent.", + Detail: "Request timed out.", + }) + return + } + // If the agent returns a codersdk.Error, we can return that directly. 
+ if cerr, ok := codersdk.AsError(err); ok { + httpapi.Write(ctx, rw, cerr.StatusCode(), cerr.Response) + return + } + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error recreating devcontainer.", + Detail: err.Error(), + }) + return + } + + httpapi.Write(ctx, rw, http.StatusNoContent, nil) +} + // @Summary Get connection info for workspace agent // @ID get-connection-info-for-workspace-agent // @Security CoderSessionToken diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 9b12d15f3265b..bd335e20b0fbb 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -8,6 +8,7 @@ import ( "net" "net/http" "os" + "path/filepath" "runtime" "strconv" "strings" @@ -36,6 +37,7 @@ import ( "github.com/coder/coder/v2/agent" "github.com/coder/coder/v2/agent/agentcontainers" "github.com/coder/coder/v2/agent/agentcontainers/acmock" + "github.com/coder/coder/v2/agent/agentcontainers/watcher" "github.com/coder/coder/v2/agent/agenttest" agentproto "github.com/coder/coder/v2/agent/proto" "github.com/coder/coder/v2/coderd/coderdtest" @@ -1347,6 +1349,115 @@ func TestWorkspaceAgentContainers(t *testing.T) { }) } +func TestWorkspaceAgentRecreateDevcontainer(t *testing.T) { + t.Parallel() + + t.Run("Mock", func(t *testing.T) { + t.Parallel() + + var ( + workspaceFolder = t.TempDir() + configFile = filepath.Join(workspaceFolder, ".devcontainer", "devcontainer.json") + dcLabels = map[string]string{ + agentcontainers.DevcontainerLocalFolderLabel: workspaceFolder, + agentcontainers.DevcontainerConfigFileLabel: configFile, + } + devContainer = codersdk.WorkspaceAgentContainer{ + ID: uuid.NewString(), + CreatedAt: dbtime.Now(), + FriendlyName: testutil.GetRandomName(t), + Image: "busybox:latest", + Labels: dcLabels, + Running: true, + Status: "running", + DevcontainerDirty: true, + } + plainContainer = codersdk.WorkspaceAgentContainer{ + ID: uuid.NewString(), + CreatedAt: dbtime.Now(), + 
FriendlyName: testutil.GetRandomName(t), + Image: "busybox:latest", + Labels: map[string]string{}, + Running: true, + Status: "running", + } + ) + + for _, tc := range []struct { + name string + setupMock func(*acmock.MockLister, *acmock.MockDevcontainerCLI) (status int) + }{ + { + name: "Recreate", + setupMock: func(mcl *acmock.MockLister, mdccli *acmock.MockDevcontainerCLI) int { + mcl.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{devContainer}, + }, nil).Times(1) + mdccli.EXPECT().Up(gomock.Any(), workspaceFolder, configFile, gomock.Any()).Return("someid", nil).Times(1) + return 0 + }, + }, + { + name: "Container does not exist", + setupMock: func(mcl *acmock.MockLister, mdccli *acmock.MockDevcontainerCLI) int { + mcl.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{}, nil).Times(1) + return http.StatusNotFound + }, + }, + { + name: "Not a devcontainer", + setupMock: func(mcl *acmock.MockLister, mdccli *acmock.MockDevcontainerCLI) int { + mcl.EXPECT().List(gomock.Any()).Return(codersdk.WorkspaceAgentListContainersResponse{ + Containers: []codersdk.WorkspaceAgentContainer{plainContainer}, + }, nil).Times(1) + return http.StatusNotFound + }, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + + ctrl := gomock.NewController(t) + mcl := acmock.NewMockLister(ctrl) + mdccli := acmock.NewMockDevcontainerCLI(ctrl) + wantStatus := tc.setupMock(mcl, mdccli) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{}) + user := coderdtest.CreateFirstUser(t, client) + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + return agents + }).Do() + _ = agenttest.New(t, client.URL, r.AgentToken, func(o *agent.Options) { + o.ExperimentalDevcontainersEnabled = true + o.ContainerAPIOptions = append( + 
o.ContainerAPIOptions, + agentcontainers.WithLister(mcl), + agentcontainers.WithDevcontainerCLI(mdccli), + agentcontainers.WithWatcher(watcher.NewNoop()), + ) + }) + resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).Wait() + require.Len(t, resources, 1, "expected one resource") + require.Len(t, resources[0].Agents, 1, "expected one agent") + agentID := resources[0].Agents[0].ID + + ctx := testutil.Context(t, testutil.WaitLong) + + err := client.WorkspaceAgentRecreateDevcontainer(ctx, agentID, devContainer.ID) + if wantStatus > 0 { + cerr, ok := codersdk.AsError(err) + require.True(t, ok, "expected error to be a coder error") + assert.Equal(t, wantStatus, cerr.StatusCode()) + } else { + require.NoError(t, err, "failed to recreate devcontainer") + } + }) + } + }) +} + func TestWorkspaceAgentAppHealth(t *testing.T) { t.Parallel() client, db := coderdtest.NewWithDatabase(t, nil) diff --git a/codersdk/workspaceagents.go b/codersdk/workspaceagents.go index f58338a209901..37048c6c4fcfe 100644 --- a/codersdk/workspaceagents.go +++ b/codersdk/workspaceagents.go @@ -439,6 +439,10 @@ type WorkspaceAgentContainer struct { // Volumes is a map of "things" mounted into the container. Again, this // is somewhat implementation-dependent. Volumes map[string]string `json:"volumes"` + // DevcontainerDirty is true if the devcontainer configuration has changed + // since the container was created. This is used to determine if the + // container needs to be rebuilt. + DevcontainerDirty bool `json:"devcontainer_dirty"` } func (c *WorkspaceAgentContainer) Match(idOrName string) bool { @@ -502,6 +506,19 @@ func (c *Client) WorkspaceAgentListContainers(ctx context.Context, agentID uuid. return cr, json.NewDecoder(res.Body).Decode(&cr) } +// WorkspaceAgentRecreateDevcontainer recreates the devcontainer with the given ID. 
+func (c *Client) WorkspaceAgentRecreateDevcontainer(ctx context.Context, agentID uuid.UUID, containerIDOrName string) error { + res, err := c.Request(ctx, http.MethodPost, fmt.Sprintf("/api/v2/workspaceagents/%s/containers/devcontainers/container/%s/recreate", agentID, containerIDOrName), nil) + if err != nil { + return err + } + defer res.Body.Close() + if res.StatusCode != http.StatusNoContent { + return ReadBodyAsError(res) + } + return nil +} + //nolint:revive // Follow is a control flag on the server as well. func (c *Client) WorkspaceAgentLogsAfter(ctx context.Context, agentID uuid.UUID, after int64, follow bool) (<-chan []WorkspaceAgentLog, io.Closer, error) { var queryParams []string diff --git a/docs/reference/api/agents.md b/docs/reference/api/agents.md index eced88f4f72cc..f126fec59978c 100644 --- a/docs/reference/api/agents.md +++ b/docs/reference/api/agents.md @@ -776,6 +776,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/con "containers": [ { "created_at": "2019-08-24T14:15:22Z", + "devcontainer_dirty": true, "id": "string", "image": "string", "labels": { @@ -813,6 +814,33 @@ curl -X GET http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/con To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+## Recreate devcontainer for workspace agent + +### Code samples + +```shell +# Example request using curl +curl -X POST http://coder-server:8080/api/v2/workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate \ + -H 'Coder-Session-Token: API_KEY' +``` + +`POST /workspaceagents/{workspaceagent}/containers/devcontainers/container/{container}/recreate` + +### Parameters + +| Name | In | Type | Required | Description | +|------------------|------|--------------|----------|----------------------| +| `workspaceagent` | path | string(uuid) | true | Workspace agent ID | +| `container` | path | string | true | Container ID or name | + +### Responses + +| Status | Meaning | Description | Schema | +|--------|-----------------------------------------------------------------|-------------|--------| +| 204 | [No Content](https://tools.ietf.org/html/rfc7231#section-6.3.5) | No Content | | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+ ## Coordinate workspace agent ### Code samples diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index a001b7210016d..aa704b0fe6a57 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -8621,6 +8621,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| ```json { "created_at": "2019-08-24T14:15:22Z", + "devcontainer_dirty": true, "id": "string", "image": "string", "labels": { @@ -8647,19 +8648,20 @@ If the schedule is empty, the user will be updated to use the default schedule.| ### Properties -| Name | Type | Required | Restrictions | Description | -|--------------------|---------------------------------------------------------------------------------------|----------|--------------|--------------------------------------------------------------------------------------------------------------------------------------------| -| `created_at` | string | false | | Created at is the time the container was created. | -| `id` | string | false | | ID is the unique identifier of the container. | -| `image` | string | false | | Image is the name of the container image. | -| `labels` | object | false | | Labels is a map of key-value pairs of container labels. | -| » `[any property]` | string | false | | | -| `name` | string | false | | Name is the human-readable name of the container. | -| `ports` | array of [codersdk.WorkspaceAgentContainerPort](#codersdkworkspaceagentcontainerport) | false | | Ports includes ports exposed by the container. | -| `running` | boolean | false | | Running is true if the container is currently running. | -| `status` | string | false | | Status is the current status of the container. This is somewhat implementation-dependent, but should generally be a human-readable string. | -| `volumes` | object | false | | Volumes is a map of "things" mounted into the container. Again, this is somewhat implementation-dependent. 
| -| » `[any property]` | string | false | | | +| Name | Type | Required | Restrictions | Description | +|----------------------|---------------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `created_at` | string | false | | Created at is the time the container was created. | +| `devcontainer_dirty` | boolean | false | | Devcontainer dirty is true if the devcontainer configuration has changed since the container was created. This is used to determine if the container needs to be rebuilt. | +| `id` | string | false | | ID is the unique identifier of the container. | +| `image` | string | false | | Image is the name of the container image. | +| `labels` | object | false | | Labels is a map of key-value pairs of container labels. | +| » `[any property]` | string | false | | | +| `name` | string | false | | Name is the human-readable name of the container. | +| `ports` | array of [codersdk.WorkspaceAgentContainerPort](#codersdkworkspaceagentcontainerport) | false | | Ports includes ports exposed by the container. | +| `running` | boolean | false | | Running is true if the container is currently running. | +| `status` | string | false | | Status is the current status of the container. This is somewhat implementation-dependent, but should generally be a human-readable string. | +| `volumes` | object | false | | Volumes is a map of "things" mounted into the container. Again, this is somewhat implementation-dependent. 
| +| » `[any property]` | string | false | | | ## codersdk.WorkspaceAgentContainerPort @@ -8726,6 +8728,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "containers": [ { "created_at": "2019-08-24T14:15:22Z", + "devcontainer_dirty": true, "id": "string", "image": "string", "labels": { diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 8017fef790dde..897e5f9012a4f 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -3309,6 +3309,7 @@ export interface WorkspaceAgentContainer { readonly ports: readonly WorkspaceAgentContainerPort[]; readonly status: string; readonly volumes: Record; + readonly devcontainer_dirty: boolean; } // From codersdk/workspaceagents.go diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index b05ec1d869b0d..6351e74d3c54d 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -4384,4 +4384,5 @@ export const MockWorkspaceAgentContainer: TypesGen.WorkspaceAgentContainer = { volumes: { "/mnt/volume1": "/volume1", }, + devcontainer_dirty: false, }; From 3dbd4245bead0a84988c32c917c6ce92192ddaa4 Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 19 May 2025 13:23:22 +0300 Subject: [PATCH 05/42] fix(dogfood/coder): stop docker containers and prune system on shutdown (#17904) This change adds docker stop and docker system prune to the shutdown script so that it doesn't need to be done by the Docker host which will take a lot longer. This change greatly speeds up workspace destruction: ``` 2025-05-19 12:26:57.046+03:00 docker_container.workspace[0]: Destroying... [id=2685e2f456ba7b280c420219f19ef15384faa52c61ba7c087c7f109ffa6b1bda] 2025-05-19 12:27:07.046+03:00 docker_container.workspace[0]: Still destroying... 
[10s elapsed] 2025-05-19 12:27:16.734+03:00 docker_container.workspace[0]: Destruction complete after 20s ``` Follow-up for #17110 --- dogfood/coder/main.tf | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index ddfd1f8e95e3d..4e4922e03a4ee 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -373,6 +373,17 @@ resource "coder_agent" "dev" { #!/usr/bin/env bash set -eux -o pipefail + # Stop all running containers and prune the system to clean up + # /var/lib/docker to prevent errors during workspace destroy. + # + # WARNING! This will remove: + # - all containers + # - all networks + # - all images + # - all build cache + docker ps -q | xargs docker stop + docker system prune -a + # Stop the Docker service to prevent errors during workspace destroy. sudo service docker stop EOT From 84478bd7d6493db1faac2ebdada1bc9f728ec84e Mon Sep 17 00:00:00 2001 From: Mathias Fredriksson Date: Mon, 19 May 2025 14:25:54 +0300 Subject: [PATCH 06/42] fix(dogfood/coder): add missing -f flag (#17906) --- dogfood/coder/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index 4e4922e03a4ee..e21602a26e922 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -382,7 +382,7 @@ resource "coder_agent" "dev" { # - all images # - all build cache docker ps -q | xargs docker stop - docker system prune -a + docker system prune -a -f # Stop the Docker service to prevent errors during workspace destroy. sudo service docker stop From a07298a1739f780c4b39307aebb8af5b65e77ec4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 11:30:15 +0000 Subject: [PATCH 07/42] ci: bump github/codeql-action from 3.28.17 to 3.28.18 in the github-actions group (#17907) Bumps the github-actions group with 1 update: [github/codeql-action](https://github.com/github/codeql-action). 
Updates `github/codeql-action` from 3.28.17 to 3.28.18
Release notes

Sourced from github/codeql-action's releases.

v3.28.18

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

3.28.18 - 16 May 2025

  • Update default CodeQL bundle version to 2.21.3. #2893
  • Skip validating SARIF produced by CodeQL for improved performance. #2894
  • The number of threads and amount of RAM used by CodeQL can now be set via the CODEQL_THREADS and CODEQL_RAM runner environment variables. If set, these environment variables override the threads and ram inputs respectively. #2891

See the full CHANGELOG.md for more information.

Changelog

Sourced from github/codeql-action's changelog.

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

[UNRELEASED]

No user facing changes.

3.28.18 - 16 May 2025

  • Update default CodeQL bundle version to 2.21.3. #2893
  • Skip validating SARIF produced by CodeQL for improved performance. #2894
  • The number of threads and amount of RAM used by CodeQL can now be set via the CODEQL_THREADS and CODEQL_RAM runner environment variables. If set, these environment variables override the threads and ram inputs respectively. #2891

3.28.17 - 02 May 2025

  • Update default CodeQL bundle version to 2.21.2. #2872

3.28.16 - 23 Apr 2025

  • Update default CodeQL bundle version to 2.21.1. #2863

3.28.15 - 07 Apr 2025

  • Fix bug where the action would fail if it tried to produce a debug artifact with more than 65535 files. #2842

3.28.14 - 07 Apr 2025

  • Update default CodeQL bundle version to 2.21.0. #2838

3.28.13 - 24 Mar 2025

No user facing changes.

3.28.12 - 19 Mar 2025

  • Dependency caching should now cache more dependencies for Java build-mode: none extractions. This should speed up workflows and avoid inconsistent alerts in some cases.
  • Update default CodeQL bundle version to 2.20.7. #2810

3.28.11 - 07 Mar 2025

  • Update default CodeQL bundle version to 2.20.6. #2793

3.28.10 - 21 Feb 2025

  • Update default CodeQL bundle version to 2.20.5. #2772
  • Address an issue where the CodeQL Bundle would occasionally fail to decompress on macOS. #2768

3.28.9 - 07 Feb 2025

... (truncated)

Commits
  • ff0a06e Merge pull request #2896 from github/update-v3.28.18-b86edfc27
  • a41e084 Update changelog for v3.28.18
  • b86edfc Merge pull request #2893 from github/update-bundle/codeql-bundle-v2.21.3
  • e93b900 Merge branch 'main' into update-bundle/codeql-bundle-v2.21.3
  • 510dfa3 Merge pull request #2894 from github/henrymercer/skip-validating-codeql-sarif
  • 492d783 Merge branch 'main' into henrymercer/skip-validating-codeql-sarif
  • 83bdf3b Merge pull request #2859 from github/update-supported-enterprise-server-versions
  • cffc916 Merge pull request #2891 from austinpray-mixpanel/patch-1
  • 4420887 Add deprecation warning for CodeQL 2.16.5 and earlier
  • 4e178c5 Update supported versions table in README
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github/codeql-action&package-manager=github_actions&previous-version=3.28.17&new-version=3.28.18)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore major version` will close this group update PR and stop Dependabot creating any more for the specific dependency's major version (unless you unignore this specific dependency's major version or upgrade to it yourself) - `@dependabot ignore minor version` will close this group update PR and stop Dependabot creating any more for the specific dependency's minor version (unless you unignore this specific dependency's minor version or upgrade to it yourself) - `@dependabot ignore ` will close this group update PR and stop Dependabot creating any more for the specific dependency (unless you unignore this specific dependency or upgrade to it yourself) - `@dependabot unignore ` will remove all of the ignore conditions of the specified dependency - `@dependabot unignore ` will remove the ignore condition of the specified dependency and ignore conditions
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/scorecard.yml | 2 +- .github/workflows/security.yaml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 5b68e4b26c20d..f9902ede655cf 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -47,6 +47,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17 + uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 with: sarif_file: results.sarif diff --git a/.github/workflows/security.yaml b/.github/workflows/security.yaml index f9f461cfe9966..721584b89e202 100644 --- a/.github/workflows/security.yaml +++ b/.github/workflows/security.yaml @@ -38,7 +38,7 @@ jobs: uses: ./.github/actions/setup-go - name: Initialize CodeQL - uses: github/codeql-action/init@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17 + uses: github/codeql-action/init@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 with: languages: go, javascript @@ -48,7 +48,7 @@ jobs: rm Makefile - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17 + uses: github/codeql-action/analyze@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 - name: Send Slack notification on failure if: ${{ failure() }} @@ -150,7 +150,7 @@ jobs: severity: "CRITICAL,HIGH" - name: Upload Trivy scan results to GitHub Security tab - uses: github/codeql-action/upload-sarif@60168efe1c415ce0f5521ea06d5c2062adbeed1b # v3.28.17 + uses: github/codeql-action/upload-sarif@ff0a06e83cb2de871e5a09832bc6a81e7276941f # v3.28.18 with: sarif_file: trivy-results.sarif category: "Trivy" From 1a434582bb3f926ffddfdfde15d35fd9fc7877c6 Mon Sep 17 00:00:00 
2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 12:29:59 +0000 Subject: [PATCH 08/42] chore: bump github.com/mark3labs/mcp-go from 0.27.0 to 0.28.0 (#17909) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/mark3labs/mcp-go](https://github.com/mark3labs/mcp-go) from 0.27.0 to 0.28.0.
Release notes

Sourced from github.com/mark3labs/mcp-go's releases.

Release v0.28.0

What's Changed

New Contributors

Full Changelog: https://github.com/mark3labs/mcp-go/compare/v0.27.1...v0.28.0

Release v0.27.1

What's Changed

New Contributors

Full Changelog: https://github.com/mark3labs/mcp-go/compare/v0.27.0...v0.27.1

Commits
  • 077f546 feat(MCPServer): support logging/setlevel request (#276)
  • 09c23b5 fix: type mismatch for request/response ID (#291)
  • 91ddba5 feat(protocol): allow additional fields in meta (#293)
  • eb835b9 fix: Gate notifications on capabilities (#290)
  • e7d2547 feat(tools): implicitly register capabilities (#292)
  • c1e70f3 fix(session): Don't send tool changed notifications if session not initialize...
  • e767652 fix(docs): Update README link (#284)
  • 239cfa4 chore: add a security policy (#283)
  • c46450c fix: proper deprecation messaging for WithHTTPContextFunc (#278)
  • 7bb1fd2 ci: add golangci-lint (#282)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/mark3labs/mcp-go&package-manager=go_modules&previous-version=0.27.0&new-version=0.28.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 32b4257f082fe..5a489f743c667 100644 --- a/go.mod +++ b/go.mod @@ -488,7 +488,7 @@ require ( github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 github.com/fsnotify/fsnotify v1.9.0 github.com/kylecarbs/aisdk-go v0.0.8 - github.com/mark3labs/mcp-go v0.27.0 + github.com/mark3labs/mcp-go v0.28.0 github.com/openai/openai-go v0.1.0-beta.10 google.golang.org/genai v0.7.0 ) diff --git a/go.sum b/go.sum index 2310faffb41d9..5ff7a5a3b9f45 100644 --- a/go.sum +++ b/go.sum @@ -1506,8 +1506,8 @@ github.com/makeworld-the-better-one/dither/v2 v2.4.0 h1:Az/dYXiTcwcRSe59Hzw4RI1r github.com/makeworld-the-better-one/dither/v2 v2.4.0/go.mod h1:VBtN8DXO7SNtyGmLiGA7IsFeKrBkQPze1/iAeM95arc= github.com/marekm4/color-extractor v1.2.1 h1:3Zb2tQsn6bITZ8MBVhc33Qn1k5/SEuZ18mrXGUqIwn0= github.com/marekm4/color-extractor v1.2.1/go.mod h1:90VjmiHI6M8ez9eYUaXLdcKnS+BAOp7w+NpwBdkJmpA= -github.com/mark3labs/mcp-go v0.27.0 h1:iok9kU4DUIU2/XVLgFS2Q9biIDqstC0jY4EQTK2Erzc= -github.com/mark3labs/mcp-go v0.27.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4= +github.com/mark3labs/mcp-go v0.28.0 h1:7yl4y5D1KYU2f/9Uxp7xfLIggfunHoESCRbrjcytcLM= +github.com/mark3labs/mcp-go v0.28.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= From 9367ef166385e7235c7a0818dd6fb88860f4b195 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 13:27:13 +0000 Subject: [PATCH 09/42] chore: bump cloud.google.com/go/compute/metadata from 
0.6.0 to 0.7.0 (#17913) Bumps [cloud.google.com/go/compute/metadata](https://github.com/googleapis/google-cloud-go) from 0.6.0 to 0.7.0.
Release notes

Sourced from cloud.google.com/go/compute/metadata's releases.

compute/metadata: v0.7.0

0.7.0 (2025-05-13)

Features

  • compute/metadata: Allow canceling GCE detection (#11786) (78100fe)
Changelog

Sourced from cloud.google.com/go/compute/metadata's changelog.

v0.7.0

  • Release of a client library for Spanner. See the blog post. Note that although the Spanner service is beta, the Go client library is alpha.
Commits
  • 2e6a95e pubsub: fix flaky streaming retry test
  • 581b839 pubsub: check early if streaming iterator is already drained
  • cc13a9b spanner: fix time.Time comparisons for upcoming Go1.9 monotonic times
  • 1ba9ec4 spanner: remove most logging from tests
  • 11737a0 spanner: skip some tests in short mode
  • 7bcba8a datastore: DRY up loading entity code
  • df9740f regenerate toolkit client
  • 960c768 trace: export tracing scopes
  • 8b0ab47 logadmin: retry on CreateMetric and UpdateMetric
  • 2066696 trace: clarify how gRPC options work
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=cloud.google.com/go/compute/metadata&package-manager=go_modules&previous-version=0.6.0&new-version=0.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 5a489f743c667..11434143a7bd7 100644 --- a/go.mod +++ b/go.mod @@ -74,7 +74,7 @@ replace github.com/spf13/afero => github.com/aslilac/afero v0.0.0-20250403163713 require ( cdr.dev/slog v1.6.2-0.20241112041820-0ec81e6e67bb - cloud.google.com/go/compute/metadata v0.6.0 + cloud.google.com/go/compute/metadata v0.7.0 github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d github.com/adrg/xdg v0.5.0 github.com/ammario/tlru v0.4.0 diff --git a/go.sum b/go.sum index 5ff7a5a3b9f45..a47646cb4bf6e 100644 --- a/go.sum +++ b/go.sum @@ -184,8 +184,8 @@ cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZ cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k= cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= -cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= +cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= +cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY= cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck= cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w= From 4e0fc6e17c987abd55f697cd1b1220fe71d2c1ec Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 19 May 2025 13:47:12 +0000 Subject: [PATCH 10/42] chore: bump github.com/hashicorp/terraform-json from 0.24.0 to 0.25.0 (#17914) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [github.com/hashicorp/terraform-json](https://github.com/hashicorp/terraform-json) from 0.24.0 to 0.25.0.
Release notes

Sourced from github.com/hashicorp/terraform-json's releases.

v0.25.0

ENHANCEMENTS:

INTERNAL:

Full Changelog: https://github.com/hashicorp/terraform-json/compare/v0.24.0...v0.25.0

Commits
  • c2689b1 github: Use Dependabot to keep Actions updated (#160)
  • 6bc20aa Add identity fields to Plan struct (#158)
  • b5939fa Update CODEOWNERS (#159)
  • c370ee7 Update owner field in catalog-info.yaml (#157)
  • 0b330eb build(deps): Bump workflows to latest trusted versions (#156)
  • f86d5e3 Update state and provider JSON with identity fields (#155)
  • 4d6dac0 Bump github.com/google/go-cmp from 0.6.0 to 0.7.0 (#154)
  • 323ee61 Merge pull request #153 from hashicorp/tsccr-auto-pinning/trusted/2025-02-03
  • 2eb7d11 Result of tsccr-helper -log-level=info gha update -latest .github/
  • 0169f43 Bump github.com/zclconf/go-cty from 1.16.1 to 1.16.2 (#152)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/hashicorp/terraform-json&package-manager=go_modules&previous-version=0.24.0&new-version=0.25.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 11434143a7bd7..c43feefefee4d 100644 --- a/go.mod +++ b/go.mod @@ -140,7 +140,7 @@ require ( github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/hc-install v0.9.2 github.com/hashicorp/terraform-config-inspect v0.0.0-20211115214459-90acf1ca460f - github.com/hashicorp/terraform-json v0.24.0 + github.com/hashicorp/terraform-json v0.25.0 github.com/hashicorp/yamux v0.1.2 github.com/hinshun/vt10x v0.0.0-20220301184237-5011da428d02 github.com/imulab/go-scim/pkg/v2 v2.2.0 diff --git a/go.sum b/go.sum index a47646cb4bf6e..9ffd716b334de 100644 --- a/go.sum +++ b/go.sum @@ -1385,8 +1385,8 @@ github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= -github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= -github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= +github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= +github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= github.com/hashicorp/terraform-plugin-go v0.26.0 h1:cuIzCv4qwigug3OS7iKhpGAbZTiypAfFQmw8aE65O2M= github.com/hashicorp/terraform-plugin-go v0.26.0/go.mod h1:+CXjuLDiFgqR+GcrM5a2E2Kal5t5q2jb0E3D57tTdNY= github.com/hashicorp/terraform-plugin-log v0.9.0 h1:i7hOA+vdAItN1/7UrfBqBwvYPQ9TFvymaRGZED3FCV0= From 766277c20e6847127e3bd6d590caac571f390407 Mon Sep 17 00:00:00 2001 From: Jaayden Halko Date: Mon, 19 May 2025 15:43:56 
+0100 Subject: [PATCH 11/42] fix: disable submit button on diagnostics error (#17900) --- .../CreateWorkspacePageViewExperimental.tsx | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx index 7d22316bfe4f7..365acfbacc0ec 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx @@ -516,7 +516,18 @@ export const CreateWorkspacePageViewExperimental: FC<
+ {job.worker_name || "[removed]"} + {job.worker_name && ( + + )} )} From 61f22a59ba368982ea5f42942061203bad915db0 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Mon, 19 May 2025 16:09:56 +0100 Subject: [PATCH 15/42] feat(agent): add `ParentId` to agent manifest (#17888) Closes https://github.com/coder/internal/issues/648 This change introduces a new `ParentId` field to the agent's manifest. This will allow an agent to know if it is a child or not, as well as knowing who the owner is. This is part of the Dev Container Agents work --- agent/agent.go | 6 +-- agent/agenttest/client.go | 4 +- agent/proto/agent.pb.go | 66 ++++++++++++++++------------ agent/proto/agent.proto | 1 + agent/proto/agent_drpc_old.go | 5 +++ coderd/agentapi/manifest.go | 6 +++ coderd/agentapi/manifest_test.go | 72 +++++++++++++++++++++++++++++++ coderd/workspaceagents_test.go | 2 +- codersdk/agentsdk/agentsdk.go | 14 +++++- tailnet/proto/tailnet_drpc_old.go | 5 +++ tailnet/proto/version.go | 6 ++- 11 files changed, 152 insertions(+), 35 deletions(-) diff --git a/agent/agent.go b/agent/agent.go index ffdacfb64ba75..927612302bf71 100644 --- a/agent/agent.go +++ b/agent/agent.go @@ -95,8 +95,8 @@ type Options struct { } type Client interface { - ConnectRPC24(ctx context.Context) ( - proto.DRPCAgentClient24, tailnetproto.DRPCTailnetClient24, error, + ConnectRPC25(ctx context.Context) ( + proto.DRPCAgentClient25, tailnetproto.DRPCTailnetClient25, error, ) RewriteDERPMap(derpMap *tailcfg.DERPMap) } @@ -908,7 +908,7 @@ func (a *agent) run() (retErr error) { a.sessionToken.Store(&sessionToken) // ConnectRPC returns the dRPC connection we use for the Agent and Tailnet v2+ APIs - aAPI, tAPI, err := a.client.ConnectRPC24(a.hardCtx) + aAPI, tAPI, err := a.client.ConnectRPC25(a.hardCtx) if err != nil { return err } diff --git a/agent/agenttest/client.go b/agent/agenttest/client.go index 24658c44d6e18..05011971c7c50 100644 --- a/agent/agenttest/client.go +++ b/agent/agenttest/client.go @@ -98,8 +98,8 
@@ func (c *Client) Close() { c.derpMapOnce.Do(func() { close(c.derpMapUpdates) }) } -func (c *Client) ConnectRPC24(ctx context.Context) ( - agentproto.DRPCAgentClient24, proto.DRPCTailnetClient24, error, +func (c *Client) ConnectRPC25(ctx context.Context) ( + agentproto.DRPCAgentClient25, proto.DRPCTailnetClient25, error, ) { conn, lis := drpcsdk.MemTransportPipe() c.LastWorkspaceAgent = func() { diff --git a/agent/proto/agent.pb.go b/agent/proto/agent.pb.go index ca454026f4790..562e349df9b2c 100644 --- a/agent/proto/agent.pb.go +++ b/agent/proto/agent.pb.go @@ -954,6 +954,7 @@ type Manifest struct { MotdPath string `protobuf:"bytes,6,opt,name=motd_path,json=motdPath,proto3" json:"motd_path,omitempty"` DisableDirectConnections bool `protobuf:"varint,7,opt,name=disable_direct_connections,json=disableDirectConnections,proto3" json:"disable_direct_connections,omitempty"` DerpForceWebsockets bool `protobuf:"varint,8,opt,name=derp_force_websockets,json=derpForceWebsockets,proto3" json:"derp_force_websockets,omitempty"` + ParentId []byte `protobuf:"bytes,18,opt,name=parent_id,json=parentId,proto3,oneof" json:"parent_id,omitempty"` DerpMap *proto.DERPMap `protobuf:"bytes,9,opt,name=derp_map,json=derpMap,proto3" json:"derp_map,omitempty"` Scripts []*WorkspaceAgentScript `protobuf:"bytes,10,rep,name=scripts,proto3" json:"scripts,omitempty"` Apps []*WorkspaceApp `protobuf:"bytes,11,rep,name=apps,proto3" json:"apps,omitempty"` @@ -1077,6 +1078,13 @@ func (x *Manifest) GetDerpForceWebsockets() bool { return false } +func (x *Manifest) GetParentId() []byte { + if x != nil { + return x.ParentId + } + return nil +} + func (x *Manifest) GetDerpMap() *proto.DERPMap { if x != nil { return x.DerpMap @@ -3665,7 +3673,7 @@ var file_agent_proto_agent_proto_rawDesc = []byte{ 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74, 
0x69, 0x6f, 0x6e, 0x52, 0x07, 0x74, 0x69, 0x6d, 0x65, 0x6f, 0x75, 0x74, - 0x22, 0xbc, 0x07, 0x0a, 0x08, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, + 0x22, 0xec, 0x07, 0x0a, 0x08, 0x4d, 0x61, 0x6e, 0x69, 0x66, 0x65, 0x73, 0x74, 0x12, 0x19, 0x0a, 0x08, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x0f, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x61, 0x67, @@ -3699,32 +3707,35 @@ var file_agent_proto_agent_proto_rawDesc = []byte{ 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x32, 0x0a, 0x15, 0x64, 0x65, 0x72, 0x70, 0x5f, 0x66, 0x6f, 0x72, 0x63, 0x65, 0x5f, 0x77, 0x65, 0x62, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x18, 0x08, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x64, 0x65, 0x72, 0x70, 0x46, 0x6f, 0x72, - 0x63, 0x65, 0x57, 0x65, 0x62, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x34, 0x0a, 0x08, - 0x64, 0x65, 0x72, 0x70, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, - 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x74, 0x61, 0x69, 0x6c, 0x6e, 0x65, 0x74, 0x2e, 0x76, - 0x32, 0x2e, 0x44, 0x45, 0x52, 0x50, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x64, 0x65, 0x72, 0x70, 0x4d, - 0x61, 0x70, 0x12, 0x3e, 0x0a, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x18, 0x0a, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, - 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x67, - 0x65, 0x6e, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x73, 0x12, 0x30, 0x0a, 0x04, 0x61, 0x70, 0x70, 0x73, 0x18, 0x0b, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x1c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, - 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x70, 0x70, 0x52, 0x04, - 
0x61, 0x70, 0x70, 0x73, 0x12, 0x4e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, - 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, - 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x2e, 0x44, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x12, 0x50, 0x0a, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, - 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x76, 0x63, 0x6f, - 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, - 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x1a, 0x47, 0x0a, 0x19, 0x45, 0x6e, 0x76, 0x69, 0x72, 0x6f, - 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x45, 0x6e, - 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, + 0x63, 0x65, 0x57, 0x65, 0x62, 0x73, 0x6f, 0x63, 0x6b, 0x65, 0x74, 0x73, 0x12, 0x20, 0x0a, 0x09, + 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x18, 0x12, 0x20, 0x01, 0x28, 0x0c, 0x48, + 0x00, 0x52, 0x08, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x88, 0x01, 0x01, 0x12, 0x34, + 0x0a, 0x08, 0x64, 0x65, 0x72, 0x70, 0x5f, 0x6d, 0x61, 0x70, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x19, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x74, 0x61, 0x69, 0x6c, 0x6e, 0x65, 0x74, + 0x2e, 0x76, 0x32, 0x2e, 0x44, 0x45, 
0x52, 0x50, 0x4d, 0x61, 0x70, 0x52, 0x07, 0x64, 0x65, 0x72, + 0x70, 0x4d, 0x61, 0x70, 0x12, 0x3e, 0x0a, 0x07, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x73, 0x18, + 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, + 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x41, 0x67, 0x65, 0x6e, 0x74, 0x53, 0x63, 0x72, 0x69, 0x70, 0x74, 0x52, 0x07, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x73, 0x12, 0x30, 0x0a, 0x04, 0x61, 0x70, 0x70, 0x73, 0x18, 0x0b, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, + 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x70, 0x70, + 0x52, 0x04, 0x61, 0x70, 0x70, 0x73, 0x12, 0x4e, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, + 0x74, 0x61, 0x18, 0x0c, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x63, 0x6f, 0x64, 0x65, 0x72, + 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, + 0x2e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x08, 0x6d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x50, 0x0a, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, 0x6e, + 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x18, 0x11, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2a, 0x2e, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2e, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2e, 0x76, 0x32, 0x2e, 0x57, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x76, + 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x52, 0x0d, 0x64, 0x65, 0x76, 0x63, 0x6f, + 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x73, 0x1a, 0x47, 0x0a, 0x19, 0x45, 0x6e, 0x76, 0x69, + 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x56, 0x61, 0x72, 0x69, 0x61, 0x62, 0x6c, 0x65, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 
0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x74, 0x5f, 0x69, 0x64, 0x22, 0x8c, 0x01, 0x0a, 0x1a, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x41, 0x67, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x76, 0x63, 0x6f, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x65, 0x72, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x02, 0x69, 0x64, 0x12, 0x29, @@ -4901,6 +4912,7 @@ func file_agent_proto_agent_proto_init() { } } } + file_agent_proto_agent_proto_msgTypes[3].OneofWrappers = []interface{}{} file_agent_proto_agent_proto_msgTypes[30].OneofWrappers = []interface{}{} file_agent_proto_agent_proto_msgTypes[33].OneofWrappers = []interface{}{} file_agent_proto_agent_proto_msgTypes[46].OneofWrappers = []interface{}{} diff --git a/agent/proto/agent.proto b/agent/proto/agent.proto index 5bfd867720cfa..f6237980b6fd6 100644 --- a/agent/proto/agent.proto +++ b/agent/proto/agent.proto @@ -90,6 +90,7 @@ message Manifest { string motd_path = 6; bool disable_direct_connections = 7; bool derp_force_websockets = 8; + optional bytes parent_id = 18; coder.tailnet.v2.DERPMap derp_map = 9; repeated WorkspaceAgentScript scripts = 10; diff --git a/agent/proto/agent_drpc_old.go b/agent/proto/agent_drpc_old.go index 63b666a259c5c..e1e6625908c8a 100644 --- a/agent/proto/agent_drpc_old.go +++ b/agent/proto/agent_drpc_old.go @@ -50,3 +50,8 @@ type DRPCAgentClient24 interface { PushResourcesMonitoringUsage(ctx context.Context, in *PushResourcesMonitoringUsageRequest) (*PushResourcesMonitoringUsageResponse, error) ReportConnection(ctx context.Context, in *ReportConnectionRequest) (*emptypb.Empty, error) } + +// DRPCAgentClient25 is the Agent API at v2.5. 
+type DRPCAgentClient25 interface { + DRPCAgentClient24 +} diff --git a/coderd/agentapi/manifest.go b/coderd/agentapi/manifest.go index db8a0af3946a9..66bfe4cb5f94f 100644 --- a/coderd/agentapi/manifest.go +++ b/coderd/agentapi/manifest.go @@ -120,6 +120,11 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest return nil, xerrors.Errorf("converting workspace apps: %w", err) } + var parentID []byte + if workspaceAgent.ParentID.Valid { + parentID = workspaceAgent.ParentID.UUID[:] + } + return &agentproto.Manifest{ AgentId: workspaceAgent.ID[:], AgentName: workspaceAgent.Name, @@ -133,6 +138,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest MotdPath: workspaceAgent.MOTDFile, DisableDirectConnections: a.DisableDirectConnections, DerpForceWebsockets: a.DerpForceWebSockets, + ParentId: parentID, DerpMap: tailnet.DERPMapToProto(a.DerpMapFn()), Scripts: dbAgentScriptsToProto(scripts), diff --git a/coderd/agentapi/manifest_test.go b/coderd/agentapi/manifest_test.go index 98e7ccc8c8b52..9273acb0c40ff 100644 --- a/coderd/agentapi/manifest_test.go +++ b/coderd/agentapi/manifest_test.go @@ -60,6 +60,13 @@ func TestGetManifest(t *testing.T) { Directory: "/cool/dir", MOTDFile: "/cool/motd", } + childAgent = database.WorkspaceAgent{ + ID: uuid.New(), + Name: "cool-child-agent", + ParentID: uuid.NullUUID{Valid: true, UUID: agent.ID}, + Directory: "/workspace/dir", + MOTDFile: "/workspace/motd", + } apps = []database.WorkspaceApp{ { ID: uuid.New(), @@ -337,6 +344,7 @@ func TestGetManifest(t *testing.T) { expected := &agentproto.Manifest{ AgentId: agent.ID[:], AgentName: agent.Name, + ParentId: nil, OwnerUsername: owner.Username, WorkspaceId: workspace.ID[:], WorkspaceName: workspace.Name, @@ -364,6 +372,70 @@ func TestGetManifest(t *testing.T) { require.Equal(t, expected, got) }) + t.Run("OK/Child", func(t *testing.T) { + t.Parallel() + + mDB := dbmock.NewMockStore(gomock.NewController(t)) + + api := 
&agentapi.ManifestAPI{ + AccessURL: &url.URL{Scheme: "https", Host: "example.com"}, + AppHostname: "*--apps.example.com", + ExternalAuthConfigs: []*externalauth.Config{ + {Type: string(codersdk.EnhancedExternalAuthProviderGitHub)}, + {Type: "some-provider"}, + {Type: string(codersdk.EnhancedExternalAuthProviderGitLab)}, + }, + DisableDirectConnections: true, + DerpForceWebSockets: true, + + AgentFn: func(ctx context.Context) (database.WorkspaceAgent, error) { + return childAgent, nil + }, + WorkspaceID: workspace.ID, + Database: mDB, + DerpMapFn: derpMapFn, + } + + mDB.EXPECT().GetWorkspaceAppsByAgentID(gomock.Any(), childAgent.ID).Return([]database.WorkspaceApp{}, nil) + mDB.EXPECT().GetWorkspaceAgentScriptsByAgentIDs(gomock.Any(), []uuid.UUID{childAgent.ID}).Return([]database.WorkspaceAgentScript{}, nil) + mDB.EXPECT().GetWorkspaceAgentMetadata(gomock.Any(), database.GetWorkspaceAgentMetadataParams{ + WorkspaceAgentID: childAgent.ID, + Keys: nil, // all + }).Return([]database.WorkspaceAgentMetadatum{}, nil) + mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), childAgent.ID).Return([]database.WorkspaceAgentDevcontainer{}, nil) + mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil) + mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil) + + got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{}) + require.NoError(t, err) + + expected := &agentproto.Manifest{ + AgentId: childAgent.ID[:], + AgentName: childAgent.Name, + ParentId: agent.ID[:], + OwnerUsername: owner.Username, + WorkspaceId: workspace.ID[:], + WorkspaceName: workspace.Name, + GitAuthConfigs: 2, // two "enhanced" external auth configs + EnvironmentVariables: nil, + Directory: childAgent.Directory, + VsCodePortProxyUri: fmt.Sprintf("https://{{port}}--%s--%s--%s--apps.example.com", childAgent.Name, workspace.Name, owner.Username), + MotdPath: childAgent.MOTDFile, + DisableDirectConnections: true, + 
DerpForceWebsockets: true, + // tailnet.DERPMapToProto() is extensively tested elsewhere, so it's + // not necessary to manually recreate a big DERP map here like we + // did for apps and metadata. + DerpMap: tailnet.DERPMapToProto(derpMapFn()), + Scripts: []*agentproto.WorkspaceAgentScript{}, + Apps: []*agentproto.WorkspaceApp{}, + Metadata: []*agentproto.WorkspaceAgentMetadata_Description{}, + Devcontainers: []*agentproto.WorkspaceAgentDevcontainer{}, + } + + require.Equal(t, expected, got) + }) + t.Run("NoAppHostname", func(t *testing.T) { t.Parallel() diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index bd335e20b0fbb..27da80b3c579b 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -2575,7 +2575,7 @@ func requireGetManifest(ctx context.Context, t testing.TB, aAPI agentproto.DRPCA } func postStartup(ctx context.Context, t testing.TB, client agent.Client, startup *agentproto.Startup) error { - aAPI, _, err := client.ConnectRPC24(ctx) + aAPI, _, err := client.ConnectRPC25(ctx) require.NoError(t, err) defer func() { cErr := aAPI.DRPCConn().Close() diff --git a/codersdk/agentsdk/agentsdk.go b/codersdk/agentsdk/agentsdk.go index ba3ff5681b742..9e6df933ce6c3 100644 --- a/codersdk/agentsdk/agentsdk.go +++ b/codersdk/agentsdk/agentsdk.go @@ -246,7 +246,7 @@ func (c *Client) ConnectRPC23(ctx context.Context) ( } // ConnectRPC24 returns a dRPC client to the Agent API v2.4. It is useful when you want to be -// maximally compatible with Coderd Release Versions from 2.xx+ // TODO @vincent: define version +// maximally compatible with Coderd Release Versions from 2.20+ func (c *Client) ConnectRPC24(ctx context.Context) ( proto.DRPCAgentClient24, tailnetproto.DRPCTailnetClient24, error, ) { @@ -257,6 +257,18 @@ func (c *Client) ConnectRPC24(ctx context.Context) ( return proto.NewDRPCAgentClient(conn), tailnetproto.NewDRPCTailnetClient(conn), nil } +// ConnectRPC25 returns a dRPC client to the Agent API v2.5. 
It is useful when you want to be +// maximally compatible with Coderd Release Versions from 2.xx+ // TODO(DanielleMaywood): Update version +func (c *Client) ConnectRPC25(ctx context.Context) ( + proto.DRPCAgentClient25, tailnetproto.DRPCTailnetClient25, error, +) { + conn, err := c.connectRPCVersion(ctx, apiversion.New(2, 5)) + if err != nil { + return nil, nil, err + } + return proto.NewDRPCAgentClient(conn), tailnetproto.NewDRPCTailnetClient(conn), nil +} + // ConnectRPC connects to the workspace agent API and tailnet API func (c *Client) ConnectRPC(ctx context.Context) (drpc.Conn, error) { return c.connectRPCVersion(ctx, proto.CurrentVersion) diff --git a/tailnet/proto/tailnet_drpc_old.go b/tailnet/proto/tailnet_drpc_old.go index c98932c9f41a7..ffbfa679b5912 100644 --- a/tailnet/proto/tailnet_drpc_old.go +++ b/tailnet/proto/tailnet_drpc_old.go @@ -40,3 +40,8 @@ type DRPCTailnetClient23 interface { type DRPCTailnetClient24 interface { DRPCTailnetClient23 } + +// DRPCTailnetClient25 is the Tailnet API at v2.5. +type DRPCTailnetClient25 interface { + DRPCTailnetClient24 +} diff --git a/tailnet/proto/version.go b/tailnet/proto/version.go index dd478fdcbdcd4..9e3e58ff0c051 100644 --- a/tailnet/proto/version.go +++ b/tailnet/proto/version.go @@ -45,9 +45,13 @@ import ( // PushResourcesMonitoringUsage RPCs on the Agent API. // - Added support for reporting connection events for auditing via the // ReportConnection RPC on the Agent API. +// +// API v2.5: +// - Shipped in Coder v2.xx.x // TODO(DanielleMaywood): Update version +// - Added `ParentId` to the agent manifest. 
const ( CurrentMajor = 2 - CurrentMinor = 4 + CurrentMinor = 5 ) var CurrentVersion = apiversion.New(CurrentMajor, CurrentMinor) From ac7961a5b0d9c5c5bc62e09adb4519a63af62cdf Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 19 May 2025 16:58:12 +0100 Subject: [PATCH 16/42] feat: add Organization Provisioner Keys view (#17889) Fixes https://github.com/coder/coder/issues/17698 **Demo:** https://github.com/user-attachments/assets/ba92693f-29b7-43ee-8d69-3d77214f3230 --------- Co-authored-by: BrunoQuaresma --- site/src/api/queries/organizations.ts | 2 +- site/src/components/Badge/Badge.tsx | 23 ++- .../management/OrganizationSidebarView.tsx | 5 + .../modules/provisioners/ProvisionerTags.tsx | 2 +- .../OrganizationProvisionerKeysPage.tsx | 62 ++++++++ ...izationProvisionerKeysPageView.stories.tsx | 112 +++++++++++++++ .../OrganizationProvisionerKeysPageView.tsx | 123 ++++++++++++++++ .../ProvisionerKeyRow.tsx | 136 ++++++++++++++++++ site/src/router.tsx | 10 ++ site/src/testHelpers/entities.ts | 2 +- 10 files changed, 471 insertions(+), 6 deletions(-) create mode 100644 site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx create mode 100644 site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx create mode 100644 site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.tsx create mode 100644 site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx diff --git a/site/src/api/queries/organizations.ts b/site/src/api/queries/organizations.ts index c7b42f5f0e79f..608b2fa2a1ac4 100644 --- a/site/src/api/queries/organizations.ts +++ b/site/src/api/queries/organizations.ts @@ -187,7 +187,7 @@ const getProvisionerDaemonGroupsKey = (organization: string) => [ "provisionerDaemons", ]; -const provisionerDaemonGroups = (organization: string) => { +export const 
provisionerDaemonGroups = (organization: string) => { return { queryKey: getProvisionerDaemonGroupsKey(organization), queryFn: () => API.getProvisionerDaemonGroupsByOrganization(organization), diff --git a/site/src/components/Badge/Badge.tsx b/site/src/components/Badge/Badge.tsx index e6b23b8a4dd94..b4d405055bb98 100644 --- a/site/src/components/Badge/Badge.tsx +++ b/site/src/components/Badge/Badge.tsx @@ -9,7 +9,6 @@ import { cn } from "utils/cn"; const badgeVariants = cva( `inline-flex items-center rounded-md border px-2 py-1 transition-colors - focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2 [&_svg]:pointer-events-none [&_svg]:pr-0.5 [&_svg]:py-0.5 [&_svg]:mr-0.5`, { variants: { @@ -30,11 +29,23 @@ const badgeVariants = cva( none: "border-transparent", solid: "border border-solid", }, + hover: { + false: null, + true: "no-underline focus:outline-none focus-visible:ring-2 focus-visible:ring-content-link", + }, }, + compoundVariants: [ + { + hover: true, + variant: "default", + class: "hover:bg-surface-tertiary", + }, + ], defaultVariants: { variant: "default", size: "md", border: "solid", + hover: false, }, }, ); @@ -46,14 +57,20 @@ export interface BadgeProps } export const Badge = forwardRef( - ({ className, variant, size, border, asChild = false, ...props }, ref) => { + ( + { className, variant, size, border, hover, asChild = false, ...props }, + ref, + ) => { const Comp = asChild ? 
Slot : "div"; return ( ); }, diff --git a/site/src/modules/management/OrganizationSidebarView.tsx b/site/src/modules/management/OrganizationSidebarView.tsx index a03dc62b65c0e..745268278da49 100644 --- a/site/src/modules/management/OrganizationSidebarView.tsx +++ b/site/src/modules/management/OrganizationSidebarView.tsx @@ -190,6 +190,11 @@ const OrganizationSettingsNavigation: FC< > Provisioners + + Provisioner Keys + diff --git a/site/src/modules/provisioners/ProvisionerTags.tsx b/site/src/modules/provisioners/ProvisionerTags.tsx index b31be42df234f..667d2cb56ef15 100644 --- a/site/src/modules/provisioners/ProvisionerTags.tsx +++ b/site/src/modules/provisioners/ProvisionerTags.tsx @@ -9,7 +9,7 @@ export const ProvisionerTags: FC> = ({ return (
); }; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx new file mode 100644 index 0000000000000..77bcfe10cb229 --- /dev/null +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage.tsx @@ -0,0 +1,62 @@ +import { provisionerDaemonGroups } from "api/queries/organizations"; +import { EmptyState } from "components/EmptyState/EmptyState"; +import { useDashboard } from "modules/dashboard/useDashboard"; +import { useOrganizationSettings } from "modules/management/OrganizationSettingsLayout"; +import { RequirePermission } from "modules/permissions/RequirePermission"; +import type { FC } from "react"; +import { Helmet } from "react-helmet-async"; +import { useQuery } from "react-query"; +import { useParams } from "react-router-dom"; +import { pageTitle } from "utils/page"; +import { OrganizationProvisionerKeysPageView } from "./OrganizationProvisionerKeysPageView"; + +const OrganizationProvisionerKeysPage: FC = () => { + const { organization: organizationName } = useParams() as { + organization: string; + }; + const { organization, organizationPermissions } = useOrganizationSettings(); + const { entitlements } = useDashboard(); + const provisionerKeyDaemonsQuery = useQuery({ + ...provisionerDaemonGroups(organizationName), + select: (data) => + [...data].sort((a, b) => b.daemons.length - a.daemons.length), + }); + + if (!organization) { + return ; + } + + const helmet = ( + + + {pageTitle( + "Provisioner Keys", + organization.display_name || organization.name, + )} + + + ); + + if (!organizationPermissions?.viewProvisioners) { + return ( + <> + {helmet} + + + ); + } + + return ( + <> + {helmet} + + + ); +}; + +export default OrganizationProvisionerKeysPage; diff --git 
a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx new file mode 100644 index 0000000000000..f30ea66175e07 --- /dev/null +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.stories.tsx @@ -0,0 +1,112 @@ +import type { Meta, StoryObj } from "@storybook/react"; +import { + type ProvisionerKeyDaemons, + ProvisionerKeyIDBuiltIn, + ProvisionerKeyIDPSK, + ProvisionerKeyIDUserAuth, +} from "api/typesGenerated"; +import { + MockProvisioner, + MockProvisionerKey, + mockApiError, +} from "testHelpers/entities"; +import { OrganizationProvisionerKeysPageView } from "./OrganizationProvisionerKeysPageView"; + +const mockProvisionerKeyDaemons: ProvisionerKeyDaemons[] = [ + { + key: { + ...MockProvisionerKey, + }, + daemons: [ + { + ...MockProvisioner, + name: "Test Provisioner 1", + id: "daemon-1", + }, + { + ...MockProvisioner, + name: "Test Provisioner 2", + id: "daemon-2", + }, + ], + }, + { + key: { + ...MockProvisionerKey, + name: "no-daemons", + }, + daemons: [], + }, + // Built-in provisioners, user-auth, and PSK keys are not shown here. 
+ { + key: { + ...MockProvisionerKey, + id: ProvisionerKeyIDBuiltIn, + name: "built-in", + }, + daemons: [], + }, + { + key: { + ...MockProvisionerKey, + id: ProvisionerKeyIDUserAuth, + name: "user-auth", + }, + daemons: [], + }, + { + key: { + ...MockProvisionerKey, + id: ProvisionerKeyIDPSK, + name: "PSK", + }, + daemons: [], + }, +]; + +const meta: Meta = { + title: "pages/OrganizationProvisionerKeysPage", + component: OrganizationProvisionerKeysPageView, + args: { + error: undefined, + provisionerKeyDaemons: mockProvisionerKeyDaemons, + onRetry: () => {}, + }, +}; + +export default meta; +type Story = StoryObj; + +export const Default: Story = { + args: { + error: undefined, + provisionerKeyDaemons: mockProvisionerKeyDaemons, + onRetry: () => {}, + showPaywall: false, + }, +}; + +export const Paywalled: Story = { + ...Default, + args: { + showPaywall: true, + }, +}; + +export const Empty: Story = { + ...Default, + args: { + provisionerKeyDaemons: [], + }, +}; + +export const WithError: Story = { + ...Default, + args: { + provisionerKeyDaemons: undefined, + error: mockApiError({ + message: "Error loading provisioner keys", + detail: "Something went wrong. 
This is an unhelpful error message.", + }), + }, +}; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.tsx new file mode 100644 index 0000000000000..5373636308f15 --- /dev/null +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPageView.tsx @@ -0,0 +1,123 @@ +import { + type ProvisionerKeyDaemons, + ProvisionerKeyIDBuiltIn, + ProvisionerKeyIDPSK, + ProvisionerKeyIDUserAuth, +} from "api/typesGenerated"; +import { Button } from "components/Button/Button"; +import { EmptyState } from "components/EmptyState/EmptyState"; +import { Link } from "components/Link/Link"; +import { Loader } from "components/Loader/Loader"; +import { Paywall } from "components/Paywall/Paywall"; +import { + SettingsHeader, + SettingsHeaderDescription, + SettingsHeaderTitle, +} from "components/SettingsHeader/SettingsHeader"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "components/Table/Table"; +import type { FC } from "react"; +import { docs } from "utils/docs"; +import { ProvisionerKeyRow } from "./ProvisionerKeyRow"; + +// If the user using provisioner keys for external provisioners you're unlikely to +// want to keep the built-in provisioners. +const HIDDEN_PROVISIONER_KEYS = [ + ProvisionerKeyIDBuiltIn, + ProvisionerKeyIDUserAuth, + ProvisionerKeyIDPSK, +]; + +interface OrganizationProvisionerKeysPageViewProps { + showPaywall: boolean | undefined; + provisionerKeyDaemons: ProvisionerKeyDaemons[] | undefined; + error: unknown; + onRetry: () => void; +} + +export const OrganizationProvisionerKeysPageView: FC< + OrganizationProvisionerKeysPageViewProps +> = ({ showPaywall, provisionerKeyDaemons, error, onRetry }) => { + return ( +
+ + Provisioner Keys + + Manage provisioner keys used to authenticate provisioner instances.{" "} + View docs + + + + {showPaywall ? ( + + ) : ( + + + + Name + Tags + Provisioners + Created + + + + {provisionerKeyDaemons ? ( + provisionerKeyDaemons.length === 0 ? ( + + + + + + ) : ( + provisionerKeyDaemons + .filter( + (pkd) => !HIDDEN_PROVISIONER_KEYS.includes(pkd.key.id), + ) + .map((pkd) => ( + + )) + ) + ) : error ? ( + + + + Retry + + } + /> + + + ) : ( + + + + + + )} + +
+ )} +
+ ); +}; diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx new file mode 100644 index 0000000000000..e1b337c85dacb --- /dev/null +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx @@ -0,0 +1,136 @@ +import type { ProvisionerDaemon, ProvisionerKey } from "api/typesGenerated"; +import { Badge } from "components/Badge/Badge"; +import { Button } from "components/Button/Button"; +import { TableCell, TableRow } from "components/Table/Table"; +import { ChevronDownIcon, ChevronRightIcon } from "lucide-react"; +import { + ProvisionerTag, + ProvisionerTags, + ProvisionerTruncateTags, +} from "modules/provisioners/ProvisionerTags"; +import { type FC, useState } from "react"; +import { Link as RouterLink } from "react-router-dom"; +import { cn } from "utils/cn"; +import { relativeTime } from "utils/time"; + +type ProvisionerKeyRowProps = { + readonly provisionerKey: ProvisionerKey; + readonly provisioners: readonly ProvisionerDaemon[]; + defaultIsOpen: boolean; +}; + +export const ProvisionerKeyRow: FC = ({ + provisionerKey, + provisioners, + defaultIsOpen = false, +}) => { + const [isOpen, setIsOpen] = useState(defaultIsOpen); + + return ( + <> + + + + + + {Object.entries(provisionerKey.tags).length > 0 ? ( + + ) : ( + No tags + )} + + + {provisioners.length > 0 ? ( + + ) : ( + No provisioners + )} + + + + {relativeTime(new Date(provisionerKey.created_at))} + + + + + {isOpen && ( + + +
+
Creation time:
+
{provisionerKey.created_at}
+ +
Tags:
+
+ + {Object.entries(provisionerKey.tags).length === 0 && ( + No tags + )} + {Object.entries(provisionerKey.tags).map(([key, value]) => ( + + ))} + +
+ +
Provisioners:
+
+ + {provisioners.length === 0 && ( + + No provisioners + + )} + {provisioners.map((provisioner) => ( + + + {provisionerKey.name} + + + ))} + +
+
+
+
+ )} + + ); +}; + +type TruncateProvisionersProps = { + provisioners: readonly ProvisionerDaemon[]; +}; + +const TruncateProvisioners: FC = ({ + provisioners, +}) => { + const firstProvisioner = provisioners[0]; + const remainderCount = provisioners.length - 1; + + return ( + + {firstProvisioner.name} + {remainderCount > 0 && +{remainderCount}} + + ); +}; diff --git a/site/src/router.tsx b/site/src/router.tsx index 534d4037d02b3..5784696a16f2d 100644 --- a/site/src/router.tsx +++ b/site/src/router.tsx @@ -313,6 +313,12 @@ const ChangePasswordPage = lazy( const IdpOrgSyncPage = lazy( () => import("./pages/DeploymentSettingsPage/IdpOrgSyncPage/IdpOrgSyncPage"), ); +const ProvisionerKeysPage = lazy( + () => + import( + "./pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/OrganizationProvisionerKeysPage" + ), +); const ProvisionerJobsPage = lazy( () => import( @@ -449,6 +455,10 @@ export const router = createBrowserRouter( path="provisioner-jobs" element={} /> + } + /> } /> } /> diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index 6351e74d3c54d..e09b196a82446 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -561,7 +561,7 @@ export const MockOrganizationMember2: TypesGen.OrganizationMemberWithUserData = roles: [], }; -const MockProvisionerKey: TypesGen.ProvisionerKey = { +export const MockProvisionerKey: TypesGen.ProvisionerKey = { id: "test-provisioner-key", organization: MockOrganization.id, created_at: "2022-05-17T17:39:01.382927298Z", From ca5f1142047bb85d1f4161d028eda390e3f722c5 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 19 May 2025 13:27:58 -0300 Subject: [PATCH 17/42] refactor: update cli auth page design (#17915) Improve UX of CLI Auth page. 
**Before:** Screenshot 2025-05-19 at 09 22 36 **After:** https://github.com/user-attachments/assets/01dfcd70-d0a6-48bb-9186-77da24001498 Fixes https://github.com/coder/coder/issues/17905 --- .../src/pages/CliAuthPage/CliAuthPageView.tsx | 86 +++++++------------ 1 file changed, 33 insertions(+), 53 deletions(-) diff --git a/site/src/pages/CliAuthPage/CliAuthPageView.tsx b/site/src/pages/CliAuthPage/CliAuthPageView.tsx index ddda2dec789e9..a32345dcb5673 100644 --- a/site/src/pages/CliAuthPage/CliAuthPageView.tsx +++ b/site/src/pages/CliAuthPage/CliAuthPageView.tsx @@ -1,9 +1,9 @@ -import type { Interpolation, Theme } from "@emotion/react"; -import { visuallyHidden } from "@mui/utils"; -import { CodeExample } from "components/CodeExample/CodeExample"; -import { Loader } from "components/Loader/Loader"; +import { Button } from "components/Button/Button"; import { SignInLayout } from "components/SignInLayout/SignInLayout"; +import { Spinner } from "components/Spinner/Spinner"; import { Welcome } from "components/Welcome/Welcome"; +import { useClipboard } from "hooks"; +import { CheckIcon, CopyIcon } from "lucide-react"; import type { FC } from "react"; import { Link as RouterLink } from "react-router-dom"; @@ -11,63 +11,43 @@ export interface CliAuthPageViewProps { sessionToken?: string; } -const VISUALLY_HIDDEN_SPACE = " "; - export const CliAuthPageView: FC = ({ sessionToken }) => { - if (!sessionToken) { - return ; - } + const clipboard = useClipboard({ + textToCopy: sessionToken ?? "", + }); return ( - Session token + Session token -

- Copy the session token below and - {/* - * This looks silly, but it's a case where you want to hide the space - * visually because it messes up the centering, but you want the space - * to still be available to screen readers - */} - {VISUALLY_HIDDEN_SPACE} - paste it in your terminal. +

+ Copy the session token below and{" "} + paste it in your terminal.

- - -
- - Go to workspaces - +
+ + +
); }; - -const styles = { - instructions: (theme) => ({ - fontSize: 16, - color: theme.palette.text.secondary, - paddingBottom: 8, - textAlign: "center", - lineHeight: 1.4, - - // Have to undo styling side effects from component - marginTop: -24, - }), - - backLink: (theme) => ({ - display: "block", - textAlign: "center", - color: theme.palette.text.primary, - textDecoration: "underline", - textUnderlineOffset: 3, - textDecorationColor: "hsla(0deg, 0%, 100%, 0.7)", - paddingTop: 16, - paddingBottom: 16, - - "&:hover": { - textDecoration: "none", - }, - }), -} satisfies Record>; From 433f0be53d80f49360c01e685691d3a68f78ffe8 Mon Sep 17 00:00:00 2001 From: Cian Johnston Date: Mon, 19 May 2025 18:35:22 +0100 Subject: [PATCH 18/42] fix: show provisioner name instead of key name in expanded ProvisionerKeyRow (#17921) --- .../OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx index e1b337c85dacb..dd0a2e2aeb954 100644 --- a/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx +++ b/site/src/pages/OrganizationSettingsPage/OrganizationProvisionerKeysPage/ProvisionerKeyRow.tsx @@ -103,7 +103,7 @@ export const ProvisionerKeyRow: FC = ({ - {provisionerKey.name} + {provisioner.name} ))} From fe733afd141bf1649174fe5aad73de94b9e8cd42 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 19 May 2025 16:43:26 -0300 Subject: [PATCH 19/42] chore: fix flake on useAgentLogs (#17919) We need to wait for the result since the result is depending on effects. 
Fix https://github.com/coder/internal/issues/644 --- site/src/modules/resources/useAgentLogs.test.ts | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/site/src/modules/resources/useAgentLogs.test.ts b/site/src/modules/resources/useAgentLogs.test.ts index 8480f756611d2..a5339e00c87eb 100644 --- a/site/src/modules/resources/useAgentLogs.test.ts +++ b/site/src/modules/resources/useAgentLogs.test.ts @@ -1,4 +1,4 @@ -import { renderHook } from "@testing-library/react"; +import { renderHook, waitFor } from "@testing-library/react"; import type { WorkspaceAgentLog } from "api/typesGenerated"; import WS from "jest-websocket-mock"; import { MockWorkspaceAgent } from "testHelpers/entities"; @@ -29,17 +29,23 @@ describe("useAgentLogs", () => { // Send 3 logs server.send(JSON.stringify(generateLogs(3))); - expect(result.current).toHaveLength(3); + await waitFor(() => { + expect(result.current).toHaveLength(3); + }); // Disable the hook rerender({ enabled: false }); - expect(result.current).toHaveLength(0); + await waitFor(() => { + expect(result.current).toHaveLength(0); + }); // Enable the hook again rerender({ enabled: true }); await server.connected; server.send(JSON.stringify(generateLogs(3))); - expect(result.current).toHaveLength(3); + await waitFor(() => { + expect(result.current).toHaveLength(3); + }); }); }); From 358b64154e61b2d172a3074807cd428261e251d9 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Mon, 19 May 2025 16:15:15 -0500 Subject: [PATCH 20/42] chore: skip parameter resolution for dynamic params (#17922) Pass through the user input as is. The previous code only passed through parameters that existed in the db (static params). This would omit conditional params. Validation is enforced by the dynamic params websocket, so validation at this point is not required. 
--- coderd/wsbuilder/wsbuilder.go | 42 ++++++++++++++++++++++------------- 1 file changed, 27 insertions(+), 15 deletions(-) diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 91638c63e436f..64389b7532066 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -593,30 +593,42 @@ func (b *Builder) getParameters() (names, values []string, err error) { return nil, nil, BuildError{http.StatusBadRequest, "Unable to build workspace with unsupported parameters", err} } + if b.dynamicParametersEnabled { + // Dynamic parameters skip all parameter validation. + // Pass the user's input as is. + // TODO: The previous behavior was only to pass param values + // for parameters that exist. Since dynamic params can have + // conditional parameter existence, the static frame of reference + // is not sufficient. So assume the user is correct, or pull in the + // dynamic param code to find the actual parameters. + for _, value := range b.richParameterValues { + names = append(names, value.Name) + values = append(values, value.Value) + } + b.parameterNames = &names + b.parameterValues = &values + return names, values, nil + } + resolver := codersdk.ParameterResolver{ Rich: db2sdk.WorkspaceBuildParameters(lastBuildParameters), } + for _, templateVersionParameter := range templateVersionParameters { tvp, err := db2sdk.TemplateVersionParameter(templateVersionParameter) if err != nil { return nil, nil, BuildError{http.StatusInternalServerError, "failed to convert template version parameter", err} } - var value string - if !b.dynamicParametersEnabled { - var err error - value, err = resolver.ValidateResolve( - tvp, - b.findNewBuildParameterValue(templateVersionParameter.Name), - ) - if err != nil { - // At this point, we've queried all the data we need from the database, - // so the only errors are problems with the request (missing data, failed - // validation, immutable parameters, etc.) 
- return nil, nil, BuildError{http.StatusBadRequest, fmt.Sprintf("Unable to validate parameter %q", templateVersionParameter.Name), err} - } - } else { - value = resolver.Resolve(tvp, b.findNewBuildParameterValue(templateVersionParameter.Name)) + value, err := resolver.ValidateResolve( + tvp, + b.findNewBuildParameterValue(templateVersionParameter.Name), + ) + if err != nil { + // At this point, we've queried all the data we need from the database, + // so the only errors are problems with the request (missing data, failed + // validation, immutable parameters, etc.) + return nil, nil, BuildError{http.StatusBadRequest, fmt.Sprintf("Unable to validate parameter %q", templateVersionParameter.Name), err} } names = append(names, templateVersionParameter.Name) From 0cac6a8c383fbd671cee3eb3e87832bd14d25b49 Mon Sep 17 00:00:00 2001 From: Susana Ferreira Date: Mon, 19 May 2025 22:23:36 +0100 Subject: [PATCH 21/42] docs: add provisioner job state transition diagram (#17882) # Description Add a state transition diagram for provisioner jobs to the documentation. This PR introduces a new diagram illustrating the lifecycle and state transitions of provisioner jobs. The diagram complements the existing status table by providing a visual representation of how jobs move between different states throughout their lifecycle. # Changes - Added a SVG diagram under the **Manage Provisioner Jobs** documentation page, in the **Provisioner Job Status** section. - Included a brief introductory text before the diagram. 
Mermaid [link](https://www.mermaidchart.com/play#pako:eNqFkD1PwzAQhv_KyRMdvPSDIUKVUFIGJtSyYQbXvjSW3DM4jiqE-O_YsRtFCMF49z6P75U_mXIaWcU454KUo9acKkEAocMzVkA4BC-toDFvrbuoTvoAz02CAO5vXgQ7hLgS7HUBnMOjO0LtUQbUcdxCHYEnJG3oFJFs1VdwNAvYRHA_EM3BZnrRnd8sRvTu6LeHQSns-3aw9mNUaZlapC1q1P_YFxM62HnvfHZX0X2Qxv4qSlJorQzGUXL3-D5gf21M66hmZF6a1kn_qeYT5eRf4FQ2s5vpxqwgbXJ4m75_RylYlGRVkjIup5F9fQNTV5aS) --- Screenshot of `Provisioner job status` section in documentation page: ![Screenshot 2025-05-19 at 16 10 12](https://github.com/user-attachments/assets/9cd6a46e-24ae-450c-842c-9580d61a50f6) --- .../provisioners/manage-provisioner-jobs.md | 4 ++++ .../provisioner-jobs-status-flow.png | Bin 0 -> 40994 bytes 2 files changed, 4 insertions(+) create mode 100644 docs/images/admin/provisioners/provisioner-jobs-status-flow.png diff --git a/docs/admin/provisioners/manage-provisioner-jobs.md b/docs/admin/provisioners/manage-provisioner-jobs.md index 05d5d9dddff9f..b2581e6020fc6 100644 --- a/docs/admin/provisioners/manage-provisioner-jobs.md +++ b/docs/admin/provisioners/manage-provisioner-jobs.md @@ -48,6 +48,10 @@ Each provisioner job has a lifecycle state: | **Failed** | Provisioner encountered an error while executing the job. | | **Canceled** | Job was manually terminated by an admin. 
| +The following diagram shows how a provisioner job transitions between lifecycle states: + +![Provisioner jobs state transitions](../../images/admin/provisioners/provisioner-jobs-status-flow.png) + ## When to cancel provisioner jobs A job might need to be cancelled when: diff --git a/docs/images/admin/provisioners/provisioner-jobs-status-flow.png b/docs/images/admin/provisioners/provisioner-jobs-status-flow.png new file mode 100644 index 0000000000000000000000000000000000000000..384a7c9efba82e14cd32fb6ccc6977f7a404ccd3 GIT binary patch literal 40994 zcmb@u2{@H)yEnd4%2-m!+)}AzN>XO6kjzDqF^bS&mYK>B5*d;yGnJ6YuuPRisZ2@c zBq4Jov;S}PJkQ?myZ_(*j(vQ`UdQns&zou8>%On+Jb%-91!-#@W2EDtBM=CT$5j+h z5(reh_}_CHDtx6}FVGYJS>t%}7>SVkfoqsR*iJaEctqDVVYJ)LNcYt0hRN#6+ixXC zAKrXmm3(tw^5I~$fMou#lixq3e$U(eOweOAztF)$l{a7a9SdDGi=7hdmMEp8N}Jay zxiB$BU1PuJzqr^x6BkHW)Ay45Uc;hDiR1S!kprT<HeDJdxq z4vvL|h4t&#zkdBXARyrJX*vRTM!>Xja=|HG-EZG1uhXYa#m2^3TU!tR`0-<8 zv-7XGxVVl#uedl4M#j)euf^H1&bD{&ZdJTole`=*+!*vIJA2EPEdvgvm6eql85u!A z$9Inn%})<1DJdO1cu*|%(&*=WylGG6!bm?vMzVY#KW4CYX2y`lJXInzpL~>0kq|9Ef%cY-J zGcxKbvF{gc-PHKYu9E&qW@aWoA0Npr)t~8WU!VP0$Ft_X(qnKHwD(!3O~P| znwt9dt?IxnR+37dNd*n@%$YMrY*fjlaC|R4J>9H2z}{F%Er5~pZdBAOXBPa{Fco)n zWMoU(Roh+ivL;$uc|NNv!^6Xr-x9E6Q5$*?xQVyp^XI~j$B!Qi9zHxb)nALd;Ns#k zJ2lnR)HEDHwQ6o{?Kb!!k$5z~X+U$0E^9Njlz@nc?aWY;gVatDk%s2xr>&M7HgYl1`OB}&ol{m;wzs#pvU>3B*+HTrf25FMk>IXf8D1W% z%fEZCj&b`P;ZL86#cdJVCm_J*S3dte9&du>8Qfxi z{rYtpB32(htAwcNLYL1_#s8(}$dn4`n~_ z{o9+!HEY&zN?dBhd=Y@v*wEiUAzVT(mv*! 
zZ{NH*Q{n!rB}>b;t0dp5DfQP#+ZK}fxpNb}Rq~GC-<_^+Y-!mNq{uyIza}{cTad_i zM+J+f)sml|AMdd|TF}5F>t5(I+#DvqQc_ejYZ0oKV_;!z&CbJPa^l31w)DL`U#DF6 z>E(WGZcg2;QYny5uMl&DtPp60`)^ZZ?Ro0hu?x!!zu4K?Z7b(3?d(Jo?X6XXW3Zgv zv733l=lXgoJceubvxJdLORX-yGiHrK}u~YT@7@M*J2cQYpHBf65M9=AdsD z-OYQGC^z)9p`l@RUS99(*M!E+l*f;&ZEJ0P)*&b(5*C>@$C+r(MJ^fF$+&|Ty_cqH{ruJ) zI8cC^T5NRm%$e8-(@l)Ywa=Gvn{6&$^ucaBQ(*ZlV+t!C{}vJwYHj7@6=!2(<78*w z%X&!OXT`|SF#fjroAXaQIt-2;t&VXvG&VLiG`whT-u%N)RaZAkVSDnqD}=^}cB53> zsoB|`Sz1Xye>&*sw4CNFp3LtVauD-cm^tB@^YCFisgQ}pz{qHLMFGv_p>D9%njwyR zSFhcET3np9J9AHIb93{N8|&?jseKeRo4@P=xk_9Nm zaR0g2T~$6wBLW6Lzr0v`;liNA{W)3!H<}qM1CLAznQUM!^7N#ssj0U1m#<%6{wc7- zRHOIAt(%`+Y%9r4b;IM?!ms{(?*hS?7T+0a&2B?+LNc8jf1a!2a_18iKFWHBI7Hy0o;kU%!5t z`Yit{>MD;ALN~&)Ll;Ch_LQrtpQR^+VrOizu&~(0&)?DB9((WJz4-X6KPun8eTz1!MdJJ+zfkwJ(>( z?kDUKN~hnGMkbSg{IJQ*Ciu=CB?B-m;;*IK6^Qs=VH~DXr5l{5a=Q2rV2Qc|1QHOi zzU>r2VUcL8!2Sm40o|2_(CGc2KZ8wh?b@3E_!)fs){h_UjHwBQ|B87izm5VlGCYhG z_vc$tcNF<8FGOZ@R;Zel}n z8R^f*n5-fa2v0T*0}GD+^})nTg$d*}0cI{^9Tr%#{rxSZf|?Mv5XIES#zu)*x8dfD zZ{NNpByc%8I-(y9w`5(kaag6FA1|B0GhAL-oIqc1tS~#xYp^?}9%U0H8=GsGH7=C% zQ0}8g0zyJLtsAqmvtxt|l^t)^_M_>LRFstb@7&qEk6Q24DQ-E>&$t=y-;bS2l14Gb ztw-T{dXm=n?aR7tc4yzm9ZX3{Sy@@xwQJX@Kd=4!_g_+?Cw{}DPV)SD{K%0thTK%o zv%h+8ry{3e6>=WBBFfPJC_*9$0LW*PMkPVT3U)4D(SW0bow;fXi`W>h{{eW{L{c74&PJQZs*{T z8kz*C#>#pZ_|n{5%}{Fj!NzBx!% zi=L~!eR-yng@dD`uoyL!nRNZejri`irTJ-EqOh=VVIZ2&z`%gy;ltviv+}F+o2o?} zO1oaa&g`2_@>z*ByNA2^(^ZDUKte(SU%+2H`LM7l{Z3d|m^y#j*lM1XM^yty!$G1M&jeg$$P**j~4;|#KsJfXMF{$Ag znc&3r-S|su!eRUCOh5Yqd2Ot$?#0FB8axYTa#&e%TX^vB;X^#4%E}eo99)I^F*3(5 zZ4(ns8f&+^1C-V(Au-;Xt!HM|hZQa>BQxjet*ZJKz-4G#M~-1J4bisZ*C#9lJ`}63 zUl)E)h-{?;vWpwDL(lVLx~33*|Ni~N#Ib<^C7M$Eeq|#gBO9AXRaL9_WVFQ7r%$8i zT1>Q3X&a-1d^%{Pe=j;(iN@a9IipufT6&*^gxG}-3BJC*wPC!6I(n*nd`K!-acTBB z0m`?w9jU6TtK07Ps-~t%l7%`*kwh~;5M$cov+6~<85+u%Sj`rs2;vdy4y@GP-tHH! 
ztX7Bqh!RRmM8Ci6y`PY*frgg@%t7>P)EYy%ZrN2elLA;+b2JoKXfx^EyLTLydC@kA z-(LFD3yq%8&%OfN9~?YJ(_ZOSW`4TLtY0HuqUqFm`Q@3GyF!LsTwFs= zvfPgG(pRsjt1~1P>Irg2s%Ch~0iJ#T^5O=tv3dSs35mP8be$#k=K!h|w(FNV>@t3$ zaN$>XR~OaVwZ)g(uOB=|c&tEvuC_c`y(w6UT;?)%@)CBWp5SH|J3AE>mE_La+uZw@ zXogPiC)MFuVHuiyng@xqvN{2z>$$Rojf|og+N>lH?G4Nnd{28E;#)e7ES``}Y~UUonxEmY3%}D?>um zc9$oB6Y5>RcDK=@78Ncyh>2Rt>XY5D-iZ@;K&Jx&z>)GxOWh2GIZmHA0hn<4@@2z{ z6Z&3WUiikthkLnyon#8BG0)fEKQ)bQDCf1XCs;&CXkdDtao=7FmTMiu7ED|5C#8_d zQi1j5s7AR`j4g*E=(XimrJMrt5>~!0J@x>?l&Sppm6l$TcFL3%L~r`)}H_R zg*`iPJ-C*?zvU|yp|X?j&dtAn^Pit_c6N53-cK0Hr2H{7$@q%cZ3HK_f134Q3dF9~ z8E*?hBS4PA@xDqILN`YulAMlD(wKuzejH6P*-)pt zS9DJpD!1kN^Wq)YhV#H;j=Gx(gqxR}BFfa_Y85kH)UGp6YCM3(^Qy8IwgG~>$dCKu_5GHhQ z#gBgY_)v$Dk?~n!p^U6-X)ZOP<9OrsP-PNgEmq6g5Svr zpRBE{tS(-31lo0Y5qzHIXDzRfjN4DeqepLykleQhnB58u?e(12pr{JMhh0t(8LAIU zOaC5e3&XdgKs3O19q`M|FUtRZcK^*I{ond>bz;~xZj}8yckVC;y{fAlde&y*XlJ(# zC2pTAFpPCerjY)VvjcCV{lZaRQ99ZSE)+CN5}cT!%Jc(4^rFC43t>ww{j3)O;FbPd zpw6$RrnXO580~nfhD}mZvZtpfE^cm&TH9nB`&tQqKfm&-uYfIRCtKbaZTOC-4Syi;D*sws3PV<9BSVtr>acjJ7j1zJ|=< zG4`x&W@aYWsPyEiQv<-|kdh)-R(gYR2ZtUxazrfl-N%nnJT2JR?-L}a z0jGiDfdLO(_@K*B3pkk|z?TR!y~f8?OJxXVP3v5CpbX!`-uP3=A48jC4M13kUGXo+A>-V+tSX(-(d zxJp;%YIOi3DWwrry|dGh@6Kc6vRUAC5N#PAtvy?>bQJD|W&*<4kR;DWvko`UNt$^6 zLxLnjwcU(4;Q`;RTWjjEZlg(wT_1#vLVHck z4=-+P0QQ6&LQ8~Lv)ykiD{JNN9*^1CSv>gP6TMm07sWXp933|>a(uMA3ItkwrQ7}I zw>NmWv_wEhzwqbJpJRWFbruhP`v&PKfyw;h#q5lXnkF@>AG5Ls2raI9N3%|3oQ{H{%CezQ6yeekW zX^0Z%&K;#;m-9R<%V6UYcEIx8lY#;rJ-sAJM}8s{r?A_PHj}=8NX*X6{5~*H(?11~ zgghHf&~~P*D@iG-xcGQ*{V*;`hfj~sFbD12y*rSJLRACaL&J=dYkTqHnx%rfyA<&} z@MK=$MXXcOO+~(xYffldfazH7&?8j#Y}LFcno=##MJT7jG9L!?xd-(zEfgJ9O;huV z>tsr9&%%-E>A?meLBS5afUq#FvuD3RK*-cgj0+D}pqZMUzG$jPXq4N=enT_;;ltst zUmG8qdR`w;x<{(O0ei}2q7BQhvooIQq=`u-z5;xKr&U~bh+wq=ibw7rZQ$wI z7uPR$7I90vKtB3i&>(klelIS>I@BYgDjV|yY|=daUL$N#~B&V@JF@e=D%SH2Tut@%?%J@skT7T9BfFM zXv^!xPS?vb(TLy6anZ)6Na~3M$c26X>oLeqf`WoVLSwj;>Dk%W!RLK*(*xusTkU`S zXa#7Zl(Wc+nCR%?p`n={t=VH*dqqShppPtSUW-!MIg`Lis`FW0?1h%ZPb8)^*;t;A 
zijKy53E%i?jW3nuuauB`v9T3DbcQ77$Lt0_aP#seMeozQ%_S*@C2(K)+?6Xml|6)H zZh-Bz;EKnN)#8&h*-}%z81F7yoEffmHvXfc$~M;B*&$+5u7B!ObW&1pmCx#J(=2v^ z2}?7z`(tQV&?2JH*>tylx&;{>6!l-5=KorS?XK3n3Dq2G5wxyX&cvK^+X7hM;lGjf z0}$XT@`uyMp``r}uKy9&(G0C3)6;jRXdkwCyET{IdJ703E|#L8IK0RYq1!JiIwZVm zaS<;Yo49-T?2Kv9+P?oEJOB$UyocG@^?x&IZDq08$7O=HYke*MjRL#7yZXgPgYAC$^qovcu#$%Wu%pyS_>RiI{mkDg zz%A%v$z?~$zG6@le8-_}{L7tAz6IsfF7*5#nc%I8K=fCRH-zpuez+at=jy_VCm&KC zSn~46Z2^7c^#sC@wWt{W^EVa$>jA#|anx`>L&3#1VQFb;EFIQ3AuNHN%>DrZMIC0a zGqA%B95@hq>lWw-C|6cmTD8rayzkvtdvmh07ok;4K3)So?Xg^8w2^Qdx`M37ENHu- zl>}5d5G-8cU61;P23{$r2ikkSP0z#T@Cz?5mqF}Nh2>^vmwEiYB27_$APkq}UufV-(Jry=t&E6>KrO(&hPcg4Ixp4^@P~@g z**1o{D?k4&^7?hEq7iCey&HG-TUGs!SbD}9(F-5AP|&%#xocW{<_FkFDmFGh(48RI z^|}pmlT?(IUt?=Q3+7SYrjFdLh0i&%)W)D-AxgOXe`4o%{*9eWv;JY{N!|-V zOh4N%Bpu%w5zsjX_EB)@(k1k?Z2=&_HXr+d%&pGxgk89Bp|*d@YB4Hm>%zgX+JgN2 z`K9Rw_JBL88KL~ao}QlP#n`JwuTJ!2{MzSDw|jS%u$#~tcfjGoQ`Jtin>J-_nZr(a z{PC+SwD)f zqLNbUr%!OBt}}`Gtay?WT`Dc=S%VaReyyPoFazOMrIGYrD*0Oa@1*=vO)!gJIK+tK z87Ft|-c70lif0di7lQ=^;9PXJhq~j2_gkO2zCf6B@S?I$=kWO@$ot%vgJfWH-X=Z!}ff{S8v}w z!m<>skRkZGu~`)a)YA`JMQ3O4TmIDEekMQvj-=!Fb?es2;G+&7h4>dQad}}VEewK& z{ZM1d<{iq7Q8o}A^NdQxL`B1SKtACNvV@`7fOJ&QswcbJ$aYi=7S=ioEb5W&~|C~AGnK-CP9oPJO4<4ssB#8>a$jaqc0+4IV{hBG;g zbsU77aGZ4;(-5lgjt?}M+<>*P7#Pwl`iHW)b(Fg5@8i^grsi|LEO=$^M z+|ASmC$+Vo;Df@WBdKU=ZX~|y=!jzyyYyLjvs$;=OKqzir8b zhBIWpo~Q`w&(u8W6bn?xM5>(s&IzE(;ox*7^URDV`ndW+4p7R26KPKi3h?K1ntWlI z$}(W9o$Gg|^5rkNKer}Bd6O!rnZ%{f3VM!BzYPA3XdC@kqU|*K$hraR6B+_;c1NN0 zmNCgadv3~o`$M*iE_b5V91s(OrC=2ZiLd^yP+CGVIzHbB88o>-cF<`H%Z9w z*f!jk4Gsj?Ow<45i8X=GPv#G$c&qt7tKOtKfX22<r{?yz>Ha9oM!Iautw3UU0 z;#FH&ZDnJNkB>hjEs_e|N|-eyCnpE)$EIdh78Z6+&XFHK1YFV~%;)84jXccB0dl{T z@&)>iTvD&y1nL*GQ;nFrP*}lm*RH)GQO-k3@%vot3pFa10Cc&nnL6WOzYfk1}&y=O

Y5m{7XQrgQ5M|hCL?Dac^K)xyX~`vpv9kdd z?29<10wG32gR$*Y;c;i@XGX#k-DQW`{0oBu0$x8pQvl4NP-=zfgHz(qpFgoB@X#HM zRkT1H-#gP1R5uK#8o2)+&&kVcpxmr!v!4GNu~J^&Qdp1f8LB$y=~irxmge+V835_rd~A* zw>R>@1N`y=S~>dDlO9RJ6H59Z69OXzb;AqqpK_JHIxZ+IZ0W^@OakM^ji?ot6&DC+ zH{XV-1rP`M1?vVL<`j0{Z>STl9v*=K0ULScJYU`1T=+g{%WhzU3-9jPk9^7r4i3gv zhVn9w=Xo@6BiMawW8-$e1bhWL0hH4+hXGZ9>oUqqnz$2`SzbPg>7}ST{d*cBew*HZEijeF66Xo(=AAiJiJ@M288AF9CTC@P=rxCr)F z0!nn7TY7YC3}&yuRytbZn>TMdi!Sf*8||qmD=%LHMe$<-e!`!}_02CVtSpS=f%5o; zyG?&*=isP(3?P6)NKa3Xds8?_aUz!^2oG4nqT@d#O8>4GsS-4;T$&kf0kz|lbU2@_ zHD4>gG6makX2uC#2T;(>8gsJj?%jhBCsC2|^Yi_}@l_@gZd$o{O>lsgZP6Kk4t@R4 zhPD;av9WHeD_%TdS=rekYuS8r{mvnG0>whJ>Kvu;j@ z0}&k+)mUSBV?AS>m<1j26$I*@o)s{&_prk)A-wYYy^B3?3{(ewYCDskzdwXPvsiY8 zm>2mVsOaiY#)*Eqy1GDy<;{-P4^(zWWNw0~0Uy1n<3?&bL;yEFmVw=hs!?5hJH-V4%9YPdXZ5gFpTUY$vK~UtWiDLUB#3 z&>>jSz@8m}J&Qzw3eA}+?+PgJd3h8W1FFX6ty{%mIg)Q?>mZFJXjpXVZPeaBY;h+% zk%I@H6>a0EYKTjIE ze+nU0A1NpjKOiCDJ(zg)<;$1jJr&?koLpSxSeVc^u+f}5AiMdbeY(5Geu&+BajY2o z0b+~e)8F94oE#iBHZD!_;)qc}4f0M*NRWZHP#+@_#&Ph`fmvrjE-3$12i(^WRl-Ox zPKR4-T$BgOfP@bHy6fx*ENT2-e))3NU7^JSRR=V5`yG{UfB3k%V@-Rz8`k#P^$cIg z!UUGr-KIr%t~_Ckn2#;5Tqcc7Z%Q-hNDPAHtQ!p_zsU1(G5m+F~1Q~2Qb+CnwpIE zKpuH-Pi){y4obBCEA_?OIz~TZ@(Skl1`(<0(24~63%{nbgL^-*Yg`%S5)ASl?ZR>c!ipQ*bCeqIXQ1IfgQ(gJ6*jxAzXcr@cw1KTEmIcr(JM&C^9;d zCg=x7F)hSLE~CvV=w61tP5U@6DPO9Q)KQ+Vzyl*lJ6thtjJ_EYExDe44jw2@H)ARaCz_w0IA z`r+pHipt8!T72s4^mu)X-KecO$ZoXx5o!S%EV6l!14DUaJG#0O;1jH@A8x+?pM zefkp>6LzMvy?rcLT5bd_7m2**AxaGN7xZhNrG8#C!vS(aZos*ijjAdtbNylRNMFH$ zK5Y4JTW`BAr_mtRIH#9V~wnmQ_Bpc#UP|}e1W!xOFH2Q19=+{wQQ#gwx zrL%D|$!;{&2x2S#$kc02+Xr*WOt+2ocd~{c0>;6@@(W!TKc28>t?}C7R6)3m=vZ=R zpZlTpd$f`c7umHU{ve)H^YYw~%JenVVB0GT>w5t_O6q zNrzpAZO~?6H=x(Sh%jnPEw>we8H6TVSEmxUU!QRkx8J##+#Or#8WJRr(##KMCAly8 zzm!se7_FxlGa{gS`v&E86awz~^WS0*T*!pLJAGxGV2l;0gs1Sbx?1nCz8O>yXQ6Ii zT>+4s0Wuv6B*C&v?N5;(F|PD1MM|={IbtL@*^oqbk_u9MYTGLZWRCW3XH2hPh_n&> zl(x1uM`Ic%J(ZXb03b7+wpBu=m7WmVjM}Is-(uO63a=sWTl(RYiu)oD@L9FGERss? 
zPC(veyo;Tc^^*f7?6NpN|E!_`X0-S8`$KAX1?RtphyY#fj(qcZPmwd2kb{eh_uM3L z*#$*<%*3hy+6*Ml)7c}Nw+*c;w(T{k^qj}NEEP^KBLjv2ZkFa!tps?FgF==KSVA=c zI=-Y9NJ`PZa}=Q~ZxE~(Co6J3SNX_SxG^ugAhB?HUd9t0n9bC43O-ED-G~Tg(oG2S z7g#%m`ab)6K_=H!RqcRTGTB#MnwwGfGH(B=C&=Idllu6qLVv?9!t36>ZM&*$6tuM) zVR>_!sJ!gzT7rR$zPC2ZdGC&QWltWG$?kBM;c6l2(nr2K&Uo=P+5dke$^Cc18T0%( z=zSpT(0=eo-Ijl~4bP$ifxxlgPDVX9k zL~IZs534>4$U5p(-pfdX0hF&o$2=a%@8<6AGCH8=Zp16|iwn;VO2Fmg$7$RAXVz_F z*CB-}zm}W(vX(@X)R2|56PM^vnsEEp!G&;IM#jq>omdI9#1}7KydS^V_tKxyWAgX! ztj?;1AN$-*ZkzAC-1_L$ObYD1#!v)oW>TQ826XP_31j2r?EG1$58Et_w+~N(p_=Qk zov(NO;56g{_+gO|5u{>IL_BgT1;KLQo}ExH5K+^*?qpVrJ>$JFygN60NHSbl^?@1L zFEFr$))O@P^ywn2CO%RfaNSVto}>nmy?b%fv0qboGhxYvx*O)3QxkUrW43!`q^AS- z!YymQD7i27dS*Qs*1l*}1e&WKpR30Wk{Ot5D<6-V2sR*UpPXXEn`q~`24maR$*{AI(lz&#z2fe zlLH*a0JD{)X)V%Cm^r)4)l}C_3k%Lo)$&4{(91P?18ipO=lvbx%>Pv|T*CfrQCk%M zky2isuyN19&efZncl3BnRA3_n`1{Ak#W9lrpB}r;K>3E+UHIe)h>8F){lSAFEPGOl z9aPd8K5O*NKein@CP*Y6fQ~?P57}7_l%%`6BKgm2fOoP*OCDuX&h6*UoN1I;Zl5z; z+}y(ovAAofY38K{TK;Y^8{M!`LF`~YZCQ}ts<7$PWSpPVsEWOin7hc+%(be1;eV-% z;_uc6n}n4t2L(- zjAhfo)Qk)z8sP7XgIvne5ILP(j?G>TW}ma9hR3shy|8Iz`QyiL#182%ygpAK@kEtI{C7{TmFTfh>c>S7RS~}iro7BDULql^AH4xLG$h?Bk z*&&dw(EP22Dm7EnQ2g#KKcS+6l3W(g?)_QcJh@W`w1p?E?Ue0~LqKSw_zUWqn%~L6 zJV0eKf7H)!wYkP+TRQ>#SKN?s(7gW}_YEn=Kv zyi2k}63+*F)|#Y}(M&Nton8gW$!_27dtd6P{J`?^vb8))11MxeUESGjnGl2f7QTZg zV$`8I%Lm;QGAiJhlQX-quN{~!MMLchSx;+hJPW_7>4Fyu;0zIY$3^7-K0iqrh^cx1 zKF5sw4t=vWi+-GE)<(j%XLNAzkX7S$V5vU4-=Z0$wl)KOeenvB&D^RsB{P^_sDoa8 z^k^C|0~&}m{?3$n(?#Z`_fYiO9VviLcg`U zo?W;L1g;LbZ3vjVMDV~`hbS73mewkQc*Cs|1;FZQX&2td zL3(K=*@E@Qh?v5<`aL;mXK(K|`Q-?V1L8MWjF?UVTS_f%WeQTHU^6wf1BVVF@-p-3 zt}cGf)z$ULkr%+~_FtO$je5^2@4eJf`0&Y-@DYJ8K6byK-2aB}`Go_#EeKVxx1pO1 zadz8z-m2{fAr@mmV*uaV$~nvi49Q$(LXL*oiCy9X<`Cba6o>e8URlL*B|EY{ZPQhER8O;DL$p~v&OZ9%thLy8q$ zuH8V8Iu7*~^j8k%mzMl;6qDH1@h-4#V1-VMI_=t}Bpd^2Gqk5Z!t5T5WGHvgHy=Y2 zhkO~x>uvRHmDPO_9AL;y11g!eb~v0>2!7gb@zp~)KJT+`j-#2^`JE(Gi1GSOHo4@@R+hJTgv9;}avy=HaCEVl3)2q<=a3)_3qZSy7 
zqriRPbsW_!a~?cMLH#A_LWqL_yM+XSXy{V#=ma!oHyIP$Y;}H*GqxJG?JG}vAiy+Y zWjYRiz>-2{ih*3$n!~}G^IYPa?27zLZRMGBwO(e zgbndW6}05H`!(L^$AsU46WLO5=T1d|XbDd;8bIQYP4x65FWVmO*6wzC^Wg)$=Re{Y z6QT7fvIV5Np9A$sNQ*Ghd`Oa)zxe4fuo>K6~R7dU~_VJ^v zbfB}g$7lW+Z(}-HnqFsYk?gC0CJQn)JR_H=?baW9h=vH50Lym{v42bk!iC(ql@3yv z_saYr9{5JYka-5wCN zzjuNk+LZ9cqJJO!i9BFTOw7K3`!q!8|7c6;muDw>^Yl$FojaGNf9IX(J8UeqV#qU% zM~VQ20?bT4byD{J)CMTNm;r=FwM=0Jg}ag(E^JfAJu5d%j*QgN*WY$IHa;GMA;*gs z7Z<1JJ=xe|gnfu*?9Vc*V;*fsLJ1?N##P?3ZT$LsOJ%}V@{(J?WG`udbmmJ?rqS^CMn_|&Gle8yn%c3-F(`zg~&a1{Rf z`2pmL5YRwu%9VQVp07*z-)U>S;~in}(Z8Zuc?8}qgziMu^m@Rsc~3bM7Yuesm$V#r zb1RLFWyil2vh=IGz0lIUykw1UbFs4@7m)_uk;qlku@9n7kSegm%qo;U^nNcn?xroEuWpew|E#HX2}Rbt>hp=nUk+8;0# zI5IxoGbvvOP7{w5n!(lw3S=3d6|P~b#sZCm!zb+L%x!E0m}p9JX>_+671 z=V^sKK{aIH%rXRe#D*odZ%_8GH-&_km6?fr!?TQPRQ-mygCpSGeoQ=JSAnunr(I@g zO@Y*4bJ(iYzk2lw!jTu`r93j8w+VvSnDc*@tuXuWBWJCsI;OYv1<}K&vd2m7J%!`g#5l3ufJ*UT_V7G<_)yYw&d$>EJG>9zv^d#oBSTGTa9z-sa7P%s zVI|;|W)+US4-3mF0|QWt!;r->n?T9soV2jWC@7eos9MFlIbswHBQevsb6|P^^zpJD z5*5BMu$mz%g6MgH7?&E!Iee?b$2e*~lA;XMg17==(MXxWAQ|?rN3TTd0f#AqP)e$U z6#Bk@%H-3%U?>71Aq7vKxVGPpTDRM_3*r)`popsL>)!*6Q`o*u&J$pD4n8?n1qdN% z!9LqAun!Vtva7q;HYp{gPkN&KC?}wfw19x>o~;|%#m*yf3{552n_SPiAz5+TX>9k+ z6l}FE;NgP@+YZ`nT|S?(*nx5MYEdLHW(*h3sB>aIw#wQTVBD!Bt=)?oQ{86r07r57lZ==8xlpK&9up7K!stJk9;p)(0mY0?w`rpUId6rGw zO5I!E>O;AOg~(fHK6>;6LY@=%5zP0f>@+~^9>5NvI+kUpZ=q0NxBETT&*zhumzR^f z{~^f>I0;wtzM+9Fqz2Ha?!yN}x7W(1{t&k1@;y2H$|+?FGX+o(&qIWFS|alE2gpg; zeNjMy69y&zwDjS_JNfxR1|jGn!q~*9!2&Hvo#qU%4=kwLw?7=?^-`pP@_I^KA76u1 z4K>$_P*PR~Mk=EJ#Fq*F&MBn=lLW&vqE;?dmbaF7V7A=H=<>*(O|{Nq&W)*F4^%IxY3}BRJ{< z`NvQL2~Xj~o#FKZ8W%+m9op?DzBF+!b#;(JC$KUZi(9ha6je-NuVM}jQegAUnYRkG zy(Ixwr=5E^W8&geX{LVvwkJ5e6T`| zBm~BkCiFV27O`@M{cKvY8tZxFJ!aMO2O!pSm)7lB{&>;7LTzF57A`Kq#j-VP*Ncy( z4`7Yfy!Vz9Sj*zs&vE-olRpnWpY`XHQ+#`-Q3(q5RbtQt;mtPY4OfL@h72w=lz+(7 z5ahEqHQBFUC~cuhq_JLbtMHsh!V~Zs-F281xenC18qYlQ2RpIgq@_JDgE+?XGz)$lK#lximC?NXUYqtZc>T=%pGVb_1)?Z{L98ZF|bwD}PGecw1@jWQx5PtT}W> 
z4(#X3=LjCxy?vY;8ik&Sictjg^KT+x-oSbXw8CJl)OfyG^?JM^4myB>0SU_!bq-ey z_Juy}7Y)HHbn7pTgskLYUcbrQrW12!e^c;LJNU_1}7ET8nC~elT%mG z<#_Eq336Uive%YOR~Oo{S*zeEB9D?F;})@TIgc{ADrW!n6)r^Bpm5Lj?PnqAAZP+B z1?F-ba#5)NT9|hQD3D_P*5B_4dmGL8bA3HJVY>foz~HMTlYDlN;z=T{&dzrC5br9I8?9#^I=^=4KbGYx2-m3pHRbEC)eQ?|F06Q=+`B~_`qtC}&h z?WVU{gJ-(X8Ml6HkON3_l9!PaVyNyayQ-D%U)%46acZa%h55i9i5vDsl(tr%-kF{M ztmrAA8;%l>RT`4FI_dvttxl3lvn)p44D`RLin?XA?1{amoHQMtWCN(F4{i2cI`#| z716GtEtIhDecBW>AgEfeS3pMMyCG`j&2d>LW%BOQO)F?b@qtnjf4SYsV9_EJA+M;h zEvnJIC7JR{5N6^kA|EnW+IOG88A_0sJ#o^GyYHT@bWns>EiYW491B1{Yl6c(?%w5R z4H+&*X`#qFoVMkgxwyKhJ@k&quJ%L`l6N92Y=<>d+~Yn1h=Qq5WJ2J17;h2MfDr{` zZ^`CEwNLkuF&w!YfGQHweCysa*cu)B1sMc-xumo-@{%#PPh#gHGqnJ?cl*{Y&&e;> zunhqf%$G0 zlDGggY#Bbj>%z})JdF>ccj{#x<)KCgj$}sT2oZlfR}wU{s2u;!~EQ(`TZ0$&Yzfb~nU}x`I6W z^i!*co|Ey_)@LChWOF^;b?Y`OZ5G_4!pN-p;P`_S{%daXG7}%y7$vYq7Ta6rcP4bA3tu@I;y9y zUpL@@%n|O-q+gdf$7N^-=v}bz`={rDtl)9uL?e6!xB~df^y!aotPHdWc>Db7_Y^*y zypxcCX7p`z6#6KTmwH!$TBiK1w?9fS?M-ld$-NOd&iJvHF;P+C`0nt$)|3t{O z@!g@(@(k1o5toP$Wx`n#P(iaYiVsOw8L8FT9-#p$RZ>*MpfJd5xwtE&&#EPDZuTgQ zO#SrrxB#Ym$V*L-HP5*0Zo^<8rc=|?jmS<9Gj4=Wr+ytD1qyB2=<50jVbl55>?XzR zoJgKJ|Ez-uiLBxSP>7YKr3C1ah`jEx_3hMz?VDOMHNcvn387+tdkyJ3Umrg?I5+1C z*9!4B@Si@~0>dQ9tzkFIok#A%9_YE<>5L-W;^ZwUGi(FWsOJyO*7oYvt2n_25m&?j zhK8)>Z!PPt38!3ma#mJgn+dW~pJ7U1Jz}(80;(xySt0br#m3?j!}abhb*$~|1V@%wkf=~H<{bGBBc|D=-4Pwlgt3pyx&E*FrgaR#(OkMyL zx}K$FAr^~xBWlWV%=9bry@=L5e=2&W9vvDf%+=pjtKHx_k#8uX6w*>yCa5dmo0N-z z1LD^RvCDow6BCK(%b3^iy>b0I0NnLWd==|Xq{W6#)nJL|!a}WF>gSG?c}jU85W3NXFxd-}5Cm+i&@+sBQg7I>xUg`g zn;iW^U-ue@W1)kfrB2mD7{T@rZy&gb12I&3HyUt=u2Jy#>v%Jy++SD!LT&E;zOOXx z&M}u4HzF)QBpgp`+Q#r26l`i12ZJECinJeN6^?gs!^tGbxCdrS+yBNfT)H@P3c>j= z8SAIqhtZ81-n}ct0ELX3nD-?j4d2e4-|)O4vmykuombe8)JF-(ReFt#j>7|X#$}(Vb2=bM{qe#;!$Czfk5<2%H#gK|N4PB zps2{m>cMH`#cnWxViM2jAPkC9OwS)F3A%eXbN_1{?Z|AxS-=Lw__m3i#}qolrcG^4 zO~^vmBkpeo`g-o%mTlYgO-w4UbW7oGpvs{z@q`URSiwO*2^r`w(7h2hqXbqF%EDQ0 zeJ30U;s;?7A&!9{41yovXA;t(j8^x#K84R?7j7mYp1nCpvAOe%;3qg6Z(>SJB~3yF 
z1O*xL+7va(zHF^8)Yv0Bu}o3q^ac523SZ(aFdT}bmU3Ub3WOudcZ5k!POkZ)7NUSx zaQxX)GDyG?CY%IvxU=s$0uW3jXa=w9ZHRu5D<=)r`5lqgbloLfUbJI=@QX`5j^C49 zdg-u*mpAOfm**mg``W+RR0WbC7WwAYJ};sl!fMBvUg1{M=~$YX-HDG6(IZw*2Avg) z1%lM5x`HiRl3L#6BE!Blwg>|V0S(k4HGCqRL5g1|CPbEnps~OW{dRn(yf@BanOz-- zTKkwsS%}3@R-H#SVoHpRK-F+}*>89tD=w>WV6o^i=C+%kA4dtHlp%79oD7~TyuVvq zl236MNBsb(9ZvXCIeN4orS00aYtRE=Bz>8lhrWY8~ zwQHfS8sbTUTj?;SIQe`J$|>XoK0g%bO@57;8u4}>$*7FDez!1AS$8K@Rl30H7ui3Q zj~y$}7c5s9L)`!+kv?C?B$YTmG*o9<0I#zZ4@!C0eIJ^<(nSy@K0mN0kk&Hu8oFWO z{g3)UHrc{zJr-v#BNvDB4tbaqnATl+o&-k8#&ZEO1;im-e2HDl+b;pAS~$E1LX{V4 z#@!xd^=5F1Xz_QlllI5H12n@yOG7_?pp<`&G)JKJtSCgz8%$&z(lc+5=m`>gD1Y0y zHYj+6w^W3^IJ<<s;u12ZydvRLkvcC?xp z8yl2Iw-lk7<>BrQM)C7aW>eHCfzTc?68Q{;9oe(ZBns)XD86gS^aC6-_fTZd%?W-% zRsb%wbO-=_tffdh;d%6wUES2M@gq7oRt8Y5V`&$tdEb%p_cqlk$ym43uH{HL;sSAA zX?tg<&)DXzrf68}899Knf8&GZAeRVMi2)}(^xK#Qd3rubrrt)Qo~zl=?%Mi>`j9^M zFK@1$ex&#K|7-2d<7!;Px9_Fdl-N;HR6>$6mt|;>BBYe5K_tl(Qf5lZ)GmdzMJkOV zG?5~bsECvhkp@E=V`d~f^rJn!dy|9Ss;)*trXcCB@<`@XL8I?m%f&f}Cajw!4& zQqFx@=TkRx_t&kxG4VtiRCh(IPZ3M->r^3=tm{`a`1OG6^t@hZIP%IWv7Ab$q3Q!X zTH+G>-coyrCo21t5-CbBLscu}X=a*_N=p}|x)gBqHPN0RdB;hH^2FC!*P7M#9qm7H z!897c#6~6mjF!~M;M)=Z&3iphc4gcBvH#ez-Ze@=$FOVb`sN1mo=n!xF1) zFLrO(AIt$=6Yi)No?pW~7_V};r^tO+bJ9kky$Y!0`{i<$Sh__zV(dzq3vR=Rk?8LD z@ML7O`6xWrXnvwwXAPtfn8kAD%&Uk)O-*kR?B_;`-_+H8qi#%ICnGDHn3#C?&K(?D zw(`TM{+fRLm|$Q4?8&})KA4#_YD3HI5B1UkF1f!AHH@nE}eS`*;CtSBV+B`@q*_G%{2y6QO z{q~lxKwFSP=8l%nt{0okIXW<}xW);Fj2z#ei=P_Mofw7u>y-7aCVc1W+S_O|f&t#> z77?;!U8&)|e4X#ijT$>~zC5L+&%k(665zwmGkQ6qVe?>Bq3sJ~giu=GrAplDHtPJx zUvohI{PAKvCm=Ra=-paAzw9JGq|Cu_jR)mYIxSp>bX`2(G_f49M2`1|%bfafT0$0C zA2$tw82I#Vd&!1-UjaI}o6NJZ*<&I&}%6U`j0h_-J z1pG^~y}Gv4xr_4@MaFOI$30Xw1)#Dw$3-lbdwPU`e^k;s?qGq-9^+E4i2c+CQx{cv z4ed(n8{dmfbKOR(4-&@OiqDtyqi*3`IR2y-s($)03q{;A44ay&`zkB9($Z3OZCiT$ zYRwAkdGpRBCRzmRo=Z+X^T<7A?j;QcImoyHk!ll@@Qp%uwmD6q^gz|y{*op0Qn%fy z4|lRdwb8-%C7DgWc(D^+`(ngBIni`cWq`fTai?fcV>k2{xfDaO6Pj$$wW0u#FF1_Ik=ZTT{=Vo#EM--mj}Gw}w&X)HNblm@>1MmdiY 
z+;RI9=>V9}R)iD2?Ar5xB9wNTsYa@^V9-7_X?Mjk)xeMaR+f*kjnT&jmFOlZ{K#M3I#}x7Ap)% zsI8PXx78Vr%NNuDH)5R^`fQ?Uu<~d>zFsKpueg0f{oAM=NSoL6Q6hGD*fBiZJUsMF z6zpUra-S2phR%t2{Azi}o8>rd@})-&w|SAYb#s^ESrk0A`tU4T#oUNiRJt5=N%CfH zP?WvYj=hlKTUvwGNLB4Dy5wSw^NTF_It!I_BYAn~5O_&0>wR>U4EM~>Wt4J=)3NCo zn2_oVd=ab=KXn~m!Dw4EC$&Gfr3beqTDgbJWhL-H@j z-6L6%CMG5S%$qT14$O8ktpOE#GW7XS1S-+d>O7>O!UwXb3y4)Ykcwcjd1?nH)AKTf zC{K?sAGMFgsuE-#`-lyZ5deGSsx-YH%Ctd)I4HI!ONK_xKer%H)S=HF3Q(6*9vc&< z+yf10&k7sgx3ClB6D;9VjmGsrK`)BSoTYZMXH@Mb=ZM}QMw1-8tZzBEfQx@ij^wSb zNLQFC`mXSM@jN?GrL=Yu`JTyU8)G_coU*<8Pui?ovB%mC;)}AmY=w<>7vY;CxR|!t zW*n#b<;+aAv2H#18MH3rdK)r5N%?Kb<7D)^DbfgmG6tk5_oYHB<9WmImO z_n*5FF*=|bu_gufdzkg|dj-k!&kd$)J2OIr#N0c z(r7=qBjkT;0kjoWjtTcQVzlU3#GkExfnevY7(G(F@kJ&qv)K{pe6_7;*51f*XQwHFaFhA4dVYI3|K>iJ2pmjy; zrtkC7jB@U2d=RbZYX9$^+Onrl^+$|=)3TE-GY<<3L$ORLW~k|bBG+yKrq0gQblZ@2 zP29PIraTm$m7D)1k=6hCiW(&w(xWmB`xw2gv;GhNz%^Gt(6}dQq5zyC(a!m9PJt7W ztElD$d=OnKcy8~me6N}rhgk=_62VKE#>cB0!7%WXtYxokzgKr+*yiEfB&s46HW@Dg zy^y0gy$=@13f-lR-=D34kSINDmrFY`>$y6}WqWc`25YSPMx9I*<#1l z2pGRSNg43Dbu3d7+B{GN7R+R=scHPdvT3Z`a)@VxE(EerrKIswD&%K^=dal z6(q-qpU?!?uY~F*?r)oXnsk-I7MI%b!*lf(f6Ksw2k}*l6?ZE&CsrU@u(Nn%4#b@% z=fqfm1_wH9$f?UZdxBvCs-IuqVPIPL5>Y z^}D&LfQ4UkccnisUbImAsY4dAedPRZP7ea5 zZWJgCl~V&bcAjkCtRYtm&4Vx;`ja=lHS2EOC+O0_rrL0vRYxDbJ6ib$0g_zIkXgrZ z%EY?m&6|M4*>@_X1rmC>pE|z1hZR&NtX;IDhqbH zm@air$HL*lBO=n4oXL7~Iex)$o&~-Ajif3x9CBwBy9JaTT_6N@t3)PR^feeyAFL=V-ezOi zcUQ~HEuOu+$X>F}AgS6-7Q604w~7m24_B4VYyh806+1c00n(U9(1Y0MkIh)Hhu{f!V*d-KVGzUzaK`lQ^-eP;~<;$Bt zq>?iHZf(UqM1JAP*PqupH(S)`WTJ|A9S*3dzhjOh14Nix|4+03$?&iFyKVVx;w=51 z=UYe!R%Z|J@F9W7_i05Vrt&EyBI1yOc=Jc)3u;5!KiQvi-a3EW5eNwd&77l;8obb& zUAlCM$8El&_TEh0ephxIFSdCiJFT?0@_+%~woXotbstPd;As2!dkT3tHfRFj>7696 zYFJukQg~*7N8Gw!YrJT4t#;O$*A1TR@TdP&Bo5cNkB@pd7qb6(+pDre7qW)>ek|Ny z_w&VzsM?zcg;^MLs`~^MaNxaZ+6qIVqpf{2KEo|E@NC`Dc@!1hWly~n{`#l0>y-m; z?Vsj9;t9i}$M)%}=Q&#WZ}T0+SBHu~hmOQ};E$y^+LHnRv?rx21hAS0yg(IA&@5z&Nj~bI>*e`_AjN=Upu%Lm6esP)f@(|e9>)r4vE7C|^-MHN+bXdT7&7)H-QXKml&VAjy#+C&w7xVw( 
zEkYNEjA>r=DyCaTC2svQXF1A6yPC81XpefIU;@kD;W1VZP{>gpRd=XNhx(C_m< z0vrTG*Qsb#NiQYs=WXh%A^5z~Z0uuRpENXehq>W;tEe+w@C3oL6ZjdD=iJSoR65F0 zQ%$?vSbcqm(x5Iav5+3BCJN*NH!}la(9qw@1r}^?{kja%?q|hOBuK!j_0KNl-MS^= ze1}MuCHs^OrV<&u;6k_5MSt&TV#-Yu6aad_vXoygsmjBYF<;<>#Vx#th8L^jxLzWO z+t0SgKvq+#T`p_a>I%3B0KZ7L-_KFZC$L?xz+B-rP^j>80ez%|(EiA|OcWSDk~iL} zwT@<26xW-Q7AUTirq?H(#J1a`EsQdh{ygL_c+`O*f|Z zJbaf;dh6x&4VrmV!aKf2b-tdE)xkQs{a5OG=~9 z!9pEfN+2A=;E}f2nJw$F<)grNXHc&mJ&-C6_8HNz>2cY76NTHH%SAGicfP^~4xd~9 zYDP>DT3nS$I&Zx=dHock?>FBjmD?xZv+apElDaM1mpr4?oW*ZvYkQJnLdBl8!ALDl zV=j?Qm-VgYG4OJYMUvQmqVdJPn0Bixj8lui>)Xb`VZ@Yc^5+nUo;x>q$4tr$%O_ti$1x)mPS_nVwta{{1uUa^~6FKev20A|8cpB=d(>tvYxv@e-slMHxV9 znfo|Db>u`hTotuU6cFX1zzLGkIb^1Zc^O@-36jbIrWzAvrvFmS^qkMhTU8ew-}Doi zG{X&?`&`;-ytzxaPaKGAC!E|_DkIYF-gkWkD)75%I=)95pJ%T5F!G#8R1jBN84rC{ zIq-w=V$$YL?a=cET>Y(KrjeD$)tTh{EOefy*)%T;==_zac-F!(a;7>;3!h`gi@yKH|Zw zp=5F20MKy9$ogNEhd{q^Cioz8}xsLdIkOo~f`=|Ka`SXssJ0~vD`RD)1m5ywVj1@Y0vA+dZ zuWu3BFuHcFB%nXt9bl$8bH)sHrPffRr2dkYzE+7e+=T`@dyfNONDQYql=AT!Z~^dL zH^1O!_%kZ5tZpUUMF>{#;NwNSj4>=X`tmGw+V+&OLnKxxMXR#?_vbe}ij^h(<<`sT zC(P26(?rsPh!w?EdHE)|-mSqc*b|-F>k#pKG95ACI1UWmeru4`m8EB*ZPca6GbzpDN$<4>oH^)xH><42Vj=b z$e=bddA-kwccZyChK5-&G0TY9odbE&$7t~vDj#e0(0uYLz%Rr3cF^>smQtlnZ2 zEf^TlQUF5rSF`eLgtA}5YEO4obpWJk49ocYe zAQjBQ@s*e0%}%HvfW)f_=a}Htnm#hp!cQFrtFXPvNNA?I8}-d6X=}+p$RqCAO5%02 zvgK{bfaKWdeZj%<0@vaW-rG9Gk8&){plFJ7k?(Mks`9ec3s6`e3~yeCKdl$(7-=#I z%m3iwCGFwr2MAhf#8wtN(TLhy)*t=<65&O+gL7|Fq*nPAQ;`sx$$5wT8h!1;rt}nD;QpUXMOj&Sl=9zn z$-Qoy^ZAl%Q6B<3{fMVTlP6Dx*UfGJ;oxu0q@=43l=)VTiy#jJ=Ap`lB6#*bkhBFN zW`EBCZZ0l;l#~`y^*r~MA9y-0dBKA^s1F5GN>WJ9&E#q>3{pI8;l8DdWqw|s243$} zBkW`~*YkZm9)G3_`fktwC4^Wb9KIg8lQIY^ioP1KlA|4!%rLoup@ld1-mt+v>T6$> z+*72mMaf!yb|2Gl(0}ihotU_i7(g*Gc1pZb#+`%7@k73Vs(Iw)N#~TOjwd!CSt)xl zUDKv~aE9kVLD67Jf#+wM(e;-m1nHhTcI;?#?*8$4<_J{gz+HCW9NF9By1z?2xA3B6 z2b=b~f+?qu+NPe?pl5&U^w_f-zQOv8zAD|Tp@8)CU{KJy?NV*i@vmL&wHmYhs_$|s zQ5OEzRD@6F9~IcgRY;UP4ON-Z;;sHT$E)E8nf`zJ?WS0-^s8N}3pgi>5(3z}|8@pc 
z_vscubv3?rZpzA2F4|(~1fdvA>)bPBiz@pk<2!vR7VT~oK!;%tw z#W0WOe7(J5XjQ_2xj~C`k53)n8ps`-s4Bm>YSo*`wPlI5e@|KfvzxvsSs7`s{!Y7v z>mb;uRLjfB$;r!~9<@xuRD0B@OATgGTQ6OwMTC}8LazDYfok~FQ7WM#i4%J&+bgSi zC7GC93^Z1X^NkM3D4`!*`&YVlH2En&Z|74iGd=*JaXGk#u}VAJ4F6#$4*XV}&@59M zg9$mH@Qd+0xj#l&fx1|?#7#Oz{3lCX*zkhihfqKn!X!8glw3em+BMl@^b&mL6I<(q z0)rgHsqkOq=_#k~lH1fSe{j;^3-q2{&qtj`cHRq)9;Qp26jcb0LgkM`6BfTschM!0 z26(63eFw!Yc(5j)8hBj#ZTl{)`b%ohm*KE~QYIKo=q^r^((ppm>8FlUl-G)T0Mp8~ z6Q;g~=-DuAA-Y`n6m@^9h%*YDX9od(U%wr3|L$D?YLAv%%@p?r@F>|EUps)FCUQ+% zC%1Vb8Px3;I9bKx>#!YKMg0Vid+SH2CoSPDUE)Cf@bze7x*(-8JXIbG=MP? zAn*$1uFTy7iR*hh6~Lu1G^f_qw@&Nm6|uTy5^`Z4C`{+@ianxA3^#--0U6F&c{2^X zWBaS9)HYngF^!9?T6@$OZ3U@td)|g``cV(Ruoo^^Y4n!Lt81D)XU?0xxwmLZ=DZMs zpGW-Nx*%w@cnjU#G0161I@!p<{l_L;zI=T4*fNVh@b5fLb@;mg^Ttm(jH?!f0VtQM zva)UTP5j)6)9mKRVV`ChM~#l24U};8@Oa>sKA`865C6oC-NTtSD`$M8$A5N_ue=OY zf}VuhQqcjR!g{aDcCxV8f4^j1ER0B`vb{JhBEmjBQRd;(QukT2-u2dSUAwkz?wPQ@ zeb;a4k_^>FBk1WyZEOywfw%_N%=T6^K8}_T7DM2l+omAn$I1v5$UHT<@nE&rcZLWh zB3F2q)vJr&c%GG)^1Tt0nD~w%8;0d0@120jjy^(^5q~_j99YTz{@Fv-@X~Vmc1h!d zpw40o?h0kYDN`aFja}gccar2$@pyg@rSYT(XQIee<{^*rZS3pJ(?s^1NcV%anw+~n zr(KAXom7zd8u4SfBws-}`OLHsL6s2|R8p8Ed8v%SO!B-DSz^C}qKk&yndJ}KlAED8 zFs=n5@qj150V7}!Ib&%oZ=YA8$+ zjs_*`@I4lTDnQS)blR#F8r^#1Nn`dU@}6YLF3r*L@fBW8qvyv{-LOi`2hgr1kFj)` zc!wVY5Dlx6Rx=q-T#4K8kO`@;WPsHZCb4ug8Z$XTqQzL1ogoY2lfV;DEga{$~lfgmZkRv1i#aD4~r zN=j1ob1==Q`|dKo=_B>zN#2cS6d2TMR@@0o>NuKTBVu`g478 zb$>fGURtF4gpI*_241TCh!_c6X{#m|z0XW@&s3gm5ajlBbMtv>1=X`COquU`(358lA=$po+v)msgImYM`tw?e7PaM*B;q<8;kXY!^)56OhS}0o~2LB(;wr^&#@T z9?(0;JioK_=~F z^0^iEYKzHOXrp0fD0B*nBIDIvwwg}9R;p+&CFj8XA zQqC+-c}eXAnmq}6H9iCS_h;1ZzQzO3Ba7XX{%CodzMUsMqYJ#E#`i}i$dE%50d)VB zNH6IP3O$T0X8`omeT7>A=Ye#7T8yIzt@`T*vUN&dSG079k={N@`6$oUv%^le4zN zDz?@y>{&c%je+9~=lJC0Zf3{W9OzR_mOW}@Vc zP28vcDy>GKLYTSr8IQ`!JhAy5xhszR0)tG1?98X$K^o51rfZZWGPi9OJ&ZItcakyu zYHe-X`)tpd!E&{P1Imf-Jg>4{SduE#cq8==4@f9>JfY|s&L;b#7z6|S+}Xi0rutbw z$efl{9m)YHeHZS+lk+;O>ZPAV5GjlYD=uh)Faap+W1s&0uQH;8fC15lu9M`Jiv7i3 
z^A4IZoX<`$2~674Cy5lFS_pf$x9RMmw%Upv6x~(jEvBv+dG3DLOq0zMH`Yp-YU}7= z<0}yB&{xfeyCnTh-GnaWy*tmw#5^i~fR3UQ76K{J@bFUKG$@&;yra;K!d?Zb$5ChL=!KW2Kbjg+K#YN(Us>;b?F9&svhw7OwPtNcO5Cu^ zB%4GM$yp4WRQm}Mm}EkC`0$MsBffwCh&E483q7j#`0*9bo^7Q&zcuF)_Z{uMUicbj zf6^V-Y-m`6K>Rt)P3EyPWoI7`1=xTcc-C`r3;vWbNk|Au^3qC{Fh$-3rAOuyPOA0q zzu4M(DQ4Hy6rqJ2kNzO7b(}zQv2J_P0=<)@SR{TEupY?CxzbrJm2~X*@y%#g@#p}} z3RX(B_vl>sngjg?qw|1X!m{{#DE(!7DEkU!??NJvgTo|U-SfC`@I;`jaz z73gDD~F!=VF>DMvv}o zRPuU_WKtY`S;tQ+qKFg-a7VQI9I`q(hl*T3SKo-*X)t2M2YhiL)sP@glZ!O+8E9lc z=7IQivVWevcrn-?e6%6Gn$%grlmwYMZM;adp^kv(YrlK*1lBZM_!s%NPHWiA zIfv^Bi;T_VPV^*Vyi8jW$V`K&Q%Y`Kk!i4;?KqvCA~@ihdw*_dXpoCxx1o8qT+dWl z9%q)WCkxJeJs{Se%(t>~{4eUdjnrbQ{+hXchm(_<)dbwmR+sk@xw^s55s*S&G0Pkg zjT@WXKO{4Ayhp{FcJ&m;31pXlD1^+pt;RJZ#F83zix#;v^z8dfs0M1or_aaV-EuJD ziawu2>qDBBe|j)dZ7u&&NR)Vk3G24xwVZJKVt>eI#k`3TWkp3@!=(rvO~>5=y`ida z4KZs7;uMpMNqG5XLrp#t>^pA=gRoLk=$^x4BlgPnA0H1Ya6lrcT<cOYyd|tBR6~LY+8B}nY}SO@ zY4yOK)tjXxOkzK^UjD&h*6^X{u)h-qt#7yx|9at2=}HQ7$J-37dVvoNf7V$ttl;q7 z{5Q7#*6<(-rjT{@iE@6?cIDr?Pbfv9(2hc(m6VBWs1M<Um5Gah>xRBgF>>5_uRn8)ji^n;!N&*~f(>jj+UVfU2{D@qWciM=|u~1^0&a z%E?LF07NSD?B2Y6`{Vti<#eVHbP*Kn{{CK=6@tQ;mI=Xt9Bmrhcsn^X&;#sMbnhN- zGwfsuHLx=g5zjd|0T<{t;Y3pZwEPT~m6auGlieRUaDZM!TACCTE>R~&-@woD5{CFP ztw|U%ehbwALl)6A-lI3Ezy+rWDOp(%^rk;NuTcKOFB}d8o9yv|^9>|x zev?89Pnl^Vw4?FBhi3^2x`Oe?tKdofwlqFIsD1#O?H$!QudHpS_kvx{XaoCdjQrhht=PgGbCcV!35@ zxJHaM?bSucwMx&!(TC3Pf8o3Wi@{mW&YgT)z$e%P4TaDHiT&TvB7EQ`|FM_3j${jv zC>;5Yzd=&f>^Vk6R_*O>be85ca@oA%;zj}_@dJ~fFq$biQ*iO8ot-p5upi|r5H~Kc zvugte-e4>zGcZ6naTo6sTr$0$9{Cowc)s z(T*+QhA;Y6Q)zf(w&Q#jJW4;<%MKo0Rs;TKIGNte&mTNt!Vf-iu9X#1_2UoCdW-Uh zC9N9!mNycJbAE>izS7lIb<*Y@8j5}Tl)1m7J8oTd?i3@VK`r^K8q!7^81(AajoMi@ zyMoEnUzaa0&rLQC!=dJJb#*m29CDF$PQOzJTqVLF_A_m!zd2)gZ!GzLAWj4RXE~O= zKXNl(q1k*iq$1mf)!4xy4#&+9N`oDx9cQ%Yj|L;W2a62ic z-U<_7w%}9de%w%DXB@X^(8GBiR=DI<0y)}{J~PXEr;~?USjaAnJPMGaF3Zj7-@`n% z9pb=P9mk$^UPI;lXyzI@MFEPB7KiVn9kmsO#$Wr#KWrTw$Z5+3r-lz#3LSA>VkC5W zO6(^-XEtgL=e~Uo#vxKToUu;o>B&%~`6I%E@a=l~bp03E1E`|f+ICL&NS-f6^?}^u 
zBfD^ij}0IKS0u8@6C)K&$ymrRT`R08M9xyXk4~Sor#2yfF;Sg^DI$2KJ28s2_=1Tw zCooc&9<0;e=h&1r25JxXWsZ4in!ZKWKO{8NraTDV$+aP|x6t)jEr{}uWjd%l@aw+B z_|XjK8D8S+@qvktHq9InJ|{iAzx6uIj^_-JgJR#feA160XQi(sjGHr3F6MY+&;X|s zBVT4J{w((I-%3vUza{qn*OSrz|2KT(uzfJmA|b&%H_x}@%d*x41Al;FU?E<`BMAA8dn zvHwNvj0J^-w_*u~{u~8NCyWt?+~J1w?9n4B*z^&2JxE;+^-*{E{j3AbF=lwHJL8Ln zdr0e_4@f1R;da3Xpd#W0(*l_$&lzWN*uYJ)iZbro+3Sw$7*qwqZ)0x`IfQmw9s++h^mU_ly+Gu#a4D4tKOs4O zxo;m96Rm>(^Vaxg$6ve{b>hTKZ*^m=8T%xSk@mb(*TfnGj3I@$f?rnmCuL`D8$xK2 zZ}4aIAqX>IIgOH|oP-1=GO}l{WMh@Rd8LFTgi=KLZArT~o5*e3x)nPp zG>^U#^fc^UuuFX|XW}mu_uTLcVzbTtnbd8p?)z)BF%;6{(o(Lk)P``hmTEEh6ngb) zqXG)WjMWv%46wti5sJ5)2@PI<`XkhY6Nw3T?>~N=?XBMY#3rq~sXHwP$Y;);KOYAJ zYHU&SUP?ub{tJ}1l}cXHYVNzK*RSum^* zF$iHoX{7ya*D+`1_itCb;`u)!C+=R^lP60qE$u?L%++LA!-&!9BXHKCicb9prUI*w zM6v4hM=_myVH{8Z2tM=Bu0UtXZn0ubk?)w^a#DzNLCfH8QaOIUplsusJ8_T(JT7&H z&>(kKyX3H4mP6qTj+Ze*cS)rEOG5(y601DC58i7&)-BZQOpJ~e8T^YNNafI#^VC9*?D^Q-LOU?t z7+{9Cw3IKsDFTZn50>b(Ble@N&1l@4z%VqXdSk}5;phgtzQp&6gjWb|b<$vJ9lcU~4)rS5 zQ?n+2fb`}L<96`PIl4Bc-FS283CLPe5d+GPG`V|OH z#J!bWv@Gcm5~k4WlpKVH2lUxRt}3u|Sy{%b%pVDiiB;m^0S1VPjB^AI0j$i*Shh^# zfwpza5T6jI=QcBE3UoOl+Hc>!aqjblyRRGDO`=#YIgti!@)cgNUu9)!bo>BitbL3% z3iX;vQGWusU=M(FfM+U?IayhwlzSxCHD*Q6cOH+W0f7$r?d^gB4qD=@8Nf7bwN|chp@WYv7YPJ zjl4P?jWHZ5oBG(DcriadQPs-aTxNeh7Lo5O&V_S9fdziAeyAz9Duo?6eLA>&HhD2o z|I?pWjGjXTRjmE_8qRPpn^^G2%;Tz9RE&v*QsPt5e?b!8cS+sQMOr4-YXj9*TRwQ; z!&6j|ZEcr#Dk(o~>uBHMIXaQ4G3cpvdd;;A#1bPZu6ju0?uTv3I)W<}DS`9kl z?R8MBY$H_maRy{a6n^A$7A=E_V9gONR ztz2thV1PEC_HHUnLrTWM8uBC{tSCYe$oP9+h|rsR_v{%Y0PqMEnZ3AjB-7v-Ytbw0 zi$dtX;Y?wOv36gvg5H!3yOg|CWIB1SrHlA@k2tobZolqI?$q#L7n9^atv^suo0XM? 
z3lze90SJOC^Oo-XBo=egyto2haM3J!BJ08vd(3E^ysym+C7PW$56EEoZp0ir3t78v zd{6}EW_sn?J;n+QNIADq-!ab2<>pScnsChM>9?P4cp5?J zazUGa9U{FZN8#8M^swa%3oR+aFg<3~9LKS+nv_ z+##Ic%fO2m*s<#&|4EP=2SNN}caLFz%0z4=;>V|7)7K$4Oz*^h4;;#3;l+*Eid zHOZGmkN2J}KXA|>Bg|?RoBgU_pcZOUXQUB7ejgILcV@7lOceP3V96|#-24aWqGNKd zO6t9E;&M%(_@?>$bY_}i2~{VHzm-ShJ&>>-5{Pqg_0x{+&@3e2D-5;ca&wo6BdKDh zrQIto?l$y*q@`?H_mC@W(^GEG)?Q{;omi#feb+v7H5~16L4bRBZtUR6J6lU1JP3aN zV>Q!X-O^FaKL4J=*-zu&i>FUp*7x#rsM}~hoIZ)GQhOf_Gp?j+Y1c;DX1)aor)-4#S4xCYmgW$Q$o^RObScKOimTJetrx0|BgF#A}yBLMm(( zJ$_)4GFlEBN=ipgBw(a+ZA@1`yg{Kahkrin&BCSyr7*7P?I2tYAvqmzS&5p&8;#1*w<|7nWd= zO`xG@dAY2&FWwfzu3sDdB%?i{FUO;BGR&AUKY%`CDw*zeTJxbA6KOFnCByC&#F9gB=gW2}=dU9x`j znl8z5NEw&0bG%*7FIYo*K|zeTC?x@V2_9Zn-2=-c3aU9PJ;g#@uQcI`~mKGYaE zXh4v8&dbwj-+=UZo(M*wLW!SjNM0hn049H5(q5kgMQZxz=i?lBg=jcrIw>@xmn=4OM}~3p+ek zueO{jsPgvaz4j94WM@Bl`jlKP(&`;W+x!{|7$!@WoGxq&yw;XsO04QzS3;wKJHgyRM?6W{abHt{QGYcgJZXJbmn%z zdO@VSl)I(>+tSkcqG-ZNM$@$buK|6yPXHV7Dbg8`jj)@^p~Xk{eB0}_#{<#LWS@>} zsi`(4=fkbF$BiBx`pnBv6}Jpa!y%eF+cwcWef$l_0#$9;j>C8V>K4#37Fe@rlrsK$ zPtM*@4Ox@q(sjzVrrLm)9t&mC$`BGV#w}_1iUc8BFjW0aZg1-&YOlT8{d*j1V9OSR zT~7^8%x+z^xJ+71+E)AT_OUO!n;8E4m;ve{~E5QMFFNxlTQ;P$Atu}sS?Rc-B= z%C$r7MdP!!?D;(WTxHHnk1H=C6NcvQ{Mo063`xt@nLVrzdYS08l8K|JYWeX4HXaYm zao0Sx6$KXzGDgr$*!0jYNhaCqTRwjNy#4yCrK8?^UPVJ0X|=Mro1RWVUDJP93nDuE z;CF)*lUJTy~@lZxXDQF@zuS@A}_?C^<)XB5CxH`HMZulHb=A;A78OpPgi%` z$r6r>x~5B;yMOHr(JNd*>Utz@hE_8db+ohMTXXZ@=CKv4GwoA`9&PCHTdsa+nY1W+ o%BvYNBKN~yUH&ip@kM{wvpr7XsoDQPr-^1xw=%n6y43G~0VP}N Date: Mon, 19 May 2025 17:23:53 -0400 Subject: [PATCH 22/42] docs: add new dynamic parameters information to parameters doc (#17653) Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com> Co-authored-by: Steven Masley Co-authored-by: Stephen Kirby Co-authored-by: Stephen Kirby <58410745+stirby@users.noreply.github.com> --- .../extending-templates/parameters.md | 547 +++++++++++++++++- 1 file changed, 546 insertions(+), 1 deletion(-) diff --git 
a/docs/admin/templates/extending-templates/parameters.md b/docs/admin/templates/extending-templates/parameters.md index b5e6473ab6b4f..7f216bd3e64f9 100644 --- a/docs/admin/templates/extending-templates/parameters.md +++ b/docs/admin/templates/extending-templates/parameters.md @@ -252,7 +252,7 @@ data "coder_parameter" "force_rebuild" { ## Validating parameters -Coder supports rich parameters with multiple validation modes: min, max, +Coder supports parameters with multiple validation modes: min, max, monotonic numbers, and regular expressions. ### Number @@ -391,3 +391,548 @@ parameters in one of two ways: ``` Or set the [environment variable](../../setup/index.md), `CODER_EXPERIMENTS=auto-fill-parameters` + +## Dynamic Parameters + +Dynamic Parameters enhances Coder's existing parameter system with real-time validation, +conditional parameter behavior, and richer input types. +This feature allows template authors to create more interactive and responsive workspace creation experiences. + +### Enable Dynamic Parameters (Early Access) + +To use Dynamic Parameters, enable the experiment flag or set the environment variable. + +Note that as of v2.22.0, Dynamic parameters are an unsafe experiment and will not be enabled with the experiment wildcard. + +

+ +#### Flag + +```shell +coder server --experiments=dynamic-parameters +``` + +#### Env Variable + +```shell +CODER_EXPERIMENTS=dynamic-parameters +``` + +
+ +Dynamic Parameters also require version >=2.4.0 of the Coder provider. + +Enable the experiment, then include the following at the top of your template: + +```terraform +terraform { + required_providers { + coder = { + source = "coder/coder" + version = ">=2.4.0" + } + } +} +``` + +Once enabled, users can toggle between the experimental and classic interfaces during +workspace creation using an escape hatch in the workspace creation form. + +## Features and Capabilities + +Dynamic Parameters introduces three primary enhancements to the standard parameter system: + +- **Conditional Parameters** + + - Parameters can respond to changes in other parameters + - Show or hide parameters based on other selections + - Modify validation rules conditionally + - Create branching paths in workspace creation forms + +- **Reference User Properties** + + - Read user data at build time from [`coder_workspace_owner`](https://registry.terraform.io/providers/coder/coder/latest/docs/data-sources/workspace_owner) + - Conditionally hide parameters based on user's role + - Change parameter options based on user groups + - Reference user name in parameters + +- **Additional Form Inputs** + + - Searchable dropdown lists for easier selection + - Multi-select options for choosing multiple items + - Secret text inputs for sensitive information + - Key-value pair inputs for complex data + - Button parameters for toggling sections + +## Available Form Input Types + +Dynamic Parameters supports a variety of form types to create rich, interactive user experiences. + +You can specify the form type using the `form_type` property. +Different parameter types support different form types. + +The "Options" column in the table below indicates whether the form type requires options to be defined (Yes) or doesn't support/require them (No). 
When required, options are specified using one or more `option` blocks in your parameter definition, where each option has a `name` (displayed to the user) and a `value` (used in your template logic). + +| Form Type | Parameter Types | Options | Notes | +|----------------|--------------------------------------------|---------|------------------------------------------------------------------------------------------------------------------------------| +| `checkbox` | `bool` | No | A single checkbox for boolean parameters. Default for boolean parameters. | +| `dropdown` | `string`, `number` | Yes | Searchable dropdown list for choosing a single option from a list. Default for `string` or `number` parameters with options. | +| `input` | `string`, `number` | No | Standard single-line text input field. Default for string/number parameters without options. | +| `key-value` | `string` | No | For entering key-value pairs (as JSON). | +| `multi-select` | `list(string)` | Yes | Select multiple items from a list with checkboxes. | +| `password` | `string` | No | Masked input field for sensitive information. | +| `radio` | `string`, `number`, `bool`, `list(string)` | Yes | Radio buttons for selecting a single option with all choices visible at once. | +| `slider` | `number` | No | Slider selection with min/max validation for numeric values. | +| `switch` | `bool` | No | Toggle switch alternative for boolean parameters. | +| `tag-select` | `list(string)` | No | Default for list(string) parameters without options. | +| `textarea` | `string` | No | Multi-line text input field for longer content. | | + +### Form Type Examples + +
`checkbox`: A single checkbox for boolean values + +```tf +data "coder_parameter" "enable_gpu" { + name = "enable_gpu" + display_name = "Enable GPU" + type = "bool" + form_type = "checkbox" # This is the default for boolean parameters + default = false +} +``` + +
+ +
`dropdown`: A searchable select menu for choosing a single option from a list + +```tf +data "coder_parameter" "region" { + name = "region" + display_name = "Region" + description = "Select a region" + type = "string" + form_type = "dropdown" # This is the default for string parameters with options + + option { + name = "US East" + value = "us-east-1" + } + option { + name = "US West" + value = "us-west-2" + } +} +``` + +
+ +
`input`: A standard text input field + +```tf +data "coder_parameter" "custom_domain" { + name = "custom_domain" + display_name = "Custom Domain" + type = "string" + form_type = "input" # This is the default for string parameters without options + default = "" +} +``` + +
+ +
`key-value`: Input for entering key-value pairs + +```tf +data "coder_parameter" "environment_vars" { + name = "environment_vars" + display_name = "Environment Variables" + type = "string" + form_type = "key-value" + default = jsonencode({"NODE_ENV": "development"}) +} +``` + +
+ +
`multi-select`: Checkboxes for selecting multiple options from a list + +```tf +data "coder_parameter" "tools" { + name = "tools" + display_name = "Developer Tools" + type = "list(string)" + form_type = "multi-select" + default = jsonencode(["git", "docker"]) + + option { + name = "Git" + value = "git" + } + option { + name = "Docker" + value = "docker" + } + option { + name = "Kubernetes CLI" + value = "kubectl" + } +} +``` + +
+ +
`password`: A text input that masks sensitive information + +```tf +data "coder_parameter" "api_key" { + name = "api_key" + display_name = "API Key" + type = "string" + form_type = "password" + secret = true +} +``` + +
+ +
`radio`: Radio buttons for selecting a single option with high visibility + +```tf +data "coder_parameter" "environment" { + name = "environment" + display_name = "Environment" + type = "string" + form_type = "radio" + default = "dev" + + option { + name = "Development" + value = "dev" + } + option { + name = "Staging" + value = "staging" + } +} +``` + +
+ +
`slider`: A slider for selecting numeric values within a range + +```tf +data "coder_parameter" "cpu_cores" { + name = "cpu_cores" + display_name = "CPU Cores" + type = "number" + form_type = "slider" + default = 2 + validation { + min = 1 + max = 8 + } +} +``` + +
+ +
`switch`: A toggle switch for boolean values + +```tf +data "coder_parameter" "advanced_mode" { + name = "advanced_mode" + display_name = "Advanced Mode" + type = "bool" + form_type = "switch" + default = false +} +``` + +
+ +
`textarea`: A multi-line text input field for longer content + +```tf +data "coder_parameter" "init_script" { + name = "init_script" + display_name = "Initialization Script" + type = "string" + form_type = "textarea" + default = "#!/bin/bash\necho 'Hello World'" +} +``` + +
+ +## Dynamic Parameter Use Case Examples + +
Conditional Parameters: Region and Instance Types + +This example shows instance types based on the selected region: + +```tf +data "coder_parameter" "region" { + name = "region" + display_name = "Region" + description = "Select a region for your workspace" + type = "string" + default = "us-east-1" + + option { + name = "US East (N. Virginia)" + value = "us-east-1" + } + + option { + name = "US West (Oregon)" + value = "us-west-2" + } +} + +data "coder_parameter" "instance_type" { + name = "instance_type" + display_name = "Instance Type" + description = "Select an instance type available in the selected region" + type = "string" + + # This option will only appear when us-east-1 is selected + dynamic "option" { + for_each = data.coder_parameter.region.value == "us-east-1" ? [1] : [] + content { + name = "t3.large (US East)" + value = "t3.large" + } + } + + # This option will only appear when us-west-2 is selected + dynamic "option" { + for_each = data.coder_parameter.region.value == "us-west-2" ? [1] : [] + content { + name = "t3.medium (US West)" + value = "t3.medium" + } + } +} +``` + +
+ +
Advanced Options Toggle + +This example shows how to create an advanced options section: + +```tf +data "coder_parameter" "show_advanced" { + name = "show_advanced" + display_name = "Show Advanced Options" + description = "Enable to show advanced configuration options" + type = "bool" + default = false + order = 0 +} + +data "coder_parameter" "advanced_setting" { + # This parameter is only visible when show_advanced is true + count = data.coder_parameter.show_advanced.value ? 1 : 0 + name = "advanced_setting" + display_name = "Advanced Setting" + description = "An advanced configuration option" + type = "string" + default = "default_value" + mutable = true + order = 1 +} + +
+ +
Multi-select IDE Options + +This example allows selecting multiple IDEs to install: + +```tf +data "coder_parameter" "ides" { + name = "ides" + display_name = "IDEs to Install" + description = "Select which IDEs to install in your workspace" + type = "list(string)" + default = jsonencode(["vscode"]) + mutable = true + form_type = "multi-select" + + option { + name = "VS Code" + value = "vscode" + icon = "/icon/vscode.png" + } + + option { + name = "JetBrains IntelliJ" + value = "intellij" + icon = "/icon/intellij.png" + } + + option { + name = "JupyterLab" + value = "jupyter" + icon = "/icon/jupyter.png" + } +} +``` + +
+ +
Team-specific Resources + +This example filters resources based on user group membership: + +```tf +data "coder_parameter" "instance_type" { + name = "instance_type" + display_name = "Instance Type" + description = "Select an instance type for your workspace" + type = "string" + + # Show GPU options only if user belongs to the "data-science" group + dynamic "option" { + for_each = contains(data.coder_workspace_owner.me.groups, "data-science") ? [1] : [] + content { + name = "p3.2xlarge (GPU)" + value = "p3.2xlarge" + } + } + + # Standard options for all users + option { + name = "t3.medium (Standard)" + value = "t3.medium" + } +} +``` + +### Advanced Usage Patterns + +
Creating Branching Paths + +For templates serving multiple teams or use cases, you can create comprehensive branching paths: + +```tf +data "coder_parameter" "environment_type" { + name = "environment_type" + display_name = "Environment Type" + description = "Select your preferred development environment" + type = "string" + default = "container" + + option { + name = "Container" + value = "container" + } + + option { + name = "Virtual Machine" + value = "vm" + } +} + +# Container-specific parameters +data "coder_parameter" "container_image" { + name = "container_image" + display_name = "Container Image" + description = "Select a container image for your environment" + type = "string" + default = "ubuntu:latest" + + # Only show when container environment is selected + condition { + field = data.coder_parameter.environment_type.name + value = "container" + } + + option { + name = "Ubuntu" + value = "ubuntu:latest" + } + + option { + name = "Python" + value = "python:3.9" + } +} + +# VM-specific parameters +data "coder_parameter" "vm_image" { + name = "vm_image" + display_name = "VM Image" + description = "Select a VM image for your environment" + type = "string" + default = "ubuntu-20.04" + + # Only show when VM environment is selected + condition { + field = data.coder_parameter.environment_type.name + value = "vm" + } + + option { + name = "Ubuntu 20.04" + value = "ubuntu-20.04" + } + + option { + name = "Debian 11" + value = "debian-11" + } +} +``` + +
+ +
Conditional Validation + +Adjust validation rules dynamically based on parameter values: + +```tf +data "coder_parameter" "team" { + name = "team" + display_name = "Team" + type = "string" + default = "engineering" + + option { + name = "Engineering" + value = "engineering" + } + + option { + name = "Data Science" + value = "data-science" + } +} + +data "coder_parameter" "cpu_count" { + name = "cpu_count" + display_name = "CPU Count" + type = "number" + default = 2 + + # Engineering team has lower limits + dynamic "validation" { + for_each = data.coder_parameter.team.value == "engineering" ? [1] : [] + content { + min = 1 + max = 4 + } + } + + # Data Science team has higher limits + dynamic "validation" { + for_each = data.coder_parameter.team.value == "data-science" ? [1] : [] + content { + min = 2 + max = 8 + } + } +} +``` + +
From cc53c4d1d5dc2a2c4842e2f4d50b80be06e347f9 Mon Sep 17 00:00:00 2001 From: Bruno Quaresma Date: Mon, 19 May 2025 18:38:38 -0300 Subject: [PATCH 23/42] fix: fix devcontainer port button (#17924) --- site/src/modules/resources/AgentDevcontainerCard.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/site/src/modules/resources/AgentDevcontainerCard.tsx b/site/src/modules/resources/AgentDevcontainerCard.tsx index d9a591625b2f8..543004de5c1e2 100644 --- a/site/src/modules/resources/AgentDevcontainerCard.tsx +++ b/site/src/modules/resources/AgentDevcontainerCard.tsx @@ -88,7 +88,7 @@ export const AgentDevcontainerCard: FC = ({ return ( - +
From 9c000468a1b64d35e3b89c0f7ba5710f3d122ff6 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Mon, 19 May 2025 16:59:15 -0500 Subject: [PATCH 24/42] chore: expose use_classic_parameter_flow on workspace response (#17925) --- cli/testdata/coder_list_--output_json.golden | 1 + coderd/apidoc/docs.go | 3 +++ coderd/apidoc/swagger.json | 3 +++ coderd/workspaces.go | 1 + codersdk/workspaces.go | 1 + docs/reference/api/schemas.md | 3 +++ docs/reference/api/workspaces.md | 6 ++++++ site/src/api/typesGenerated.ts | 1 + site/src/testHelpers/entities.ts | 1 + 9 files changed, 20 insertions(+) diff --git a/cli/testdata/coder_list_--output_json.golden b/cli/testdata/coder_list_--output_json.golden index 5f293787de719..9cdaa98c3f813 100644 --- a/cli/testdata/coder_list_--output_json.golden +++ b/cli/testdata/coder_list_--output_json.golden @@ -15,6 +15,7 @@ "template_allow_user_cancel_workspace_jobs": false, "template_active_version_id": "============[version ID]============", "template_require_active_version": false, + "template_use_classic_parameter_flow": false, "latest_build": { "id": "========[workspace build ID]========", "created_at": "====[timestamp]=====", diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 075f33aeac02f..f59fcd308c655 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -17006,6 +17006,9 @@ const docTemplate = `{ "template_require_active_version": { "type": "boolean" }, + "template_use_classic_parameter_flow": { + "type": "boolean" + }, "ttl_ms": { "type": "integer" }, diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index e00ab22232483..25f3c2166755d 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -15513,6 +15513,9 @@ "template_require_active_version": { "type": "boolean" }, + "template_use_classic_parameter_flow": { + "type": "boolean" + }, "ttl_ms": { "type": "integer" }, diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 203c9f8599298..35960d1f95a12 100644 --- 
a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -2259,6 +2259,7 @@ func convertWorkspace( TemplateAllowUserCancelWorkspaceJobs: template.AllowUserCancelWorkspaceJobs, TemplateActiveVersionID: template.ActiveVersionID, TemplateRequireActiveVersion: template.RequireActiveVersion, + TemplateUseClassicParameterFlow: template.UseClassicParameterFlow, Outdated: workspaceBuild.TemplateVersionID.String() != template.ActiveVersionID.String(), Name: workspace.Name, AutostartSchedule: autostartSchedule, diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index 311c4bcba35d4..b39b220ca33b8 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -41,6 +41,7 @@ type Workspace struct { TemplateAllowUserCancelWorkspaceJobs bool `json:"template_allow_user_cancel_workspace_jobs"` TemplateActiveVersionID uuid.UUID `json:"template_active_version_id" format:"uuid"` TemplateRequireActiveVersion bool `json:"template_require_active_version"` + TemplateUseClassicParameterFlow bool `json:"template_use_classic_parameter_flow"` LatestBuild WorkspaceBuild `json:"latest_build"` LatestAppStatus *WorkspaceAppStatus `json:"latest_app_status"` Outdated bool `json:"outdated"` diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 91f70950e989e..b35c35361cb1f 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -8416,6 +8416,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -8452,6 +8453,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `template_id` | string | false | | | | `template_name` | string | false | | | | `template_require_active_version` | boolean | false | | | +| `template_use_classic_parameter_flow` | boolean | 
false | | | | `ttl_ms` | integer | false | | | | `updated_at` | string | false | | | @@ -10088,6 +10090,7 @@ If the schedule is empty, the user will be updated to use the default schedule.| "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md index 49377ec14c6fd..241d80ac05f7d 100644 --- a/docs/reference/api/workspaces.md +++ b/docs/reference/api/workspaces.md @@ -296,6 +296,7 @@ of the template will be used. "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -578,6 +579,7 @@ curl -X GET http://coder-server:8080/api/v2/users/{user}/workspace/{workspacenam "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -886,6 +888,7 @@ of the template will be used. 
"template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -1154,6 +1157,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces \ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -1437,6 +1441,7 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace} \ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } @@ -1835,6 +1840,7 @@ curl -X PUT http://coder-server:8080/api/v2/workspaces/{workspace}/dormant \ "template_id": "c6d67e98-83ea-49f0-8812-e4abae2b68bc", "template_name": "string", "template_require_active_version": true, + "template_use_classic_parameter_flow": true, "ttl_ms": 0, "updated_at": "2019-08-24T14:15:22Z" } diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 68cf0940ad8e1..9a73fc9f3d6bf 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -3246,6 +3246,7 @@ export interface Workspace { readonly template_allow_user_cancel_workspace_jobs: boolean; readonly template_active_version_id: string; readonly template_require_active_version: boolean; + readonly template_use_classic_parameter_flow: boolean; readonly latest_build: WorkspaceBuild; readonly latest_app_status: WorkspaceAppStatus | null; readonly outdated: boolean; diff --git a/site/src/testHelpers/entities.ts b/site/src/testHelpers/entities.ts index e09b196a82446..1e8d6f3aa7b0b 100644 --- a/site/src/testHelpers/entities.ts +++ b/site/src/testHelpers/entities.ts @@ -1410,6 +1410,7 @@ export const MockWorkspace: 
TypesGen.Workspace = { MockTemplate.allow_user_cancel_workspace_jobs, template_active_version_id: MockTemplate.active_version_id, template_require_active_version: MockTemplate.require_active_version, + template_use_classic_parameter_flow: false, outdated: false, owner_id: MockUserOwner.id, organization_id: MockOrganization.id, From dc21016151389efc502b951e1f8a27405bf993c9 Mon Sep 17 00:00:00 2001 From: Jaayden Halko Date: Mon, 19 May 2025 23:20:40 +0100 Subject: [PATCH 25/42] fix: get presets working correctly with dynamic params (#17923) This adds a few fixes to get presets working correctly with dynamic params 1. Changes to preset params need to be rendered and displayed correctly 2. Changes to preset params need to be sent to the websocket 3. Changes to preset params need to be marked as touched so they won't be automatically changed later because of dynamic defaults. Dynamic defaults means any default parameter value can be changed by the websocket response unless edited by the user, set by autofill or set by a preset. 
--- .../DynamicParameter/DynamicParameter.tsx | 11 ++- .../CreateWorkspacePageExperimental.tsx | 2 +- .../CreateWorkspacePageViewExperimental.tsx | 73 ++++++++++++++++--- 3 files changed, 72 insertions(+), 14 deletions(-) diff --git a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx index cbc7852bd14e5..94fa3bc383074 100644 --- a/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx +++ b/site/src/modules/workspaces/DynamicParameter/DynamicParameter.tsx @@ -222,6 +222,15 @@ const DebouncedParameterField: FC = ({ const onChangeEvent = useEffectEvent(onChange); // prevDebouncedValueRef is to prevent calling the onChangeEvent on the initial render const prevDebouncedValueRef = useRef(); + const prevValueRef = useRef(value); + + // This is necessary in the case of fields being set by preset parameters + useEffect(() => { + if (value !== undefined && value !== prevValueRef.current) { + setLocalValue(value); + prevValueRef.current = value; + } + }, [value]); useEffect(() => { if (prevDebouncedValueRef.current !== undefined) { @@ -458,7 +467,7 @@ const ParameterField: FC = ({ { onChange(value.toString()); }} diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx index 8268ded111b59..fbb35c61ee047 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageExperimental.tsx @@ -101,7 +101,7 @@ const CreateWorkspacePageExperimental: FC = () => { } }, []); - // On sends all initial parameter values to the websocket + // On page load, sends all initial parameter values to the websocket // (including defaults and autofilled from the url) // This ensures the backend has the complete initial state of the form, // which is vital for correctly rendering dynamic UI elements where parameter visibility diff 
--git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx index 434cd23fb9a92..630faf8e806d2 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx @@ -213,6 +213,15 @@ export const CreateWorkspacePageViewExperimental: FC< setPresetParameterNames(selectedPreset.Parameters.map((p) => p.Name)); + const currentValues = form.values.rich_parameter_values ?? []; + + const updates: Array<{ + field: string; + fieldValue: TypesGen.WorkspaceBuildParameter; + parameter: PreviewParameter; + presetValue: string; + }> = []; + for (const presetParameter of selectedPreset.Parameters) { const parameterIndex = parameters.findIndex( (p) => p.name === presetParameter.Name, @@ -220,32 +229,64 @@ export const CreateWorkspacePageViewExperimental: FC< if (parameterIndex === -1) continue; const parameterField = `rich_parameter_values.${parameterIndex}`; + const parameter = parameters[parameterIndex]; + const currentValue = currentValues.find( + (p) => p.name === presetParameter.Name, + )?.value; + + if (currentValue !== presetParameter.Value) { + updates.push({ + field: parameterField, + fieldValue: { + name: presetParameter.Name, + value: presetParameter.Value, + }, + parameter, + presetValue: presetParameter.Value, + }); + } + } - form.setFieldValue(parameterField, { - name: presetParameter.Name, - value: presetParameter.Value, - }); + if (updates.length > 0) { + for (const update of updates) { + form.setFieldValue(update.field, update.fieldValue); + form.setFieldTouched(update.parameter.name, true); + } + + sendDynamicParamsRequest( + updates.map((update) => ({ + parameter: update.parameter, + value: update.presetValue, + })), + ); } }, [ presetOptions, selectedPresetIndex, presets, form.setFieldValue, + form.setFieldTouched, parameters, + form.values.rich_parameter_values, 
]); // send the last user modified parameter and all touched parameters to the websocket const sendDynamicParamsRequest = ( - parameter: PreviewParameter, - value: string, + parameters: Array<{ parameter: PreviewParameter; value: string }>, ) => { const formInputs: Record = {}; - formInputs[parameter.name] = value; - const parameters = form.values.rich_parameter_values ?? []; + const formParameters = form.values.rich_parameter_values ?? []; + + for (const { parameter, value } of parameters) { + formInputs[parameter.name] = value; + } for (const [fieldName, isTouched] of Object.entries(form.touched)) { - if (isTouched && fieldName !== parameter.name) { - const param = parameters.find((p) => p.name === fieldName); + if ( + isTouched && + !parameters.some((p) => p.parameter.name === fieldName) + ) { + const param = formParameters.find((p) => p.name === fieldName); if (param?.value) { formInputs[fieldName] = param.value; } @@ -260,12 +301,20 @@ export const CreateWorkspacePageViewExperimental: FC< parameterField: string, value: string, ) => { + const currentFormValue = form.values.rich_parameter_values?.find( + (p) => p.name === parameter.name, + )?.value; + await form.setFieldValue(parameterField, { name: parameter.name, value, }); - form.setFieldTouched(parameter.name, true); - sendDynamicParamsRequest(parameter, value); + + // Only send the request if the value has changed from the form value + if (currentFormValue !== value) { + form.setFieldTouched(parameter.name, true); + sendDynamicParamsRequest([{ parameter, value }]); + } }; useSyncFormParameters({ From e5758a12c778e461a71dbab30ee7a07809e15c7c Mon Sep 17 00:00:00 2001 From: Ethan <39577870+ethanndickson@users.noreply.github.com> Date: Tue, 20 May 2025 14:25:13 +1000 Subject: [PATCH 26/42] fix(site): center `/cli-auth` on firefox (#17929) `-webkit-fill-available` is not available in Firefox: https://caniuse.com/mdn-css_properties_height_stretch `-moz-available` doesn't work on `height`, so we have to use 
`100vh`. Before: image After: image The existing CSS is retained in browsers that support `-webkit-fill-available`, i.e. chrome: image --- site/src/components/SignInLayout/SignInLayout.tsx | 3 ++- site/src/pages/CliInstallPage/CliInstallPageView.tsx | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/site/src/components/SignInLayout/SignInLayout.tsx b/site/src/components/SignInLayout/SignInLayout.tsx index 6a0d4f5865ea1..c557fd3b4c797 100644 --- a/site/src/components/SignInLayout/SignInLayout.tsx +++ b/site/src/components/SignInLayout/SignInLayout.tsx @@ -17,7 +17,8 @@ export const SignInLayout: FC = ({ children }) => { const styles = { container: { flex: 1, - height: "-webkit-fill-available", + // Fallback to 100vh + height: ["100vh", "-webkit-fill-available"], display: "flex", justifyContent: "center", alignItems: "center", diff --git a/site/src/pages/CliInstallPage/CliInstallPageView.tsx b/site/src/pages/CliInstallPage/CliInstallPageView.tsx index 9356cee6153b3..db77abcb28f04 100644 --- a/site/src/pages/CliInstallPage/CliInstallPageView.tsx +++ b/site/src/pages/CliInstallPage/CliInstallPageView.tsx @@ -39,7 +39,8 @@ export const CliInstallPageView: FC = ({ origin }) => { const styles = { container: { flex: 1, - height: "-webkit-fill-available", + // Fallback to 100vh + height: ["100vh", "-webkit-fill-available"], display: "flex", flexDirection: "column", justifyContent: "center", From 613117bde29cba74127ebe2e32ceeb46ade06bb5 Mon Sep 17 00:00:00 2001 From: Sas Swart Date: Tue, 20 May 2025 14:45:26 +0200 Subject: [PATCH 27/42] chore: add presets with prebuilds to our dogfood template (#17933) This PR adds a preset with prebuilds for each region to our dogfood template. 
Creating a workspace based on a preset should now save time compared to creating a workspace from scratch --- dogfood/coder/main.tf | 83 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/dogfood/coder/main.tf b/dogfood/coder/main.tf index e21602a26e922..06da4d79c549a 100644 --- a/dogfood/coder/main.tf +++ b/dogfood/coder/main.tf @@ -30,6 +30,81 @@ locals { container_name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" } +data "coder_workspace_preset" "cpt" { + name = "Cape Town" + parameters = { + (data.coder_parameter.region.name) = "za-cpt" + (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" + (data.coder_parameter.repo_base_dir.name) = "~" + (data.coder_parameter.res_mon_memory_threshold.name) = 80 + (data.coder_parameter.res_mon_volume_threshold.name) = 90 + (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" + } + prebuilds { + instances = 1 + } +} + +data "coder_workspace_preset" "pittsburgh" { + name = "Pittsburgh" + parameters = { + (data.coder_parameter.region.name) = "us-pittsburgh" + (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" + (data.coder_parameter.repo_base_dir.name) = "~" + (data.coder_parameter.res_mon_memory_threshold.name) = 80 + (data.coder_parameter.res_mon_volume_threshold.name) = 90 + (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" + } + prebuilds { + instances = 2 + } +} + +data "coder_workspace_preset" "falkenstein" { + name = "Falkenstein" + parameters = { + (data.coder_parameter.region.name) = "eu-helsinki" + (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" + (data.coder_parameter.repo_base_dir.name) = "~" + (data.coder_parameter.res_mon_memory_threshold.name) = 80 + (data.coder_parameter.res_mon_volume_threshold.name) = 90 + (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" + } + prebuilds { + instances = 1 + } +} + +data "coder_workspace_preset" 
"sydney" { + name = "Sydney" + parameters = { + (data.coder_parameter.region.name) = "ap-sydney" + (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" + (data.coder_parameter.repo_base_dir.name) = "~" + (data.coder_parameter.res_mon_memory_threshold.name) = 80 + (data.coder_parameter.res_mon_volume_threshold.name) = 90 + (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" + } + prebuilds { + instances = 1 + } +} + +data "coder_workspace_preset" "saopaulo" { + name = "São Paulo" + parameters = { + (data.coder_parameter.region.name) = "sa-saopaulo" + (data.coder_parameter.image_type.name) = "codercom/oss-dogfood:latest" + (data.coder_parameter.repo_base_dir.name) = "~" + (data.coder_parameter.res_mon_memory_threshold.name) = 80 + (data.coder_parameter.res_mon_volume_threshold.name) = 90 + (data.coder_parameter.res_mon_volume_path.name) = "/home/coder" + } + prebuilds { + instances = 1 + } +} + data "coder_parameter" "repo_base_dir" { type = "string" name = "Coder Repository Base Directory" @@ -438,6 +513,14 @@ resource "docker_image" "dogfood" { } resource "docker_container" "workspace" { + lifecycle { + // Ignore changes that would invalidate prebuilds + ignore_changes = [ + name, + hostname, + labels, + ] + } count = data.coder_workspace.me.start_count image = docker_image.dogfood.name name = local.container_name From 769c9ee3372c45dea1085eb5c663363cdf14bf65 Mon Sep 17 00:00:00 2001 From: Michael Suchacz <203725896+ibetitsmike@users.noreply.github.com> Date: Tue, 20 May 2025 15:22:44 +0200 Subject: [PATCH 28/42] feat: cancel stuck pending jobs (#17803) Closes: #16488 --- cli/server.go | 12 +- cli/testdata/server-config.yaml.golden | 2 +- coderd/coderdtest/coderdtest.go | 12 +- coderd/database/dbauthz/dbauthz.go | 139 +++++---- coderd/database/dbauthz/dbauthz_test.go | 51 ++-- coderd/database/dbmem/dbmem.go | 76 +++-- coderd/database/dbmetrics/querymetrics.go | 28 +- coderd/database/dbmock/dbmock.go | 59 +++- coderd/database/querier.go 
| 7 +- coderd/database/queries.sql.go | 189 +++++++++---- coderd/database/queries/provisionerjobs.sql | 45 ++- coderd/httpmw/loggermw/logger.go | 2 +- coderd/{unhanger => jobreaper}/detector.go | 146 ++++++---- .../{unhanger => jobreaper}/detector_test.go | 264 ++++++++++++++++-- coderd/rbac/authz.go | 2 +- coderd/rbac/object_gen.go | 2 + coderd/rbac/policy/policy.go | 4 +- coderd/rbac/roles.go | 2 +- coderd/rbac/roles_test.go | 2 +- codersdk/deployment.go | 8 +- codersdk/rbacresources_gen.go | 2 +- provisioner/terraform/serve.go | 8 +- site/src/api/rbacresourcesGenerated.ts | 2 + 23 files changed, 773 insertions(+), 291 deletions(-) rename coderd/{unhanger => jobreaper}/detector.go (72%) rename coderd/{unhanger => jobreaper}/detector_test.go (73%) diff --git a/cli/server.go b/cli/server.go index c5532e07e7a81..59993b55771a9 100644 --- a/cli/server.go +++ b/cli/server.go @@ -87,6 +87,7 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/gitsshkey" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jobreaper" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/oauthpki" "github.com/coder/coder/v2/coderd/prometheusmetrics" @@ -95,7 +96,6 @@ import ( "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" "github.com/coder/coder/v2/coderd/tracing" - "github.com/coder/coder/v2/coderd/unhanger" "github.com/coder/coder/v2/coderd/updatecheck" "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/coderd/util/slice" @@ -1127,11 +1127,11 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. 
ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) autobuildExecutor.Run() - hangDetectorTicker := time.NewTicker(vals.JobHangDetectorInterval.Value()) - defer hangDetectorTicker.Stop() - hangDetector := unhanger.New(ctx, options.Database, options.Pubsub, logger, hangDetectorTicker.C) - hangDetector.Start() - defer hangDetector.Close() + jobReaperTicker := time.NewTicker(vals.JobReaperDetectorInterval.Value()) + defer jobReaperTicker.Stop() + jobReaper := jobreaper.New(ctx, options.Database, options.Pubsub, logger, jobReaperTicker.C) + jobReaper.Start() + defer jobReaper.Close() waitForProvisionerJobs := false // Currently there is no way to ask the server to shut diff --git a/cli/testdata/server-config.yaml.golden b/cli/testdata/server-config.yaml.golden index fc76a6c2ec8a0..9995a7f389130 100644 --- a/cli/testdata/server-config.yaml.golden +++ b/cli/testdata/server-config.yaml.golden @@ -183,7 +183,7 @@ networking: # Interval to poll for scheduled workspace builds. # (default: 1m0s, type: duration) autobuildPollInterval: 1m0s -# Interval to poll for hung jobs and automatically terminate them. +# Interval to poll for hung and pending jobs and automatically terminate them. 
# (default: 1m0s, type: duration) jobHangDetectorInterval: 1m0s introspection: diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index b395a2cf2afbe..90a29e0f0d876 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -68,6 +68,7 @@ import ( "github.com/coder/coder/v2/coderd/externalauth" "github.com/coder/coder/v2/coderd/gitsshkey" "github.com/coder/coder/v2/coderd/httpmw" + "github.com/coder/coder/v2/coderd/jobreaper" "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/notifications/notificationstest" "github.com/coder/coder/v2/coderd/rbac" @@ -75,7 +76,6 @@ import ( "github.com/coder/coder/v2/coderd/runtimeconfig" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/telemetry" - "github.com/coder/coder/v2/coderd/unhanger" "github.com/coder/coder/v2/coderd/updatecheck" "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/coderd/webpush" @@ -368,11 +368,11 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can ).WithStatsChannel(options.AutobuildStats) lifecycleExecutor.Run() - hangDetectorTicker := time.NewTicker(options.DeploymentValues.JobHangDetectorInterval.Value()) - defer hangDetectorTicker.Stop() - hangDetector := unhanger.New(ctx, options.Database, options.Pubsub, options.Logger.Named("unhanger.detector"), hangDetectorTicker.C) - hangDetector.Start() - t.Cleanup(hangDetector.Close) + jobReaperTicker := time.NewTicker(options.DeploymentValues.JobReaperDetectorInterval.Value()) + defer jobReaperTicker.Stop() + jobReaper := jobreaper.New(ctx, options.Database, options.Pubsub, options.Logger.Named("reaper.detector"), jobReaperTicker.C) + jobReaper.Start() + t.Cleanup(jobReaper.Close) if options.TelemetryReporter == nil { options.TelemetryReporter = telemetry.NewNoop() diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 928dee0e30ea3..20afcf66c7867 100644 --- 
a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -170,10 +170,10 @@ var ( Identifier: rbac.RoleIdentifier{Name: "provisionerd"}, DisplayName: "Provisioner Daemon", Site: rbac.Permissions(map[string][]policy.Action{ - // TODO: Add ProvisionerJob resource type. - rbac.ResourceFile.Type: {policy.ActionRead}, - rbac.ResourceSystem.Type: {policy.WildcardSymbol}, - rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, + rbac.ResourceFile.Type: {policy.ActionRead}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTemplate.Type: {policy.ActionRead, policy.ActionUpdate}, // Unsure why provisionerd needs update and read personal rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, @@ -219,19 +219,20 @@ var ( Scope: rbac.ScopeAll, }.WithCachedASTValue() - // See unhanger package. - subjectHangDetector = rbac.Subject{ - Type: rbac.SubjectTypeHangDetector, - FriendlyName: "Hang Detector", + // See reaper package. 
+ subjectJobReaper = rbac.Subject{ + Type: rbac.SubjectTypeJobReaper, + FriendlyName: "Job Reaper", ID: uuid.Nil.String(), Roles: rbac.Roles([]rbac.Role{ { - Identifier: rbac.RoleIdentifier{Name: "hangdetector"}, - DisplayName: "Hang Detector Daemon", + Identifier: rbac.RoleIdentifier{Name: "jobreaper"}, + DisplayName: "Job Reaper Daemon", Site: rbac.Permissions(map[string][]policy.Action{ - rbac.ResourceSystem.Type: {policy.WildcardSymbol}, - rbac.ResourceTemplate.Type: {policy.ActionRead}, - rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceSystem.Type: {policy.WildcardSymbol}, + rbac.ResourceTemplate.Type: {policy.ActionRead}, + rbac.ResourceWorkspace.Type: {policy.ActionRead, policy.ActionUpdate}, + rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -346,6 +347,7 @@ var ( rbac.ResourceNotificationTemplate.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceCryptoKey.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceFile.Type: {policy.ActionCreate, policy.ActionRead}, + rbac.ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, }), Org: map[string][]rbac.Permission{}, User: []rbac.Permission{}, @@ -407,10 +409,10 @@ func AsAutostart(ctx context.Context) context.Context { return As(ctx, subjectAutostart) } -// AsHangDetector returns a context with an actor that has permissions required -// for unhanger.Detector to function. -func AsHangDetector(ctx context.Context) context.Context { - return As(ctx, subjectHangDetector) +// AsJobReaper returns a context with an actor that has permissions required +// for reaper.Detector to function. +func AsJobReaper(ctx context.Context) context.Context { + return As(ctx, subjectJobReaper) } // AsKeyRotator returns a context with an actor that has permissions required for rotating crypto keys. 
@@ -1085,11 +1087,10 @@ func (q *querier) AcquireNotificationMessages(ctx context.Context, arg database. return q.db.AcquireNotificationMessages(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) AcquireProvisionerJob(ctx context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { - // return database.ProvisionerJob{}, err - // } + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return database.ProvisionerJob{}, err + } return q.db.AcquireProvisionerJob(ctx, arg) } @@ -1912,14 +1913,6 @@ func (q *querier) GetHealthSettings(ctx context.Context) (string, error) { return q.db.GetHealthSettings(ctx) } -// TODO: We need to create a ProvisionerJob resource type -func (q *querier) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { - // return nil, err - // } - return q.db.GetHungProvisionerJobs(ctx, hungSince) -} - func (q *querier) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) { return fetchWithAction(q.log, q.auth, policy.ActionRead, q.db.GetInboxNotificationByID)(ctx, id) } @@ -2307,6 +2300,13 @@ func (q *querier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (data return job, nil } +func (q *querier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil { + return database.ProvisionerJob{}, err + } + return q.db.GetProvisionerJobByIDForUpdate(ctx, id) +} + func (q *querier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { _, err := 
q.GetProvisionerJobByID(ctx, jobID) if err != nil { @@ -2315,31 +2315,49 @@ func (q *querier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uui return q.db.GetProvisionerJobTimingsByJobID(ctx, jobID) } -// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case. func (q *querier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { - // return nil, err - // } - return q.db.GetProvisionerJobsByIDs(ctx, ids) + provisionerJobs, err := q.db.GetProvisionerJobsByIDs(ctx, ids) + if err != nil { + return nil, err + } + orgIDs := make(map[uuid.UUID]struct{}) + for _, job := range provisionerJobs { + orgIDs[job.OrganizationID] = struct{}{} + } + for orgID := range orgIDs { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs.InOrg(orgID)); err != nil { + return nil, err + } + } + return provisionerJobs, nil } -// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case. func (q *querier) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]database.GetProvisionerJobsByIDsWithQueuePositionRow, error) { + // TODO: Remove this once we have a proper rbac check for provisioner jobs. + // Details in https://github.com/coder/coder/issues/16160 return q.db.GetProvisionerJobsByIDsWithQueuePosition(ctx, ids) } func (q *querier) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]database.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error) { + // TODO: Remove this once we have a proper rbac check for provisioner jobs. 
+ // Details in https://github.com/coder/coder/issues/16160 return fetchWithPostFilter(q.auth, policy.ActionRead, q.db.GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner)(ctx, arg) } -// TODO: We have a ProvisionerJobs resource, but it hasn't been checked for this use-case. func (q *querier) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { - // return nil, err - // } + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil { + return nil, err + } return q.db.GetProvisionerJobsCreatedAfter(ctx, createdAt) } +func (q *querier) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) { + if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceProvisionerJobs); err != nil { + return nil, err + } + return q.db.GetProvisionerJobsToBeReaped(ctx, arg) +} + func (q *querier) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { return fetch(q.log, q.auth, q.db.GetProvisionerKeyByHashedSecret)(ctx, hashedSecret) } @@ -3533,27 +3551,22 @@ func (q *querier) InsertPresetParameters(ctx context.Context, arg database.Inser return q.db.InsertPresetParameters(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) InsertProvisionerJob(ctx context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { - // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { - // return database.ProvisionerJob{}, err - // } + // TODO: Remove this once we have a proper rbac check for provisioner jobs. 
+ // Details in https://github.com/coder/coder/issues/16160 return q.db.InsertProvisionerJob(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) InsertProvisionerJobLogs(ctx context.Context, arg database.InsertProvisionerJobLogsParams) ([]database.ProvisionerJobLog, error) { - // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { - // return nil, err - // } + // TODO: Remove this once we have a proper rbac check for provisioner jobs. + // Details in https://github.com/coder/coder/issues/16160 return q.db.InsertProvisionerJobLogs(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) InsertProvisionerJobTimings(ctx context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { - // if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { - // return nil, err - // } + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return nil, err + } return q.db.InsertProvisionerJobTimings(ctx, arg) } @@ -4176,15 +4189,17 @@ func (q *querier) UpdateProvisionerDaemonLastSeenAt(ctx context.Context, arg dat return q.db.UpdateProvisionerDaemonLastSeenAt(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) UpdateProvisionerJobByID(ctx context.Context, arg database.UpdateProvisionerJobByIDParams) error { - // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { - // return err - // } + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return err + } return q.db.UpdateProvisionerJobByID(ctx, arg) } func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg database.UpdateProvisionerJobWithCancelByIDParams) error { + // TODO: Remove this once we have a proper rbac check for provisioner jobs. 
+ // Details in https://github.com/coder/coder/issues/16160 + job, err := q.db.GetProvisionerJobByID(ctx, arg.ID) if err != nil { return err @@ -4251,14 +4266,20 @@ func (q *querier) UpdateProvisionerJobWithCancelByID(ctx context.Context, arg da return q.db.UpdateProvisionerJobWithCancelByID(ctx, arg) } -// TODO: We need to create a ProvisionerJob resource type func (q *querier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteByIDParams) error { - // if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { - // return err - // } + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return err + } return q.db.UpdateProvisionerJobWithCompleteByID(ctx, arg) } +func (q *querier) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error { + if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceProvisionerJobs); err != nil { + return err + } + return q.db.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg) +} + func (q *querier) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { if err := q.authorizeContext(ctx, policy.ActionUpdate, rbac.ResourceSystem); err != nil { return database.Replica{}, err diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index a0289f222392b..1e4b4ea879b77 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -694,9 +694,12 @@ func (s *MethodTestSuite) TestProvisionerJob() { Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) { - a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) - b := dbgen.ProvisionerJob(s.T(), db, nil, 
database.ProvisionerJob{}) - check.Args([]uuid.UUID{a.ID, b.ID}).Asserts().Returns(slice.New(a, b)) + o := dbgen.Organization(s.T(), db, database.Organization{}) + a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) + b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) + check.Args([]uuid.UUID{a.ID, b.ID}). + Asserts(rbac.ResourceProvisionerJobs.InOrg(o.ID), policy.ActionRead). + Returns(slice.New(a, b)) })) s.Run("GetProvisionerLogsAfterID", s.Subtest(func(db database.Store, check *expects) { u := dbgen.User(s.T(), db, database.User{}) @@ -3923,9 +3926,8 @@ func (s *MethodTestSuite) TestSystemFunctions() { check.Args().Asserts(rbac.ResourceSystem, policy.ActionDelete) })) s.Run("GetProvisionerJobsCreatedAfter", s.Subtest(func(db database.Store, check *expects) { - // TODO: add provisioner job resource type _ = dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{CreatedAt: time.Now().Add(-time.Hour)}) - check.Args(time.Now()).Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ ) + check.Args(time.Now()).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead) })) s.Run("GetTemplateVersionsByIDs", s.Subtest(func(db database.Store, check *expects) { dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) @@ -4008,11 +4010,11 @@ func (s *MethodTestSuite) TestSystemFunctions() { Returns([]database.WorkspaceAgent{agt}) })) s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) { - // TODO: add a ProvisionerJob resource type - a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) - b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) + o := dbgen.Organization(s.T(), db, database.Organization{}) + a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) + b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{OrganizationID: o.ID}) check.Args([]uuid.UUID{a.ID, b.ID}). 
- Asserts( /*rbac.ResourceSystem, policy.ActionRead*/ ). + Asserts(rbac.ResourceProvisionerJobs.InOrg(o.ID), policy.ActionRead). Returns(slice.New(a, b)) })) s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) { @@ -4048,7 +4050,6 @@ func (s *MethodTestSuite) TestSystemFunctions() { }).Asserts(rbac.ResourceSystem, policy.ActionUpdate).Returns() })) s.Run("AcquireProvisionerJob", s.Subtest(func(db database.Store, check *expects) { - // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ StartedAt: sql.NullTime{Valid: false}, UpdatedAt: time.Now(), @@ -4058,47 +4059,48 @@ func (s *MethodTestSuite) TestSystemFunctions() { OrganizationID: j.OrganizationID, Types: []database.ProvisionerType{j.Provisioner}, ProvisionerTags: must(json.Marshal(j.Tags)), - }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) })) s.Run("UpdateProvisionerJobWithCompleteByID", s.Subtest(func(db database.Store, check *expects) { - // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args(database.UpdateProvisionerJobWithCompleteByIDParams{ ID: j.ID, - }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) + })) + s.Run("UpdateProvisionerJobWithCompleteWithStartedAtByID", s.Subtest(func(db database.Store, check *expects) { + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) + check.Args(database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams{ + ID: j.ID, + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) })) s.Run("UpdateProvisionerJobByID", s.Subtest(func(db database.Store, check *expects) { - // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) 
check.Args(database.UpdateProvisionerJobByIDParams{ ID: j.ID, UpdatedAt: time.Now(), - }).Asserts( /*rbac.ResourceSystem, policy.ActionUpdate*/ ) + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) })) s.Run("InsertProvisionerJob", s.Subtest(func(db database.Store, check *expects) { dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) - // TODO: we need to create a ProvisionerJob resource check.Args(database.InsertProvisionerJobParams{ ID: uuid.New(), Provisioner: database.ProvisionerTypeEcho, StorageMethod: database.ProvisionerStorageMethodFile, Type: database.ProvisionerJobTypeWorkspaceBuild, Input: json.RawMessage("{}"), - }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ ) + }).Asserts( /* rbac.ResourceProvisionerJobs, policy.ActionCreate */ ) })) s.Run("InsertProvisionerJobLogs", s.Subtest(func(db database.Store, check *expects) { - // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args(database.InsertProvisionerJobLogsParams{ JobID: j.ID, - }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ ) + }).Asserts( /* rbac.ResourceProvisionerJobs, policy.ActionUpdate */ ) })) s.Run("InsertProvisionerJobTimings", s.Subtest(func(db database.Store, check *expects) { - // TODO: we need to create a ProvisionerJob resource j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) check.Args(database.InsertProvisionerJobTimingsParams{ JobID: j.ID, - }).Asserts( /*rbac.ResourceSystem, policy.ActionCreate*/ ) + }).Asserts(rbac.ResourceProvisionerJobs, policy.ActionUpdate) })) s.Run("UpsertProvisionerDaemon", s.Subtest(func(db database.Store, check *expects) { dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) @@ -4234,8 +4236,8 @@ func (s *MethodTestSuite) TestSystemFunctions() { s.Run("GetFileTemplates", s.Subtest(func(db database.Store, check *expects) { check.Args(uuid.New()).Asserts(rbac.ResourceSystem, policy.ActionRead) })) - s.Run("GetHungProvisionerJobs", 
s.Subtest(func(db database.Store, check *expects) { - check.Args(time.Time{}).Asserts() + s.Run("GetProvisionerJobsToBeReaped", s.Subtest(func(db database.Store, check *expects) { + check.Args(database.GetProvisionerJobsToBeReapedParams{}).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead) })) s.Run("UpsertOAuthSigningKey", s.Subtest(func(db database.Store, check *expects) { check.Args("foo").Asserts(rbac.ResourceSystem, policy.ActionUpdate) @@ -4479,6 +4481,9 @@ func (s *MethodTestSuite) TestSystemFunctions() { VapidPrivateKey: "test", }).Asserts(rbac.ResourceDeploymentConfig, policy.ActionUpdate) })) + s.Run("GetProvisionerJobByIDForUpdate", s.Subtest(func(db database.Store, check *expects) { + check.Args(uuid.New()).Asserts(rbac.ResourceProvisionerJobs, policy.ActionRead).Errors(sql.ErrNoRows) + })) } func (s *MethodTestSuite) TestNotifications() { diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 7dec84f8aaeb0..3ab2895876ac5 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -8,6 +8,7 @@ import ( "errors" "fmt" "math" + insecurerand "math/rand" //#nosec // this is only used for shuffling an array to pick random jobs to reap "reflect" "regexp" "slices" @@ -3707,23 +3708,6 @@ func (q *FakeQuerier) GetHealthSettings(_ context.Context) (string, error) { return string(q.healthSettings), nil } -func (q *FakeQuerier) GetHungProvisionerJobs(_ context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { - q.mutex.RLock() - defer q.mutex.RUnlock() - - hungJobs := []database.ProvisionerJob{} - for _, provisionerJob := range q.provisionerJobs { - if provisionerJob.StartedAt.Valid && !provisionerJob.CompletedAt.Valid && provisionerJob.UpdatedAt.Before(hungSince) { - // clone the Tags before appending, since maps are reference types and - // we don't want the caller to be able to mutate the map we have inside - // dbmem! 
- provisionerJob.Tags = maps.Clone(provisionerJob.Tags) - hungJobs = append(hungJobs, provisionerJob) - } - } - return hungJobs, nil -} - func (q *FakeQuerier) GetInboxNotificationByID(_ context.Context, id uuid.UUID) (database.InboxNotification, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -4642,6 +4626,13 @@ func (q *FakeQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) ( return q.getProvisionerJobByIDNoLock(ctx, id) } +func (q *FakeQuerier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + return q.getProvisionerJobByIDNoLock(ctx, id) +} + func (q *FakeQuerier) GetProvisionerJobTimingsByJobID(_ context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -4884,6 +4875,33 @@ func (q *FakeQuerier) GetProvisionerJobsCreatedAfter(_ context.Context, after ti return jobs, nil } +func (q *FakeQuerier) GetProvisionerJobsToBeReaped(_ context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + maxJobs := arg.MaxJobs + + hungJobs := []database.ProvisionerJob{} + for _, provisionerJob := range q.provisionerJobs { + if !provisionerJob.CompletedAt.Valid { + if (provisionerJob.StartedAt.Valid && provisionerJob.UpdatedAt.Before(arg.HungSince)) || + (!provisionerJob.StartedAt.Valid && provisionerJob.UpdatedAt.Before(arg.PendingSince)) { + // clone the Tags before appending, since maps are reference types and + // we don't want the caller to be able to mutate the map we have inside + // dbmem! 
+ provisionerJob.Tags = maps.Clone(provisionerJob.Tags) + hungJobs = append(hungJobs, provisionerJob) + if len(hungJobs) >= int(maxJobs) { + break + } + } + } + } + insecurerand.Shuffle(len(hungJobs), func(i, j int) { + hungJobs[i], hungJobs[j] = hungJobs[j], hungJobs[i] + }) + return hungJobs, nil +} + func (q *FakeQuerier) GetProvisionerKeyByHashedSecret(_ context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -10958,6 +10976,30 @@ func (q *FakeQuerier) UpdateProvisionerJobWithCompleteByID(_ context.Context, ar return sql.ErrNoRows } +func (q *FakeQuerier) UpdateProvisionerJobWithCompleteWithStartedAtByID(_ context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error { + if err := validateDatabaseType(arg); err != nil { + return err + } + + q.mutex.Lock() + defer q.mutex.Unlock() + + for index, job := range q.provisionerJobs { + if arg.ID != job.ID { + continue + } + job.UpdatedAt = arg.UpdatedAt + job.CompletedAt = arg.CompletedAt + job.Error = arg.Error + job.ErrorCode = arg.ErrorCode + job.StartedAt = arg.StartedAt + job.JobStatus = provisionerJobStatus(job) + q.provisionerJobs[index] = job + return nil + } + return sql.ErrNoRows +} + func (q *FakeQuerier) UpdateReplica(_ context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { if err := validateDatabaseType(arg); err != nil { return database.Replica{}, err diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index a5a22aad1a0bf..9122cedbf786c 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -865,13 +865,6 @@ func (m queryMetricsStore) GetHealthSettings(ctx context.Context) (string, error return r0, r1 } -func (m queryMetricsStore) GetHungProvisionerJobs(ctx context.Context, hungSince time.Time) ([]database.ProvisionerJob, error) { - start := time.Now() - jobs, err := 
m.s.GetHungProvisionerJobs(ctx, hungSince) - m.queryLatencies.WithLabelValues("GetHungProvisionerJobs").Observe(time.Since(start).Seconds()) - return jobs, err -} - func (m queryMetricsStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) { start := time.Now() r0, r1 := m.s.GetInboxNotificationByID(ctx, id) @@ -1194,6 +1187,13 @@ func (m queryMetricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UU return job, err } +func (m queryMetricsStore) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobByIDForUpdate(ctx, id) + m.queryLatencies.WithLabelValues("GetProvisionerJobByIDForUpdate").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { start := time.Now() r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) @@ -1229,6 +1229,13 @@ func (m queryMetricsStore) GetProvisionerJobsCreatedAfter(ctx context.Context, c return jobs, err } +func (m queryMetricsStore) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobsToBeReaped(ctx, arg) + m.queryLatencies.WithLabelValues("GetProvisionerJobsToBeReaped").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { start := time.Now() r0, r1 := m.s.GetProvisionerKeyByHashedSecret(ctx, hashedSecret) @@ -2706,6 +2713,13 @@ func (m queryMetricsStore) UpdateProvisionerJobWithCompleteByID(ctx context.Cont return err } +func (m queryMetricsStore) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg 
database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error { + start := time.Now() + r0 := m.s.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg) + m.queryLatencies.WithLabelValues("UpdateProvisionerJobWithCompleteWithStartedAtByID").Observe(time.Since(start).Seconds()) + return r0 +} + func (m queryMetricsStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { start := time.Now() replica, err := m.s.UpdateReplica(ctx, arg) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index 0d66dcec11848..e7af9ecd8fee8 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -1743,21 +1743,6 @@ func (mr *MockStoreMockRecorder) GetHealthSettings(ctx any) *gomock.Call { return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHealthSettings", reflect.TypeOf((*MockStore)(nil).GetHealthSettings), ctx) } -// GetHungProvisionerJobs mocks base method. -func (m *MockStore) GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]database.ProvisionerJob, error) { - m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "GetHungProvisionerJobs", ctx, updatedAt) - ret0, _ := ret[0].([]database.ProvisionerJob) - ret1, _ := ret[1].(error) - return ret0, ret1 -} - -// GetHungProvisionerJobs indicates an expected call of GetHungProvisionerJobs. -func (mr *MockStoreMockRecorder) GetHungProvisionerJobs(ctx, updatedAt any) *gomock.Call { - mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetHungProvisionerJobs", reflect.TypeOf((*MockStore)(nil).GetHungProvisionerJobs), ctx, updatedAt) -} - // GetInboxNotificationByID mocks base method. 
func (m *MockStore) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (database.InboxNotification, error) { m.ctrl.T.Helper() @@ -2448,6 +2433,21 @@ func (mr *MockStoreMockRecorder) GetProvisionerJobByID(ctx, id any) *gomock.Call return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobByID", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobByID), ctx, id) } +// GetProvisionerJobByIDForUpdate mocks base method. +func (m *MockStore) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetProvisionerJobByIDForUpdate", ctx, id) + ret0, _ := ret[0].(database.ProvisionerJob) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProvisionerJobByIDForUpdate indicates an expected call of GetProvisionerJobByIDForUpdate. +func (mr *MockStoreMockRecorder) GetProvisionerJobByIDForUpdate(ctx, id any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobByIDForUpdate", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobByIDForUpdate), ctx, id) +} + // GetProvisionerJobTimingsByJobID mocks base method. func (m *MockStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { m.ctrl.T.Helper() @@ -2523,6 +2523,21 @@ func (mr *MockStoreMockRecorder) GetProvisionerJobsCreatedAfter(ctx, createdAt a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobsCreatedAfter", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobsCreatedAfter), ctx, createdAt) } +// GetProvisionerJobsToBeReaped mocks base method. 
+func (m *MockStore) GetProvisionerJobsToBeReaped(ctx context.Context, arg database.GetProvisionerJobsToBeReapedParams) ([]database.ProvisionerJob, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetProvisionerJobsToBeReaped", ctx, arg) + ret0, _ := ret[0].([]database.ProvisionerJob) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProvisionerJobsToBeReaped indicates an expected call of GetProvisionerJobsToBeReaped. +func (mr *MockStoreMockRecorder) GetProvisionerJobsToBeReaped(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobsToBeReaped", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobsToBeReaped), ctx, arg) +} + // GetProvisionerKeyByHashedSecret mocks base method. func (m *MockStore) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (database.ProvisionerKey, error) { m.ctrl.T.Helper() @@ -5732,6 +5747,20 @@ func (mr *MockStoreMockRecorder) UpdateProvisionerJobWithCompleteByID(ctx, arg a return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobWithCompleteByID", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobWithCompleteByID), ctx, arg) } +// UpdateProvisionerJobWithCompleteWithStartedAtByID mocks base method. +func (m *MockStore) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "UpdateProvisionerJobWithCompleteWithStartedAtByID", ctx, arg) + ret0, _ := ret[0].(error) + return ret0 +} + +// UpdateProvisionerJobWithCompleteWithStartedAtByID indicates an expected call of UpdateProvisionerJobWithCompleteWithStartedAtByID. 
+func (mr *MockStoreMockRecorder) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, arg any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "UpdateProvisionerJobWithCompleteWithStartedAtByID", reflect.TypeOf((*MockStore)(nil).UpdateProvisionerJobWithCompleteWithStartedAtByID), ctx, arg) +} + // UpdateReplica mocks base method. func (m *MockStore) UpdateReplica(ctx context.Context, arg database.UpdateReplicaParams) (database.Replica, error) { m.ctrl.T.Helper() diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 81b8d58758ada..78a88426349da 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -196,7 +196,6 @@ type sqlcQuerier interface { GetGroupMembersCountByGroupID(ctx context.Context, arg GetGroupMembersCountByGroupIDParams) (int64, error) GetGroups(ctx context.Context, arg GetGroupsParams) ([]GetGroupsRow, error) GetHealthSettings(ctx context.Context) (string, error) - GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]ProvisionerJob, error) GetInboxNotificationByID(ctx context.Context, id uuid.UUID) (InboxNotification, error) // Fetches inbox notifications for a user filtered by templates and targets // param user_id: The user ID @@ -265,11 +264,16 @@ type sqlcQuerier interface { // Previous job information. GetProvisionerDaemonsWithStatusByOrganization(ctx context.Context, arg GetProvisionerDaemonsWithStatusByOrganizationParams) ([]GetProvisionerDaemonsWithStatusByOrganizationRow, error) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) + // Gets a single provisioner job by ID for update. + // This is used to securely reap jobs that have been hung/pending for a long time. 
+ GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]ProvisionerJobTiming, error) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]ProvisionerJob, error) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]GetProvisionerJobsByIDsWithQueuePositionRow, error) GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisioner(ctx context.Context, arg GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerParams) ([]GetProvisionerJobsByOrganizationAndStatusWithQueuePositionAndProvisionerRow, error) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]ProvisionerJob, error) + // To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs. + GetProvisionerJobsToBeReaped(ctx context.Context, arg GetProvisionerJobsToBeReapedParams) ([]ProvisionerJob, error) GetProvisionerKeyByHashedSecret(ctx context.Context, hashedSecret []byte) (ProvisionerKey, error) GetProvisionerKeyByID(ctx context.Context, id uuid.UUID) (ProvisionerKey, error) GetProvisionerKeyByName(ctx context.Context, arg GetProvisionerKeyByNameParams) (ProvisionerKey, error) @@ -567,6 +571,7 @@ type sqlcQuerier interface { UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error UpdateProvisionerJobWithCancelByID(ctx context.Context, arg UpdateProvisionerJobWithCancelByIDParams) error UpdateProvisionerJobWithCompleteByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteByIDParams) error + UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error UpdateReplica(ctx context.Context, arg UpdateReplicaParams) (Replica, error) UpdateTailnetPeerStatusByCoordinator(ctx context.Context, arg UpdateTailnetPeerStatusByCoordinatorParams) error UpdateTemplateACLByID(ctx 
context.Context, arg UpdateTemplateACLByIDParams) error diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index fdb9252bf27ee..b956fc1db5f91 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -7384,71 +7384,57 @@ func (q *sqlQuerier) AcquireProvisionerJob(ctx context.Context, arg AcquireProvi return i, err } -const getHungProvisionerJobs = `-- name: GetHungProvisionerJobs :many +const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status FROM provisioner_jobs WHERE - updated_at < $1 - AND started_at IS NOT NULL - AND completed_at IS NULL + id = $1 ` -func (q *sqlQuerier) GetHungProvisionerJobs(ctx context.Context, updatedAt time.Time) ([]ProvisionerJob, error) { - rows, err := q.db.QueryContext(ctx, getHungProvisionerJobs, updatedAt) - if err != nil { - return nil, err - } - defer rows.Close() - var items []ProvisionerJob - for rows.Next() { - var i ProvisionerJob - if err := rows.Scan( - &i.ID, - &i.CreatedAt, - &i.UpdatedAt, - &i.StartedAt, - &i.CanceledAt, - &i.CompletedAt, - &i.Error, - &i.OrganizationID, - &i.InitiatorID, - &i.Provisioner, - &i.StorageMethod, - &i.Type, - &i.Input, - &i.WorkerID, - &i.FileID, - &i.Tags, - &i.ErrorCode, - &i.TraceMetadata, - &i.JobStatus, - ); err != nil { - return nil, err - } - items = append(items, i) - } - if err := rows.Close(); err != nil { - return nil, err - } - if err := rows.Err(); err != nil { - return nil, err - } - return items, nil +func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) { + row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id) + var i ProvisionerJob + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.StartedAt, + &i.CanceledAt, + &i.CompletedAt, + 
&i.Error, + &i.OrganizationID, + &i.InitiatorID, + &i.Provisioner, + &i.StorageMethod, + &i.Type, + &i.Input, + &i.WorkerID, + &i.FileID, + &i.Tags, + &i.ErrorCode, + &i.TraceMetadata, + &i.JobStatus, + ) + return i, err } -const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one +const getProvisionerJobByIDForUpdate = `-- name: GetProvisionerJobByIDForUpdate :one SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status FROM provisioner_jobs WHERE id = $1 +FOR UPDATE +SKIP LOCKED ` -func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) { - row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id) +// Gets a single provisioner job by ID for update. +// This is used to securely reap jobs that have been hung/pending for a long time. +func (q *sqlQuerier) GetProvisionerJobByIDForUpdate(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) { + row := q.db.QueryRowContext(ctx, getProvisionerJobByIDForUpdate, id) var i ProvisionerJob err := row.Scan( &i.ID, @@ -7913,6 +7899,79 @@ func (q *sqlQuerier) GetProvisionerJobsCreatedAfter(ctx context.Context, created return items, nil } +const getProvisionerJobsToBeReaped = `-- name: GetProvisionerJobsToBeReaped :many +SELECT + id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status +FROM + provisioner_jobs +WHERE + ( + -- If the job has not been started before @pending_since, reap it. + updated_at < $1 + AND started_at IS NULL + AND completed_at IS NULL + ) + OR + ( + -- If the job has been started but not completed before @hung_since, reap it. 
+ updated_at < $2 + AND started_at IS NOT NULL + AND completed_at IS NULL + ) +ORDER BY random() +LIMIT $3 +` + +type GetProvisionerJobsToBeReapedParams struct { + PendingSince time.Time `db:"pending_since" json:"pending_since"` + HungSince time.Time `db:"hung_since" json:"hung_since"` + MaxJobs int32 `db:"max_jobs" json:"max_jobs"` +} + +// To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs. +func (q *sqlQuerier) GetProvisionerJobsToBeReaped(ctx context.Context, arg GetProvisionerJobsToBeReapedParams) ([]ProvisionerJob, error) { + rows, err := q.db.QueryContext(ctx, getProvisionerJobsToBeReaped, arg.PendingSince, arg.HungSince, arg.MaxJobs) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ProvisionerJob + for rows.Next() { + var i ProvisionerJob + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.StartedAt, + &i.CanceledAt, + &i.CompletedAt, + &i.Error, + &i.OrganizationID, + &i.InitiatorID, + &i.Provisioner, + &i.StorageMethod, + &i.Type, + &i.Input, + &i.WorkerID, + &i.FileID, + &i.Tags, + &i.ErrorCode, + &i.TraceMetadata, + &i.JobStatus, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const insertProvisionerJob = `-- name: InsertProvisionerJob :one INSERT INTO provisioner_jobs ( @@ -8121,6 +8180,40 @@ func (q *sqlQuerier) UpdateProvisionerJobWithCompleteByID(ctx context.Context, a return err } +const updateProvisionerJobWithCompleteWithStartedAtByID = `-- name: UpdateProvisionerJobWithCompleteWithStartedAtByID :exec +UPDATE + provisioner_jobs +SET + updated_at = $2, + completed_at = $3, + error = $4, + error_code = $5, + started_at = $6 +WHERE + id = $1 +` + +type UpdateProvisionerJobWithCompleteWithStartedAtByIDParams struct { + ID uuid.UUID `db:"id" json:"id"` + UpdatedAt time.Time `db:"updated_at" 
json:"updated_at"` + CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"` + Error sql.NullString `db:"error" json:"error"` + ErrorCode sql.NullString `db:"error_code" json:"error_code"` + StartedAt sql.NullTime `db:"started_at" json:"started_at"` +} + +func (q *sqlQuerier) UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx context.Context, arg UpdateProvisionerJobWithCompleteWithStartedAtByIDParams) error { + _, err := q.db.ExecContext(ctx, updateProvisionerJobWithCompleteWithStartedAtByID, + arg.ID, + arg.UpdatedAt, + arg.CompletedAt, + arg.Error, + arg.ErrorCode, + arg.StartedAt, + ) + return err +} + const deleteProvisionerKey = `-- name: DeleteProvisionerKey :exec DELETE FROM provisioner_keys diff --git a/coderd/database/queries/provisionerjobs.sql b/coderd/database/queries/provisionerjobs.sql index 2ab7774e660b8..88bacc705601c 100644 --- a/coderd/database/queries/provisionerjobs.sql +++ b/coderd/database/queries/provisionerjobs.sql @@ -41,6 +41,18 @@ FROM WHERE id = $1; +-- name: GetProvisionerJobByIDForUpdate :one +-- Gets a single provisioner job by ID for update. +-- This is used to securely reap jobs that have been hung/pending for a long time. +SELECT + * +FROM + provisioner_jobs +WHERE + id = $1 +FOR UPDATE +SKIP LOCKED; + -- name: GetProvisionerJobsByIDs :many SELECT * @@ -262,15 +274,40 @@ SET WHERE id = $1; --- name: GetHungProvisionerJobs :many +-- name: UpdateProvisionerJobWithCompleteWithStartedAtByID :exec +UPDATE + provisioner_jobs +SET + updated_at = $2, + completed_at = $3, + error = $4, + error_code = $5, + started_at = $6 +WHERE + id = $1; + +-- name: GetProvisionerJobsToBeReaped :many SELECT * FROM provisioner_jobs WHERE - updated_at < $1 - AND started_at IS NOT NULL - AND completed_at IS NULL; + ( + -- If the job has not been started before @pending_since, reap it. 
+    updated_at < @pending_since
+    AND started_at IS NULL
+    AND completed_at IS NULL
+  )
+  OR
+  (
+    -- If the job has been started but not completed before @hung_since, reap it.
+    updated_at < @hung_since
+    AND started_at IS NOT NULL
+    AND completed_at IS NULL
+  )
+-- To avoid repeatedly attempting to reap the same jobs, we randomly order and limit to @max_jobs.
+ORDER BY random()
+LIMIT @max_jobs;
 
 -- name: InsertProvisionerJobTimings :many
 INSERT INTO provisioner_job_timings (job_id, started_at, ended_at, stage, source, action, resource)
diff --git a/coderd/httpmw/loggermw/logger.go b/coderd/httpmw/loggermw/logger.go
index 9eeb07a5f10e5..30e5e2d811ad8 100644
--- a/coderd/httpmw/loggermw/logger.go
+++ b/coderd/httpmw/loggermw/logger.go
@@ -132,7 +132,7 @@ var actorLogOrder = []rbac.SubjectType{
 	rbac.SubjectTypeAutostart,
 	rbac.SubjectTypeCryptoKeyReader,
 	rbac.SubjectTypeCryptoKeyRotator,
-	rbac.SubjectTypeHangDetector,
+	rbac.SubjectTypeJobReaper,
 	rbac.SubjectTypeNotifier,
 	rbac.SubjectTypePrebuildsOrchestrator,
 	rbac.SubjectTypeProvisionerd,
diff --git a/coderd/unhanger/detector.go b/coderd/jobreaper/detector.go
similarity index 72%
rename from coderd/unhanger/detector.go
rename to coderd/jobreaper/detector.go
index 14383b1839363..ad5774ee6b95d 100644
--- a/coderd/unhanger/detector.go
+++ b/coderd/jobreaper/detector.go
@@ -1,11 +1,10 @@
-package unhanger
+package jobreaper
 
 import (
 	"context"
 	"database/sql"
 	"encoding/json"
-	"fmt"
-	"math/rand" //#nosec // this is only used for shuffling an array to pick random jobs to unhang
+	"fmt"
 	"time"
 
 	"golang.org/x/xerrors"
@@ -21,10 +20,14 @@ import (
 )
 
 const (
-	// HungJobDuration is the duration of time since the last update to a job
-	// before it is considered hung.
+	// HungJobDuration is the duration of time since the last update
+	// to a RUNNING job before it is considered hung.
 	HungJobDuration = 5 * time.Minute
 
+	// PendingJobDuration is the duration of time since last update
+	// to a PENDING job before it is considered dead.
+	PendingJobDuration = 30 * time.Minute
+
 	// HungJobExitTimeout is the duration of time that provisioners should allow
 	// for a graceful exit upon cancellation due to failing to send an update to
 	// a job.
@@ -38,16 +41,30 @@ const (
 	MaxJobsPerRun = 10
 )
 
-// HungJobLogMessages are written to provisioner job logs when a job is hung and
-// terminated.
-var HungJobLogMessages = []string{
-	"",
-	"====================",
-	"Coder: Build has been detected as hung for 5 minutes and will be terminated.",
-	"====================",
-	"",
+// JobLogMessages are written to provisioner job logs when a job is reaped.
+func JobLogMessages(reapType ReapType, threshold time.Duration) []string {
+	return []string{
+		"",
+		"====================",
+		fmt.Sprintf("Coder: Build has been detected as %s for %.0f minutes and will be terminated.", reapType, threshold.Minutes()),
+		"====================",
+		"",
+	}
+}
+
+type jobToReap struct {
+	ID        uuid.UUID
+	Threshold time.Duration
+	Type      ReapType
 }
 
+type ReapType string
+
+const (
+	Pending ReapType = "pending"
+	Hung    ReapType = "hung"
+)
+
 // acquireLockError is returned when the detector fails to acquire a lock and
 // cancels the current run.
 type acquireLockError struct{}
@@ -93,10 +110,10 @@ type Stats struct {
 	Error error
 }
 
-// New returns a new hang detector.
+// New returns a new job reaper.
 func New(ctx context.Context, db database.Store, pub pubsub.Pubsub, log slog.Logger, tick <-chan time.Time) *Detector {
-	//nolint:gocritic // Hang detector has a limited set of permissions.
-	ctx, cancel := context.WithCancel(dbauthz.AsHangDetector(ctx))
+	//nolint:gocritic // Job reaper has a limited set of permissions.
+ ctx, cancel := context.WithCancel(dbauthz.AsJobReaper(ctx)) d := &Detector{ ctx: ctx, cancel: cancel, @@ -172,34 +189,42 @@ func (d *Detector) run(t time.Time) Stats { Error: nil, } - // Find all provisioner jobs that are currently running but have not - // received an update in the last 5 minutes. - jobs, err := d.db.GetHungProvisionerJobs(ctx, t.Add(-HungJobDuration)) + // Find all provisioner jobs to be reaped + jobs, err := d.db.GetProvisionerJobsToBeReaped(ctx, database.GetProvisionerJobsToBeReapedParams{ + PendingSince: t.Add(-PendingJobDuration), + HungSince: t.Add(-HungJobDuration), + MaxJobs: MaxJobsPerRun, + }) if err != nil { - stats.Error = xerrors.Errorf("get hung provisioner jobs: %w", err) + stats.Error = xerrors.Errorf("get provisioner jobs to be reaped: %w", err) return stats } - // Limit the number of jobs we'll unhang in a single run to avoid - // timing out. - if len(jobs) > MaxJobsPerRun { - // Pick a random subset of the jobs to unhang. - rand.Shuffle(len(jobs), func(i, j int) { - jobs[i], jobs[j] = jobs[j], jobs[i] - }) - jobs = jobs[:MaxJobsPerRun] - } + jobsToReap := make([]*jobToReap, 0, len(jobs)) - // Send a message into the build log for each hung job saying that it - // has been detected and will be terminated, then mark the job as - // failed. for _, job := range jobs { + j := &jobToReap{ + ID: job.ID, + } + if job.JobStatus == database.ProvisionerJobStatusPending { + j.Threshold = PendingJobDuration + j.Type = Pending + } else { + j.Threshold = HungJobDuration + j.Type = Hung + } + jobsToReap = append(jobsToReap, j) + } + + // Send a message into the build log for each hung or pending job saying that it + // has been detected and will be terminated, then mark the job as failed. 
+ for _, job := range jobsToReap { log := d.log.With(slog.F("job_id", job.ID)) - err := unhangJob(ctx, log, d.db, d.pubsub, job.ID) + err := reapJob(ctx, log, d.db, d.pubsub, job) if err != nil { if !(xerrors.As(err, &acquireLockError{}) || xerrors.As(err, &jobIneligibleError{})) { - log.Error(ctx, "error forcefully terminating hung provisioner job", slog.Error(err)) + log.Error(ctx, "error forcefully terminating provisioner job", slog.F("type", job.Type), slog.Error(err)) } continue } @@ -210,47 +235,34 @@ func (d *Detector) run(t time.Time) Stats { return stats } -func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubsub.Pubsub, jobID uuid.UUID) error { +func reapJob(ctx context.Context, log slog.Logger, db database.Store, pub pubsub.Pubsub, jobToReap *jobToReap) error { var lowestLogID int64 err := db.InTx(func(db database.Store) error { - locked, err := db.TryAcquireLock(ctx, database.GenLockID(fmt.Sprintf("hang-detector:%s", jobID))) - if err != nil { - return xerrors.Errorf("acquire lock: %w", err) - } - if !locked { - // This error is ignored. - return acquireLockError{} - } - // Refetch the job while we hold the lock. - job, err := db.GetProvisionerJobByID(ctx, jobID) + job, err := db.GetProvisionerJobByIDForUpdate(ctx, jobToReap.ID) if err != nil { + if xerrors.Is(err, sql.ErrNoRows) { + return acquireLockError{} + } return xerrors.Errorf("get provisioner job: %w", err) } - // Check if we should still unhang it. - if !job.StartedAt.Valid { - // This shouldn't be possible to hit because the query only selects - // started and not completed jobs, and a job can't be "un-started". 
-		return jobIneligibleError{
-			Err: xerrors.New("job is not started"),
-		}
-	}
 	if job.CompletedAt.Valid {
 		return jobIneligibleError{
 			Err: xerrors.Errorf("job is completed (status %s)", job.JobStatus),
 		}
 	}
-	if job.UpdatedAt.After(time.Now().Add(-HungJobDuration)) {
+	if job.UpdatedAt.After(time.Now().Add(-jobToReap.Threshold)) {
 		return jobIneligibleError{
 			Err: xerrors.New("job has been updated recently"),
 		}
 	}
 
 	log.Warn(
-		ctx, "detected hung provisioner job, forcefully terminating",
-		"threshold", HungJobDuration,
+		ctx, "forcefully terminating provisioner job",
+		"type", jobToReap.Type,
+		"threshold", jobToReap.Threshold,
 	)
 
 	// First, get the latest logs from the build so we can make sure
@@ -260,7 +272,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
 		CreatedAfter: 0,
 	})
 	if err != nil {
-		return xerrors.Errorf("get logs for hung job: %w", err)
+		return xerrors.Errorf("get logs for %s job: %w", jobToReap.Type, err)
 	}
 	logStage := ""
 	if len(logs) != 0 {
@@ -280,7 +292,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
 		Output: nil,
 	}
 	now := dbtime.Now()
-	for i, msg := range HungJobLogMessages {
+	for i, msg := range JobLogMessages(jobToReap.Type, jobToReap.Threshold) {
 		// Set the created at in a way that ensures each message has
 		// a unique timestamp so they will be sorted correctly.
 		insertParams.CreatedAt = append(insertParams.CreatedAt, now.Add(time.Millisecond*time.Duration(i)))
@@ -291,13 +303,22 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs
 	}
 	newLogs, err := db.InsertProvisionerJobLogs(ctx, insertParams)
 	if err != nil {
-		return xerrors.Errorf("insert logs for hung job: %w", err)
+		return xerrors.Errorf("insert logs for %s job: %w", jobToReap.Type, err)
 	}
 	lowestLogID = newLogs[0].ID
 
 	// Mark the job as failed.
now = dbtime.Now() - err = db.UpdateProvisionerJobWithCompleteByID(ctx, database.UpdateProvisionerJobWithCompleteByIDParams{ + + // If the job was never started (pending), set the StartedAt time to the current + // time so that the build duration is correct. + if job.JobStatus == database.ProvisionerJobStatusPending { + job.StartedAt = sql.NullTime{ + Time: now, + Valid: true, + } + } + err = db.UpdateProvisionerJobWithCompleteWithStartedAtByID(ctx, database.UpdateProvisionerJobWithCompleteWithStartedAtByIDParams{ ID: job.ID, UpdatedAt: now, CompletedAt: sql.NullTime{ @@ -305,12 +326,13 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs Valid: true, }, Error: sql.NullString{ - String: "Coder: Build has been detected as hung for 5 minutes and has been terminated by hang detector.", + String: fmt.Sprintf("Coder: Build has been detected as %s for %.0f minutes and has been terminated by the reaper.", jobToReap.Type, jobToReap.Threshold.Minutes()), Valid: true, }, ErrorCode: sql.NullString{ Valid: false, }, + StartedAt: job.StartedAt, }) if err != nil { return xerrors.Errorf("mark job as failed: %w", err) @@ -364,7 +386,7 @@ func unhangJob(ctx context.Context, log slog.Logger, db database.Store, pub pubs if err != nil { return xerrors.Errorf("marshal log notification: %w", err) } - err = pub.Publish(provisionersdk.ProvisionerJobLogsNotifyChannel(jobID), data) + err = pub.Publish(provisionersdk.ProvisionerJobLogsNotifyChannel(jobToReap.ID), data) if err != nil { return xerrors.Errorf("publish log notification: %w", err) } diff --git a/coderd/unhanger/detector_test.go b/coderd/jobreaper/detector_test.go similarity index 73% rename from coderd/unhanger/detector_test.go rename to coderd/jobreaper/detector_test.go index 43eb62bfa884b..28457aeeca3a8 100644 --- a/coderd/unhanger/detector_test.go +++ b/coderd/jobreaper/detector_test.go @@ -1,4 +1,4 @@ -package unhanger_test +package jobreaper_test import ( "context" @@ -20,9 +20,9 @@ import ( 
"github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/database/dbgen" "github.com/coder/coder/v2/coderd/database/dbtestutil" + "github.com/coder/coder/v2/coderd/jobreaper" "github.com/coder/coder/v2/coderd/provisionerdserver" "github.com/coder/coder/v2/coderd/rbac" - "github.com/coder/coder/v2/coderd/unhanger" "github.com/coder/coder/v2/provisionersdk" "github.com/coder/coder/v2/testutil" ) @@ -39,10 +39,10 @@ func TestDetectorNoJobs(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- time.Now() @@ -62,7 +62,7 @@ func TestDetectorNoHungJobs(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) // Insert some jobs that are running and haven't been updated in a while, @@ -89,7 +89,7 @@ func TestDetectorNoHungJobs(t *testing.T) { }) } - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -109,7 +109,7 @@ func TestDetectorHungWorkspaceBuild(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -195,7 +195,7 @@ func TestDetectorHungWorkspaceBuild(t *testing.T) { t.Log("previous job ID: ", previousWorkspaceBuildJob.ID) t.Log("current job ID: ", currentWorkspaceBuildJob.ID) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, 
tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -231,7 +231,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideState(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -318,7 +318,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideState(t *testing.T) { t.Log("previous job ID: ", previousWorkspaceBuildJob.ID) t.Log("current job ID: ", currentWorkspaceBuildJob.ID) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -354,7 +354,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -411,7 +411,7 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T t.Log("current job ID: ", currentWorkspaceBuildJob.ID) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -439,6 +439,100 @@ func TestDetectorHungWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T detector.Wait() } +func TestDetectorPendingWorkspaceBuildNoOverrideStateIfNoExistingBuild(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitLong) + db, pubsub = dbtestutil.NewDB(t) + log = testutil.Logger(t) + tickCh = make(chan time.Time) + statsCh = make(chan jobreaper.Stats) + ) + + var ( + now = time.Now() + thirtyFiveMinAgo = 
now.Add(-time.Minute * 35) + org = dbgen.Organization(t, db, database.Organization{}) + user = dbgen.User(t, db, database.User{}) + file = dbgen.File(t, db, database.File{}) + template = dbgen.Template(t, db, database.Template{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + templateVersion = dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + TemplateID: uuid.NullUUID{ + UUID: template.ID, + Valid: true, + }, + CreatedBy: user.ID, + }) + workspace = dbgen.Workspace(t, db, database.WorkspaceTable{ + OwnerID: user.ID, + OrganizationID: org.ID, + TemplateID: template.ID, + }) + + // First build. + expectedWorkspaceBuildState = []byte(`{"dean":"cool","colin":"also cool"}`) + currentWorkspaceBuildJob = dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + CreatedAt: thirtyFiveMinAgo, + UpdatedAt: thirtyFiveMinAgo, + StartedAt: sql.NullTime{ + Time: time.Time{}, + Valid: false, + }, + OrganizationID: org.ID, + InitiatorID: user.ID, + Provisioner: database.ProvisionerTypeEcho, + StorageMethod: database.ProvisionerStorageMethodFile, + FileID: file.ID, + Type: database.ProvisionerJobTypeWorkspaceBuild, + Input: []byte("{}"), + }) + currentWorkspaceBuild = dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{ + WorkspaceID: workspace.ID, + TemplateVersionID: templateVersion.ID, + BuildNumber: 1, + JobID: currentWorkspaceBuildJob.ID, + // Should not be overridden. + ProvisionerState: expectedWorkspaceBuildState, + }) + ) + + t.Log("current job ID: ", currentWorkspaceBuildJob.ID) + + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector.Start() + tickCh <- now + + stats := <-statsCh + require.NoError(t, stats.Error) + require.Len(t, stats.TerminatedJobIDs, 1) + require.Equal(t, currentWorkspaceBuildJob.ID, stats.TerminatedJobIDs[0]) + + // Check that the current provisioner job was updated. 
+ job, err := db.GetProvisionerJobByID(ctx, currentWorkspaceBuildJob.ID) + require.NoError(t, err) + require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second) + require.True(t, job.CompletedAt.Valid) + require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second) + require.True(t, job.StartedAt.Valid) + require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second) + require.True(t, job.Error.Valid) + require.Contains(t, job.Error.String, "Build has been detected as pending") + require.False(t, job.ErrorCode.Valid) + + // Check that the provisioner state was NOT updated. + build, err := db.GetWorkspaceBuildByID(ctx, currentWorkspaceBuild.ID) + require.NoError(t, err) + require.Equal(t, expectedWorkspaceBuildState, build.ProvisionerState) + + detector.Close() + detector.Wait() +} + func TestDetectorHungOtherJobTypes(t *testing.T) { t.Parallel() @@ -447,7 +541,7 @@ func TestDetectorHungOtherJobTypes(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -509,7 +603,7 @@ func TestDetectorHungOtherJobTypes(t *testing.T) { t.Log("template import job ID: ", templateImportJob.ID) t.Log("template dry-run job ID: ", templateDryRunJob.ID) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -543,6 +637,113 @@ func TestDetectorHungOtherJobTypes(t *testing.T) { detector.Wait() } +func TestDetectorPendingOtherJobTypes(t *testing.T) { + t.Parallel() + + var ( + ctx = testutil.Context(t, testutil.WaitLong) + db, pubsub = dbtestutil.NewDB(t) + log = testutil.Logger(t) + tickCh = make(chan time.Time) + statsCh = make(chan jobreaper.Stats) + ) + + var ( + now = time.Now() + thirtyFiveMinAgo = now.Add(-time.Minute * 35) + org = dbgen.Organization(t, db, 
database.Organization{}) + user = dbgen.User(t, db, database.User{}) + file = dbgen.File(t, db, database.File{}) + + // Template import job. + templateImportJob = dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + CreatedAt: thirtyFiveMinAgo, + UpdatedAt: thirtyFiveMinAgo, + StartedAt: sql.NullTime{ + Time: time.Time{}, + Valid: false, + }, + OrganizationID: org.ID, + InitiatorID: user.ID, + Provisioner: database.ProvisionerTypeEcho, + StorageMethod: database.ProvisionerStorageMethodFile, + FileID: file.ID, + Type: database.ProvisionerJobTypeTemplateVersionImport, + Input: []byte("{}"), + }) + _ = dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + JobID: templateImportJob.ID, + CreatedBy: user.ID, + }) + ) + + // Template dry-run job. + dryRunVersion := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: org.ID, + CreatedBy: user.ID, + }) + input, err := json.Marshal(provisionerdserver.TemplateVersionDryRunJob{ + TemplateVersionID: dryRunVersion.ID, + }) + require.NoError(t, err) + templateDryRunJob := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + CreatedAt: thirtyFiveMinAgo, + UpdatedAt: thirtyFiveMinAgo, + StartedAt: sql.NullTime{ + Time: time.Time{}, + Valid: false, + }, + OrganizationID: org.ID, + InitiatorID: user.ID, + Provisioner: database.ProvisionerTypeEcho, + StorageMethod: database.ProvisionerStorageMethodFile, + FileID: file.ID, + Type: database.ProvisionerJobTypeTemplateVersionDryRun, + Input: input, + }) + + t.Log("template import job ID: ", templateImportJob.ID) + t.Log("template dry-run job ID: ", templateDryRunJob.ID) + + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector.Start() + tickCh <- now + + stats := <-statsCh + require.NoError(t, stats.Error) + require.Len(t, stats.TerminatedJobIDs, 2) + require.Contains(t, stats.TerminatedJobIDs, templateImportJob.ID) + require.Contains(t, stats.TerminatedJobIDs, 
templateDryRunJob.ID) + + // Check that the template import job was updated. + job, err := db.GetProvisionerJobByID(ctx, templateImportJob.ID) + require.NoError(t, err) + require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second) + require.True(t, job.CompletedAt.Valid) + require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second) + require.True(t, job.StartedAt.Valid) + require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second) + require.True(t, job.Error.Valid) + require.Contains(t, job.Error.String, "Build has been detected as pending") + require.False(t, job.ErrorCode.Valid) + + // Check that the template dry-run job was updated. + job, err = db.GetProvisionerJobByID(ctx, templateDryRunJob.ID) + require.NoError(t, err) + require.WithinDuration(t, now, job.UpdatedAt, 30*time.Second) + require.True(t, job.CompletedAt.Valid) + require.WithinDuration(t, now, job.CompletedAt.Time, 30*time.Second) + require.True(t, job.StartedAt.Valid) + require.WithinDuration(t, now, job.StartedAt.Time, 30*time.Second) + require.True(t, job.Error.Valid) + require.Contains(t, job.Error.String, "Build has been detected as pending") + require.False(t, job.ErrorCode.Valid) + + detector.Close() + detector.Wait() +} + func TestDetectorHungCanceledJob(t *testing.T) { t.Parallel() @@ -551,7 +752,7 @@ func TestDetectorHungCanceledJob(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -591,7 +792,7 @@ func TestDetectorHungCanceledJob(t *testing.T) { t.Log("template import job ID: ", templateImportJob.ID) - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now @@ -653,7 +854,7 @@ func TestDetectorPushesLogs(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = 
testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) ) var ( @@ -706,7 +907,7 @@ func TestDetectorPushesLogs(t *testing.T) { require.Len(t, logs, 10) } - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() // Create pubsub subscription to listen for new log events. @@ -741,12 +942,19 @@ func TestDetectorPushesLogs(t *testing.T) { CreatedAfter: after, }) require.NoError(t, err) - require.Len(t, logs, len(unhanger.HungJobLogMessages)) + threshold := jobreaper.HungJobDuration + jobType := jobreaper.Hung + if templateImportJob.JobStatus == database.ProvisionerJobStatusPending { + threshold = jobreaper.PendingJobDuration + jobType = jobreaper.Pending + } + expectedLogs := jobreaper.JobLogMessages(jobType, threshold) + require.Len(t, logs, len(expectedLogs)) for i, log := range logs { assert.Equal(t, database.LogLevelError, log.Level) assert.Equal(t, c.expectStage, log.Stage) assert.Equal(t, database.LogSourceProvisionerDaemon, log.Source) - assert.Equal(t, unhanger.HungJobLogMessages[i], log.Output) + assert.Equal(t, expectedLogs[i], log.Output) } // Double check the full log count. 
@@ -755,7 +963,7 @@ func TestDetectorPushesLogs(t *testing.T) { CreatedAfter: 0, }) require.NoError(t, err) - require.Len(t, logs, c.preLogCount+len(unhanger.HungJobLogMessages)) + require.Len(t, logs, c.preLogCount+len(expectedLogs)) detector.Close() detector.Wait() @@ -771,15 +979,15 @@ func TestDetectorMaxJobsPerRun(t *testing.T) { db, pubsub = dbtestutil.NewDB(t) log = testutil.Logger(t) tickCh = make(chan time.Time) - statsCh = make(chan unhanger.Stats) + statsCh = make(chan jobreaper.Stats) org = dbgen.Organization(t, db, database.Organization{}) user = dbgen.User(t, db, database.User{}) file = dbgen.File(t, db, database.File{}) ) - // Create unhanger.MaxJobsPerRun + 1 hung jobs. + // Create MaxJobsPerRun + 1 hung jobs. now := time.Now() - for i := 0; i < unhanger.MaxJobsPerRun+1; i++ { + for i := 0; i < jobreaper.MaxJobsPerRun+1; i++ { pj := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ CreatedAt: now.Add(-time.Hour), UpdatedAt: now.Add(-time.Hour), @@ -802,14 +1010,14 @@ func TestDetectorMaxJobsPerRun(t *testing.T) { }) } - detector := unhanger.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) + detector := jobreaper.New(ctx, wrapDBAuthz(db, log), pubsub, log, tickCh).WithStatsChannel(statsCh) detector.Start() tickCh <- now - // Make sure that only unhanger.MaxJobsPerRun jobs are terminated. + // Make sure that only MaxJobsPerRun jobs are terminated. stats := <-statsCh require.NoError(t, stats.Error) - require.Len(t, stats.TerminatedJobIDs, unhanger.MaxJobsPerRun) + require.Len(t, stats.TerminatedJobIDs, jobreaper.MaxJobsPerRun) // Run the detector again and make sure that only the remaining job is // terminated. @@ -823,7 +1031,7 @@ func TestDetectorMaxJobsPerRun(t *testing.T) { } // wrapDBAuthz adds our Authorization/RBAC around the given database store, to -// ensure the unhanger has the right permissions to do its work. +// ensure the reaper has the right permissions to do its work. 
func wrapDBAuthz(db database.Store, logger slog.Logger) database.Store { return dbauthz.New( db, diff --git a/coderd/rbac/authz.go b/coderd/rbac/authz.go index d2c6d5d0675be..c63042a2a1363 100644 --- a/coderd/rbac/authz.go +++ b/coderd/rbac/authz.go @@ -65,7 +65,7 @@ const ( SubjectTypeUser SubjectType = "user" SubjectTypeProvisionerd SubjectType = "provisionerd" SubjectTypeAutostart SubjectType = "autostart" - SubjectTypeHangDetector SubjectType = "hang_detector" + SubjectTypeJobReaper SubjectType = "job_reaper" SubjectTypeResourceMonitor SubjectType = "resource_monitor" SubjectTypeCryptoKeyRotator SubjectType = "crypto_key_rotator" SubjectTypeCryptoKeyReader SubjectType = "crypto_key_reader" diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index 40b7dc87a56f8..ad1a510fd44bd 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -234,7 +234,9 @@ var ( // ResourceProvisionerJobs // Valid Actions + // - "ActionCreate" :: create provisioner jobs // - "ActionRead" :: read provisioner jobs + // - "ActionUpdate" :: update provisioner jobs ResourceProvisionerJobs = Object{ Type: "provisioner_jobs", } diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index 35da0892abfdb..c37e84c48f964 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -182,7 +182,9 @@ var RBACPermissions = map[string]PermissionDefinition{ }, "provisioner_jobs": { Actions: map[Action]ActionDefinition{ - ActionRead: actDef("read provisioner jobs"), + ActionRead: actDef("read provisioner jobs"), + ActionUpdate: actDef("update provisioner jobs"), + ActionCreate: actDef("create provisioner jobs"), }, }, "organization": { diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index 56124faee44e2..0b94a74201b16 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -503,7 +503,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // the ability to create templates and provisioners has // a lot of overlap. 
ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, - ResourceProvisionerJobs.Type: {policy.ActionRead}, + ResourceProvisionerJobs.Type: {policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, }), }, User: []Permission{}, diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index e90c89914fdec..6d42a01474d1a 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -580,7 +580,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "ProvisionerJobs", - Actions: []policy.Action{policy.ActionRead}, + Actions: []policy.Action{policy.ActionRead, policy.ActionUpdate, policy.ActionCreate}, Resource: rbac.ResourceProvisionerJobs.InOrg(orgID), AuthorizeMap: map[bool][]hasAuthSubjects{ true: {owner, orgTemplateAdmin, orgAdmin}, diff --git a/codersdk/deployment.go b/codersdk/deployment.go index 0741bf9e3844a..39b67feb2c73a 100644 --- a/codersdk/deployment.go +++ b/codersdk/deployment.go @@ -345,7 +345,7 @@ type DeploymentValues struct { // HTTPAddress is a string because it may be set to zero to disable. 
HTTPAddress serpent.String `json:"http_address,omitempty" typescript:",notnull"` AutobuildPollInterval serpent.Duration `json:"autobuild_poll_interval,omitempty"` - JobHangDetectorInterval serpent.Duration `json:"job_hang_detector_interval,omitempty"` + JobReaperDetectorInterval serpent.Duration `json:"job_hang_detector_interval,omitempty"` DERP DERP `json:"derp,omitempty" typescript:",notnull"` Prometheus PrometheusConfig `json:"prometheus,omitempty" typescript:",notnull"` Pprof PprofConfig `json:"pprof,omitempty" typescript:",notnull"` @@ -1287,13 +1287,13 @@ func (c *DeploymentValues) Options() serpent.OptionSet { Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"), }, { - Name: "Job Hang Detector Interval", - Description: "Interval to poll for hung jobs and automatically terminate them.", + Name: "Job Reaper Detect Interval", + Description: "Interval to poll for hung and pending jobs and automatically terminate them.", Flag: "job-hang-detector-interval", Env: "CODER_JOB_HANG_DETECTOR_INTERVAL", Hidden: true, Default: time.Minute.String(), - Value: &c.JobHangDetectorInterval, + Value: &c.JobReaperDetectorInterval, YAML: "jobHangDetectorInterval", Annotations: serpent.Annotations{}.Mark(annotationFormatDuration, "true"), }, diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go index 54f65767928d6..6157281f21356 100644 --- a/codersdk/rbacresources_gen.go +++ b/codersdk/rbacresources_gen.go @@ -90,7 +90,7 @@ var RBACResourceActions = map[RBACResource][]RBACAction{ ResourceOrganization: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceOrganizationMember: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, ResourceProvisionerDaemon: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, - ResourceProvisionerJobs: {ActionRead}, + ResourceProvisionerJobs: {ActionCreate, ActionRead, ActionUpdate}, ResourceReplicas: {ActionRead}, ResourceSystem: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, 
ResourceTailnetCoordinator: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, diff --git a/provisioner/terraform/serve.go b/provisioner/terraform/serve.go index 562946d8ef92e..3e671b0c68e56 100644 --- a/provisioner/terraform/serve.go +++ b/provisioner/terraform/serve.go @@ -16,7 +16,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/v2/coderd/database" - "github.com/coder/coder/v2/coderd/unhanger" + "github.com/coder/coder/v2/coderd/jobreaper" "github.com/coder/coder/v2/provisionersdk" ) @@ -39,9 +39,9 @@ type ServeOptions struct { // // This is a no-op on Windows where the process can't be interrupted. // - // Default value: 3 minutes (unhanger.HungJobExitTimeout). This value should + // Default value: 3 minutes (jobreaper.HungJobExitTimeout). This value should // be kept less than the value that Coder uses to mark hung jobs as failed, - // which is 5 minutes (see unhanger package). + // which is 5 minutes (see jobreaper package). ExitTimeout time.Duration } @@ -131,7 +131,7 @@ func Serve(ctx context.Context, options *ServeOptions) error { options.Tracer = trace.NewNoopTracerProvider().Tracer("noop") } if options.ExitTimeout == 0 { - options.ExitTimeout = unhanger.HungJobExitTimeout + options.ExitTimeout = jobreaper.HungJobExitTimeout } return provisionersdk.Serve(ctx, &server{ execMut: &sync.Mutex{}, diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts index 079dcb4a87a61..3acb86c079908 100644 --- a/site/src/api/rbacresourcesGenerated.ts +++ b/site/src/api/rbacresourcesGenerated.ts @@ -130,7 +130,9 @@ export const RBACResourceActions: Partial< update: "update a provisioner daemon", }, provisioner_jobs: { + create: "create provisioner jobs", read: "read provisioner jobs", + update: "update provisioner jobs", }, replicas: { read: "read replicas", From 1267c9c4056810adaad72d86ecc25e1e0201caa0 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Tue, 20 May 2025 16:01:57 +0100 Subject: [PATCH 29/42] fix: ensure reason 
present for workspace autoupdated notification (#17935) Fixes https://github.com/coder/coder/issues/17930 Update the `WorkspaceAutoUpdated` notification to only display the reason if it is present. --- coderd/autobuild/lifecycle_executor.go | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index cc4e48b43544c..eedcc812bb19c 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -349,13 +349,18 @@ func (e *Executor) runOnce(t time.Time) Stats { nextBuildReason = string(nextBuild.Reason) } + templateVersionMessage := activeTemplateVersion.Message + if templateVersionMessage == "" { + templateVersionMessage = "None provided" + } + if _, err := e.notificationsEnqueuer.Enqueue(e.ctx, ws.OwnerID, notifications.TemplateWorkspaceAutoUpdated, map[string]string{ "name": ws.Name, "initiator": "autobuild", "reason": nextBuildReason, "template_version_name": activeTemplateVersion.Name, - "template_version_message": activeTemplateVersion.Message, + "template_version_message": templateVersionMessage, }, "autobuild", // Associate this notification with all the related entities. ws.ID, ws.OwnerID, ws.TemplateID, ws.OrganizationID, From 93f17bc73e71d9eb23543bdd4c2ada22ff35a2c8 Mon Sep 17 00:00:00 2001 From: Thomas Kosiewski Date: Tue, 20 May 2025 17:07:50 +0200 Subject: [PATCH 30/42] fix: remove unnecessary user lookup in agent API calls (#17934) # Use workspace.OwnerUsername instead of fetching the owner This PR optimizes the agent API by using the `workspace.OwnerUsername` field directly instead of making an additional database query to fetch the owner's username. The change removes the need to call `GetUserByID` in the manifest API and workspace agent RPC endpoints. 
An issue arose when the agent token was scoped without access to user data (`api_key_scope = "no_user_data"`), causing the agent to fail to fetch the manifest due to an RBAC issue. Change-Id: I3b6e7581134e2374b364ee059e3b18ece3d98b41 Signed-off-by: Thomas Kosiewski --- coderd/agentapi/manifest.go | 11 +- coderd/agentapi/manifest_test.go | 10 +- coderd/workspaceagents_test.go | 64 ++++++--- coderd/workspaceagentsrpc.go | 13 +- coderd/workspaceagentsrpc_test.go | 212 +++++++++++++++++++----------- flake.nix | 1 + 6 files changed, 194 insertions(+), 117 deletions(-) diff --git a/coderd/agentapi/manifest.go b/coderd/agentapi/manifest.go index 66bfe4cb5f94f..855ff4b8acd37 100644 --- a/coderd/agentapi/manifest.go +++ b/coderd/agentapi/manifest.go @@ -47,7 +47,6 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest scripts []database.WorkspaceAgentScript metadata []database.WorkspaceAgentMetadatum workspace database.Workspace - owner database.User devcontainers []database.WorkspaceAgentDevcontainer ) @@ -76,10 +75,6 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest if err != nil { return xerrors.Errorf("getting workspace by id: %w", err) } - owner, err = a.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - return xerrors.Errorf("getting workspace owner by id: %w", err) - } return err }) eg.Go(func() (err error) { @@ -98,7 +93,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest AppSlugOrPort: "{{port}}", AgentName: workspaceAgent.Name, WorkspaceName: workspace.Name, - Username: owner.Username, + Username: workspace.OwnerUsername, } vscodeProxyURI := vscodeProxyURI(appSlug, a.AccessURL, a.AppHostname) @@ -115,7 +110,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest } } - apps, err := dbAppsToProto(dbApps, workspaceAgent, owner.Username, workspace) + apps, err := dbAppsToProto(dbApps, workspaceAgent, workspace.OwnerUsername, 
workspace) if err != nil { return nil, xerrors.Errorf("converting workspace apps: %w", err) } @@ -128,7 +123,7 @@ func (a *ManifestAPI) GetManifest(ctx context.Context, _ *agentproto.GetManifest return &agentproto.Manifest{ AgentId: workspaceAgent.ID[:], AgentName: workspaceAgent.Name, - OwnerUsername: owner.Username, + OwnerUsername: workspace.OwnerUsername, WorkspaceId: workspace.ID[:], WorkspaceName: workspace.Name, GitAuthConfigs: gitAuthConfigs, diff --git a/coderd/agentapi/manifest_test.go b/coderd/agentapi/manifest_test.go index 9273acb0c40ff..fc46f5fe480f8 100644 --- a/coderd/agentapi/manifest_test.go +++ b/coderd/agentapi/manifest_test.go @@ -46,9 +46,10 @@ func TestGetManifest(t *testing.T) { Username: "cool-user", } workspace = database.Workspace{ - ID: uuid.New(), - OwnerID: owner.ID, - Name: "cool-workspace", + ID: uuid.New(), + OwnerID: owner.ID, + OwnerUsername: owner.Username, + Name: "cool-workspace", } agent = database.WorkspaceAgent{ ID: uuid.New(), @@ -336,7 +337,6 @@ func TestGetManifest(t *testing.T) { }).Return(metadata, nil) mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), agent.ID).Return(devcontainers, nil) mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil) - mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil) got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{}) require.NoError(t, err) @@ -404,7 +404,6 @@ func TestGetManifest(t *testing.T) { }).Return([]database.WorkspaceAgentMetadatum{}, nil) mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), childAgent.ID).Return([]database.WorkspaceAgentDevcontainer{}, nil) mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil) - mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil) got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{}) require.NoError(t, err) @@ -468,7 +467,6 @@ func TestGetManifest(t 
*testing.T) { }).Return(metadata, nil) mDB.EXPECT().GetWorkspaceAgentDevcontainersByAgentID(gomock.Any(), agent.ID).Return(devcontainers, nil) mDB.EXPECT().GetWorkspaceByID(gomock.Any(), workspace.ID).Return(workspace, nil) - mDB.EXPECT().GetUserByID(gomock.Any(), workspace.OwnerID).Return(owner, nil) got, err := api.GetManifest(context.Background(), &agentproto.GetManifestRequest{}) require.NoError(t, err) diff --git a/coderd/workspaceagents_test.go b/coderd/workspaceagents_test.go index 27da80b3c579b..f4f3dcdec9f89 100644 --- a/coderd/workspaceagents_test.go +++ b/coderd/workspaceagents_test.go @@ -439,25 +439,55 @@ func TestWorkspaceAgentConnectRPC(t *testing.T) { t.Run("Connect", func(t *testing.T) { t.Parallel() - client, db := coderdtest.NewWithDatabase(t, nil) - user := coderdtest.CreateFirstUser(t, client) - r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ - OrganizationID: user.OrganizationID, - OwnerID: user.UserID, - }).WithAgent().Do() - _ = agenttest.New(t, client.URL, r.AgentToken) - resources := coderdtest.AwaitWorkspaceAgents(t, client, r.Workspace.ID) + for _, tc := range []struct { + name string + apiKeyScope rbac.ScopeName + }{ + { + name: "empty (backwards compat)", + apiKeyScope: "", + }, + { + name: "all", + apiKeyScope: rbac.ScopeAll, + }, + { + name: "no_user_data", + apiKeyScope: rbac.ScopeNoUserData, + }, + { + name: "application_connect", + apiKeyScope: rbac.ScopeApplicationConnect, + }, + } { + t.Run(tc.name, func(t *testing.T) { + client, db := coderdtest.NewWithDatabase(t, nil) + user := coderdtest.CreateFirstUser(t, client) + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + for _, agent := range agents { + agent.ApiKeyScope = string(tc.apiKeyScope) + } - ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) - defer cancel() + return agents + }).Do() + _ = 
agenttest.New(t, client.URL, r.AgentToken) + resources := coderdtest.NewWorkspaceAgentWaiter(t, client, r.Workspace.ID).AgentNames([]string{}).Wait() - conn, err := workspacesdk.New(client). - DialAgent(ctx, resources[0].Agents[0].ID, nil) - require.NoError(t, err) - defer func() { - _ = conn.Close() - }() - conn.AwaitReachable(ctx) + ctx, cancel := context.WithTimeout(context.Background(), testutil.WaitLong) + defer cancel() + + conn, err := workspacesdk.New(client). + DialAgent(ctx, resources[0].Agents[0].ID, nil) + require.NoError(t, err) + defer func() { + _ = conn.Close() + }() + conn.AwaitReachable(ctx) + }) + } }) t.Run("FailNonLatestBuild", func(t *testing.T) { diff --git a/coderd/workspaceagentsrpc.go b/coderd/workspaceagentsrpc.go index 43da35410f632..2dcf65bd8c7d5 100644 --- a/coderd/workspaceagentsrpc.go +++ b/coderd/workspaceagentsrpc.go @@ -76,17 +76,8 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) { return } - owner, err := api.Database.GetUserByID(ctx, workspace.OwnerID) - if err != nil { - httpapi.Write(ctx, rw, http.StatusBadRequest, codersdk.Response{ - Message: "Internal error fetching user.", - Detail: err.Error(), - }) - return - } - logger = logger.With( - slog.F("owner", owner.Username), + slog.F("owner", workspace.OwnerUsername), slog.F("workspace_name", workspace.Name), slog.F("agent_name", workspaceAgent.Name), ) @@ -170,7 +161,7 @@ func (api *API) workspaceAgentRPC(rw http.ResponseWriter, r *http.Request) { }) streamID := tailnet.StreamID{ - Name: fmt.Sprintf("%s-%s-%s", owner.Username, workspace.Name, workspaceAgent.Name), + Name: fmt.Sprintf("%s-%s-%s", workspace.OwnerUsername, workspace.Name, workspaceAgent.Name), ID: workspaceAgent.ID, Auth: tailnet.AgentCoordinateeAuth{ID: workspaceAgent.ID}, } diff --git a/coderd/workspaceagentsrpc_test.go b/coderd/workspaceagentsrpc_test.go index caea9b39c2f54..5175f80b0b723 100644 --- a/coderd/workspaceagentsrpc_test.go +++ b/coderd/workspaceagentsrpc_test.go @@ 
-13,6 +13,7 @@ import ( "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbfake" "github.com/coder/coder/v2/coderd/database/dbtime" + "github.com/coder/coder/v2/coderd/rbac" "github.com/coder/coder/v2/codersdk/agentsdk" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/testutil" @@ -22,89 +23,150 @@ import ( func TestWorkspaceAgentReportStats(t *testing.T) { t.Parallel() - tickCh := make(chan time.Time) - flushCh := make(chan int, 1) - client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ - WorkspaceUsageTrackerFlush: flushCh, - WorkspaceUsageTrackerTick: tickCh, - }) - user := coderdtest.CreateFirstUser(t, client) - r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ - OrganizationID: user.OrganizationID, - OwnerID: user.UserID, - LastUsedAt: dbtime.Now().Add(-time.Minute), - }).WithAgent().Do() + for _, tc := range []struct { + name string + apiKeyScope rbac.ScopeName + }{ + { + name: "empty (backwards compat)", + apiKeyScope: "", + }, + { + name: "all", + apiKeyScope: rbac.ScopeAll, + }, + { + name: "no_user_data", + apiKeyScope: rbac.ScopeNoUserData, + }, + { + name: "application_connect", + apiKeyScope: rbac.ScopeApplicationConnect, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() - ac := agentsdk.New(client.URL) - ac.SetSessionToken(r.AgentToken) - conn, err := ac.ConnectRPC(context.Background()) - require.NoError(t, err) - defer func() { - _ = conn.Close() - }() - agentAPI := agentproto.NewDRPCAgentClient(conn) + tickCh := make(chan time.Time) + flushCh := make(chan int, 1) + client, db := coderdtest.NewWithDatabase(t, &coderdtest.Options{ + WorkspaceUsageTrackerFlush: flushCh, + WorkspaceUsageTrackerTick: tickCh, + }) + user := coderdtest.CreateFirstUser(t, client) + r := dbfake.WorkspaceBuild(t, db, database.WorkspaceTable{ + OrganizationID: user.OrganizationID, + OwnerID: user.UserID, + LastUsedAt: dbtime.Now().Add(-time.Minute), + }).WithAgent( + func(agent 
[]*proto.Agent) []*proto.Agent { + for _, a := range agent { + a.ApiKeyScope = string(tc.apiKeyScope) + } - _, err = agentAPI.UpdateStats(context.Background(), &agentproto.UpdateStatsRequest{ - Stats: &agentproto.Stats{ - ConnectionsByProto: map[string]int64{"TCP": 1}, - ConnectionCount: 1, - RxPackets: 1, - RxBytes: 1, - TxPackets: 1, - TxBytes: 1, - SessionCountVscode: 1, - SessionCountJetbrains: 0, - SessionCountReconnectingPty: 0, - SessionCountSsh: 0, - ConnectionMedianLatencyMs: 10, - }, - }) - require.NoError(t, err) + return agent + }, + ).Do() + + ac := agentsdk.New(client.URL) + ac.SetSessionToken(r.AgentToken) + conn, err := ac.ConnectRPC(context.Background()) + require.NoError(t, err) + defer func() { + _ = conn.Close() + }() + agentAPI := agentproto.NewDRPCAgentClient(conn) + + _, err = agentAPI.UpdateStats(context.Background(), &agentproto.UpdateStatsRequest{ + Stats: &agentproto.Stats{ + ConnectionsByProto: map[string]int64{"TCP": 1}, + ConnectionCount: 1, + RxPackets: 1, + RxBytes: 1, + TxPackets: 1, + TxBytes: 1, + SessionCountVscode: 1, + SessionCountJetbrains: 0, + SessionCountReconnectingPty: 0, + SessionCountSsh: 0, + ConnectionMedianLatencyMs: 10, + }, + }) + require.NoError(t, err) - tickCh <- dbtime.Now() - count := <-flushCh - require.Equal(t, 1, count, "expected one flush with one id") + tickCh <- dbtime.Now() + count := <-flushCh + require.Equal(t, 1, count, "expected one flush with one id") - newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) - require.NoError(t, err) + newWorkspace, err := client.Workspace(context.Background(), r.Workspace.ID) + require.NoError(t, err) - assert.True(t, - newWorkspace.LastUsedAt.After(r.Workspace.LastUsedAt), - "%s is not after %s", newWorkspace.LastUsedAt, r.Workspace.LastUsedAt, - ) + assert.True(t, + newWorkspace.LastUsedAt.After(r.Workspace.LastUsedAt), + "%s is not after %s", newWorkspace.LastUsedAt, r.Workspace.LastUsedAt, + ) + }) + } } func TestAgentAPI_LargeManifest(t 
*testing.T) { t.Parallel() - ctx := testutil.Context(t, testutil.WaitLong) - client, store := coderdtest.NewWithDatabase(t, nil) - adminUser := coderdtest.CreateFirstUser(t, client) - n := 512000 - longScript := make([]byte, n) - for i := range longScript { - longScript[i] = 'q' + + for _, tc := range []struct { + name string + apiKeyScope rbac.ScopeName + }{ + { + name: "empty (backwards compat)", + apiKeyScope: "", + }, + { + name: "all", + apiKeyScope: rbac.ScopeAll, + }, + { + name: "no_user_data", + apiKeyScope: rbac.ScopeNoUserData, + }, + { + name: "application_connect", + apiKeyScope: rbac.ScopeApplicationConnect, + }, + } { + t.Run(tc.name, func(t *testing.T) { + t.Parallel() + ctx := testutil.Context(t, testutil.WaitLong) + client, store := coderdtest.NewWithDatabase(t, nil) + adminUser := coderdtest.CreateFirstUser(t, client) + n := 512000 + longScript := make([]byte, n) + for i := range longScript { + longScript[i] = 'q' + } + r := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ + OrganizationID: adminUser.OrganizationID, + OwnerID: adminUser.UserID, + }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { + agents[0].Scripts = []*proto.Script{ + { + Script: string(longScript), + }, + } + agents[0].ApiKeyScope = string(tc.apiKeyScope) + return agents + }).Do() + ac := agentsdk.New(client.URL) + ac.SetSessionToken(r.AgentToken) + conn, err := ac.ConnectRPC(ctx) + defer func() { + _ = conn.Close() + }() + require.NoError(t, err) + agentAPI := agentproto.NewDRPCAgentClient(conn) + manifest, err := agentAPI.GetManifest(ctx, &agentproto.GetManifestRequest{}) + require.NoError(t, err) + require.Len(t, manifest.Scripts, 1) + require.Len(t, manifest.Scripts[0].Script, n) + }) } - r := dbfake.WorkspaceBuild(t, store, database.WorkspaceTable{ - OrganizationID: adminUser.OrganizationID, - OwnerID: adminUser.UserID, - }).WithAgent(func(agents []*proto.Agent) []*proto.Agent { - agents[0].Scripts = []*proto.Script{ - { - Script: string(longScript), - }, - 
} - return agents - }).Do() - ac := agentsdk.New(client.URL) - ac.SetSessionToken(r.AgentToken) - conn, err := ac.ConnectRPC(ctx) - defer func() { - _ = conn.Close() - }() - require.NoError(t, err) - agentAPI := agentproto.NewDRPCAgentClient(conn) - manifest, err := agentAPI.GetManifest(ctx, &agentproto.GetManifestRequest{}) - require.NoError(t, err) - require.Len(t, manifest.Scripts, 1) - require.Len(t, manifest.Scripts[0].Script, n) } diff --git a/flake.nix b/flake.nix index bff207662f913..c0f36c3be6e0f 100644 --- a/flake.nix +++ b/flake.nix @@ -141,6 +141,7 @@ kubectl kubectx kubernetes-helm + lazydocker lazygit less mockgen From e76d58f2b692e12ba37c0dba22bb2960bf313568 Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Tue, 20 May 2025 10:09:53 -0500 Subject: [PATCH 31/42] chore: disable parameter validatation for dynamic params for all transitions (#17926) Dynamic params skip parameter validation in coder/coder. This is because conditional parameters cannot be validated with the static parameters in the database. --- cli/server.go | 2 +- coderd/apidoc/docs.go | 4 + coderd/apidoc/swagger.json | 4 + coderd/autobuild/lifecycle_executor.go | 6 +- coderd/coderdtest/coderdtest.go | 2 + coderd/parameters.go | 11 +-- coderd/workspacebuilds.go | 17 ++++ coderd/workspaces.go | 4 +- coderd/wsbuilder/wsbuilder.go | 78 +++++++++++++++-- coderd/wsbuilder/wsbuilder_test.go | 26 ++++++ codersdk/workspaces.go | 4 + docs/reference/api/builds.md | 1 + docs/reference/api/schemas.md | 2 + enterprise/coderd/workspaces_test.go | 113 +++++++++++++++++++++++++ site/src/api/typesGenerated.ts | 1 + 15 files changed, 258 insertions(+), 17 deletions(-) diff --git a/cli/server.go b/cli/server.go index 59993b55771a9..1794044bce48f 100644 --- a/cli/server.go +++ b/cli/server.go @@ -1124,7 +1124,7 @@ func (r *RootCmd) Server(newAPI func(context.Context, *coderd.Options) (*coderd. 
autobuildTicker := time.NewTicker(vals.AutobuildPollInterval.Value()) defer autobuildTicker.Stop() autobuildExecutor := autobuild.NewExecutor( - ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer) + ctx, options.Database, options.Pubsub, options.PrometheusRegistry, coderAPI.TemplateScheduleStore, &coderAPI.Auditor, coderAPI.AccessControlStore, logger, autobuildTicker.C, options.NotificationsEnqueuer, coderAPI.Experiments) autobuildExecutor.Run() jobReaperTicker := time.NewTicker(vals.JobReaperDetectorInterval.Value()) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index f59fcd308c655..95e2cc0f48ac8 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -11998,6 +11998,10 @@ const docTemplate = `{ "dry_run": { "type": "boolean" }, + "enable_dynamic_parameters": { + "description": "EnableDynamicParameters skips some of the static parameter checking.\nIt will default to whatever the template has marked as the default experience.\nRequires the \"dynamic-experiment\" to be used.", + "type": "boolean" + }, "log_level": { "description": "Log level changes the default logging verbosity of a provider (\"info\" if empty).", "enum": [ diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 25f3c2166755d..02212d9944415 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -10716,6 +10716,10 @@ "dry_run": { "type": "boolean" }, + "enable_dynamic_parameters": { + "description": "EnableDynamicParameters skips some of the static parameter checking.\nIt will default to whatever the template has marked as the default experience.\nRequires the \"dynamic-experiment\" to be used.", + "type": "boolean" + }, "log_level": { "description": "Log level changes the default logging verbosity of a provider (\"info\" if empty).", "enum": ["debug"], diff --git 
a/coderd/autobuild/lifecycle_executor.go b/coderd/autobuild/lifecycle_executor.go index eedcc812bb19c..b0cba60111335 100644 --- a/coderd/autobuild/lifecycle_executor.go +++ b/coderd/autobuild/lifecycle_executor.go @@ -27,6 +27,7 @@ import ( "github.com/coder/coder/v2/coderd/notifications" "github.com/coder/coder/v2/coderd/schedule" "github.com/coder/coder/v2/coderd/wsbuilder" + "github.com/coder/coder/v2/codersdk" ) // Executor automatically starts or stops workspaces. @@ -43,6 +44,7 @@ type Executor struct { // NotificationsEnqueuer handles enqueueing notifications for delivery by SMTP, webhook, etc. notificationsEnqueuer notifications.Enqueuer reg prometheus.Registerer + experiments codersdk.Experiments metrics executorMetrics } @@ -59,7 +61,7 @@ type Stats struct { } // New returns a new wsactions executor. -func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer) *Executor { +func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg prometheus.Registerer, tss *atomic.Pointer[schedule.TemplateScheduleStore], auditor *atomic.Pointer[audit.Auditor], acs *atomic.Pointer[dbauthz.AccessControlStore], log slog.Logger, tick <-chan time.Time, enqueuer notifications.Enqueuer, exp codersdk.Experiments) *Executor { factory := promauto.With(reg) le := &Executor{ //nolint:gocritic // Autostart has a limited set of permissions. 
@@ -73,6 +75,7 @@ func NewExecutor(ctx context.Context, db database.Store, ps pubsub.Pubsub, reg p accessControlStore: acs, notificationsEnqueuer: enqueuer, reg: reg, + experiments: exp, metrics: executorMetrics{ autobuildExecutionDuration: factory.NewHistogram(prometheus.HistogramOpts{ Namespace: "coderd", @@ -258,6 +261,7 @@ func (e *Executor) runOnce(t time.Time) Stats { builder := wsbuilder.New(ws, nextTransition). SetLastWorkspaceBuildInTx(&latestBuild). SetLastWorkspaceBuildJobInTx(&latestJob). + Experiments(e.experiments). Reason(reason) log.Debug(e.ctx, "auto building workspace", slog.F("transition", nextTransition)) if nextTransition == database.WorkspaceTransitionStart && diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 90a29e0f0d876..a8f444c8f632e 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -354,6 +354,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can auditor.Store(&options.Auditor) ctx, cancelFunc := context.WithCancel(context.Background()) + experiments := coderd.ReadExperiments(*options.Logger, options.DeploymentValues.Experiments) lifecycleExecutor := autobuild.NewExecutor( ctx, options.Database, @@ -365,6 +366,7 @@ func NewOptions(t testing.TB, options *Options) (func(http.Handler), context.Can *options.Logger, options.AutobuildTicker, options.NotificationsEnqueuer, + experiments, ).WithStatsChannel(options.AutobuildStats) lifecycleExecutor.Run() diff --git a/coderd/parameters.go b/coderd/parameters.go index c3fc4ffdeeede..13b1346991c90 100644 --- a/coderd/parameters.go +++ b/coderd/parameters.go @@ -12,13 +12,13 @@ import ( "golang.org/x/sync/errgroup" "golang.org/x/xerrors" - "github.com/coder/coder/v2/apiversion" "github.com/coder/coder/v2/coderd/database" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/files" "github.com/coder/coder/v2/coderd/httpapi" "github.com/coder/coder/v2/coderd/httpmw" 
"github.com/coder/coder/v2/coderd/util/ptr" + "github.com/coder/coder/v2/coderd/wsbuilder" "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/codersdk/wsjson" sdkproto "github.com/coder/coder/v2/provisionersdk/proto" @@ -69,13 +69,10 @@ func (api *API) templateVersionDynamicParameters(rw http.ResponseWriter, r *http return } - major, minor, err := apiversion.Parse(tf.ProvisionerdVersion) - // If the api version is not valid or less than 1.5, we need to use the static parameters - useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6) - if useStaticParams { - api.handleStaticParameters(rw, r, templateVersion.ID) - } else { + if wsbuilder.ProvisionerVersionSupportsDynamicParameters(tf.ProvisionerdVersion) { api.handleDynamicParameters(rw, r, tf, templateVersion) + } else { + api.handleStaticParameters(rw, r, templateVersion.ID) } } diff --git a/coderd/workspacebuilds.go b/coderd/workspacebuilds.go index 719d4e2a48123..08b90b834ccca 100644 --- a/coderd/workspacebuilds.go +++ b/coderd/workspacebuilds.go @@ -338,6 +338,7 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) { RichParameterValues(createBuild.RichParameterValues). LogLevel(string(createBuild.LogLevel)). DeploymentValues(api.Options.DeploymentValues). + Experiments(api.Experiments). TemplateVersionPresetID(createBuild.TemplateVersionPresetID) var ( @@ -383,6 +384,22 @@ func (api *API) postWorkspaceBuilds(rw http.ResponseWriter, r *http.Request) { builder = builder.State(createBuild.ProvisionerState) } + // Only defer to dynamic parameters if the experiment is enabled. 
+ if api.Experiments.Enabled(codersdk.ExperimentDynamicParameters) { + if createBuild.EnableDynamicParameters != nil { + // Explicit opt-in + builder = builder.DynamicParameters(*createBuild.EnableDynamicParameters) + } + } else { + if createBuild.EnableDynamicParameters != nil { + api.Logger.Warn(ctx, "ignoring dynamic parameter field sent by request, the experiment is not enabled", + slog.F("field", *createBuild.EnableDynamicParameters), + slog.F("user", apiKey.UserID.String()), + slog.F("transition", string(createBuild.Transition)), + ) + } + } + workspaceBuild, provisionerJob, provisionerDaemons, err = builder.Build( ctx, tx, diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 35960d1f95a12..fe0c2d3f609a2 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -704,6 +704,8 @@ func createWorkspace( Reason(database.BuildReasonInitiator). Initiator(initiatorID). ActiveVersion(). + Experiments(api.Experiments). + DeploymentValues(api.DeploymentValues). RichParameterValues(req.RichParameterValues) if req.TemplateVersionID != uuid.Nil { builder = builder.VersionID(req.TemplateVersionID) @@ -716,7 +718,7 @@ func createWorkspace( } if req.EnableDynamicParameters && api.Experiments.Enabled(codersdk.ExperimentDynamicParameters) { - builder = builder.UsingDynamicParameters() + builder = builder.DynamicParameters(req.EnableDynamicParameters) } workspaceBuild, provisionerJob, provisionerDaemons, err = builder.Build( diff --git a/coderd/wsbuilder/wsbuilder.go b/coderd/wsbuilder/wsbuilder.go index 64389b7532066..46035f28dda77 100644 --- a/coderd/wsbuilder/wsbuilder.go +++ b/coderd/wsbuilder/wsbuilder.go @@ -13,7 +13,9 @@ import ( "github.com/hashicorp/hcl/v2" "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/coder/coder/v2/apiversion" "github.com/coder/coder/v2/coderd/rbac/policy" + "github.com/coder/coder/v2/coderd/util/ptr" "github.com/coder/coder/v2/provisioner/terraform/tfparse" "github.com/coder/coder/v2/provisionersdk" sdkproto 
"github.com/coder/coder/v2/provisionersdk/proto" @@ -51,9 +53,11 @@ type Builder struct { state stateTarget logLevel string deploymentValues *codersdk.DeploymentValues + experiments codersdk.Experiments - richParameterValues []codersdk.WorkspaceBuildParameter - dynamicParametersEnabled bool + richParameterValues []codersdk.WorkspaceBuildParameter + // dynamicParametersEnabled is non-nil if set externally + dynamicParametersEnabled *bool initiator uuid.UUID reason database.BuildReason templateVersionPresetID uuid.UUID @@ -66,6 +70,7 @@ type Builder struct { template *database.Template templateVersion *database.TemplateVersion templateVersionJob *database.ProvisionerJob + terraformValues *database.TemplateVersionTerraformValue templateVersionParameters *[]database.TemplateVersionParameter templateVersionVariables *[]database.TemplateVersionVariable templateVersionWorkspaceTags *[]database.TemplateVersionWorkspaceTag @@ -155,6 +160,14 @@ func (b Builder) DeploymentValues(dv *codersdk.DeploymentValues) Builder { return b } +func (b Builder) Experiments(exp codersdk.Experiments) Builder { + // nolint: revive + cpy := make(codersdk.Experiments, len(exp)) + copy(cpy, exp) + b.experiments = cpy + return b +} + func (b Builder) Initiator(u uuid.UUID) Builder { // nolint: revive b.initiator = u @@ -187,8 +200,9 @@ func (b Builder) MarkPrebuiltWorkspaceClaim() Builder { return b } -func (b Builder) UsingDynamicParameters() Builder { - b.dynamicParametersEnabled = true +func (b Builder) DynamicParameters(using bool) Builder { + // nolint: revive + b.dynamicParametersEnabled = ptr.Ref(using) return b } @@ -516,6 +530,22 @@ func (b *Builder) getTemplateVersionID() (uuid.UUID, error) { return bld.TemplateVersionID, nil } +func (b *Builder) getTemplateTerraformValues() (*database.TemplateVersionTerraformValue, error) { + if b.terraformValues != nil { + return b.terraformValues, nil + } + v, err := b.getTemplateVersion() + if err != nil { + return nil, xerrors.Errorf("get template 
version so we can get terraform values: %w", err) + } + vals, err := b.store.GetTemplateVersionTerraformValues(b.ctx, v.ID) + if err != nil { + return nil, xerrors.Errorf("get template version terraform values %s: %w", v.JobID, err) + } + b.terraformValues = &vals + return b.terraformValues, err +} + func (b *Builder) getLastBuild() (*database.WorkspaceBuild, error) { if b.lastBuild != nil { return b.lastBuild, nil @@ -593,9 +623,10 @@ func (b *Builder) getParameters() (names, values []string, err error) { return nil, nil, BuildError{http.StatusBadRequest, "Unable to build workspace with unsupported parameters", err} } - if b.dynamicParametersEnabled { - // Dynamic parameters skip all parameter validation. - // Pass the user's input as is. + // Dynamic parameters skip all parameter validation. + // Deleting a workspace also should skip parameter validation. + // Pass the user's input as is. + if b.usingDynamicParameters() { // TODO: The previous behavior was only to pass param values // for parameters that exist. 
Since dynamic params can have // conditional parameter existence, the static frame of reference @@ -989,3 +1020,36 @@ func (b *Builder) checkRunningBuild() error { } return nil } + +func (b *Builder) usingDynamicParameters() bool { + if !b.experiments.Enabled(codersdk.ExperimentDynamicParameters) { + // Experiment required + return false + } + + vals, err := b.getTemplateTerraformValues() + if err != nil { + return false + } + + if !ProvisionerVersionSupportsDynamicParameters(vals.ProvisionerdVersion) { + return false + } + + if b.dynamicParametersEnabled != nil { + return *b.dynamicParametersEnabled + } + + tpl, err := b.getTemplate() + if err != nil { + return false // Let another part of the code get this error + } + return !tpl.UseClassicParameterFlow +} + +func ProvisionerVersionSupportsDynamicParameters(version string) bool { + major, minor, err := apiversion.Parse(version) + // If the api version is not valid or less than 1.6, we need to use the static parameters + useStaticParams := err != nil || major < 1 || (major == 1 && minor < 6) + return !useStaticParams +} diff --git a/coderd/wsbuilder/wsbuilder_test.go b/coderd/wsbuilder/wsbuilder_test.go index 00b7b5f0ae08b..abe5e3fe9b8b7 100644 --- a/coderd/wsbuilder/wsbuilder_test.go +++ b/coderd/wsbuilder/wsbuilder_test.go @@ -839,6 +839,32 @@ func TestWorkspaceBuildWithPreset(t *testing.T) { req.NoError(err) } +func TestProvisionerVersionSupportsDynamicParameters(t *testing.T) { + t.Parallel() + + for v, dyn := range map[string]bool{ + "": false, + "na": false, + "0.0": false, + "0.10": false, + "1.4": false, + "1.5": false, + "1.6": true, + "1.7": true, + "1.8": true, + "2.0": true, + "2.17": true, + "4.0": true, + } { + t.Run(v, func(t *testing.T) { + t.Parallel() + + does := wsbuilder.ProvisionerVersionSupportsDynamicParameters(v) + require.Equal(t, dyn, does) + }) + } +} + type txExpect func(mTx *dbmock.MockStore) func expectDB(t *testing.T, opts ...txExpect) *dbmock.MockStore { diff --git 
a/codersdk/workspaces.go b/codersdk/workspaces.go index b39b220ca33b8..e0f1b9b1e2c2a 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -110,6 +110,10 @@ type CreateWorkspaceBuildRequest struct { LogLevel ProvisionerLogLevel `json:"log_level,omitempty" validate:"omitempty,oneof=debug"` // TemplateVersionPresetID is the ID of the template version preset to use for the build. TemplateVersionPresetID uuid.UUID `json:"template_version_preset_id,omitempty" format:"uuid"` + // EnableDynamicParameters skips some of the static parameter checking. + // It will default to whatever the template has marked as the default experience. + // Requires the "dynamic-experiment" to be used. + EnableDynamicParameters *bool `json:"enable_dynamic_parameters,omitempty"` } type WorkspaceOptions struct { diff --git a/docs/reference/api/builds.md b/docs/reference/api/builds.md index 00417c700cdfd..3cfd25f2a6e0f 100644 --- a/docs/reference/api/builds.md +++ b/docs/reference/api/builds.md @@ -1731,6 +1731,7 @@ curl -X POST http://coder-server:8080/api/v2/workspaces/{workspace}/builds \ ```json { "dry_run": true, + "enable_dynamic_parameters": true, "log_level": "debug", "orphan": true, "rich_parameter_values": [ diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index b35c35361cb1f..9325d751bc352 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -1917,6 +1917,7 @@ This is required on creation to enable a user-flow of validating a template work ```json { "dry_run": true, + "enable_dynamic_parameters": true, "log_level": "debug", "orphan": true, "rich_parameter_values": [ @@ -1939,6 +1940,7 @@ This is required on creation to enable a user-flow of validating a template work | Name | Type | Required | Restrictions | Description | 
|------------------------------|-------------------------------------------------------------------------------|----------|--------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | `dry_run` | boolean | false | | | +| `enable_dynamic_parameters` | boolean | false | | Enable dynamic parameters skips some of the static parameter checking. It will default to whatever the template has marked as the default experience. Requires the "dynamic-experiment" to be used. | | `log_level` | [codersdk.ProvisionerLogLevel](#codersdkprovisionerloglevel) | false | | Log level changes the default logging verbosity of a provider ("info" if empty). | | `orphan` | boolean | false | | Orphan may be set for the Destroy transition. | | `rich_parameter_values` | array of [codersdk.WorkspaceBuildParameter](#codersdkworkspacebuildparameter) | false | | Rich parameter values are optional. It will write params to the 'workspace' scope. This will overwrite any existing parameters with the same name. This will not delete old params not included in this list. | diff --git a/enterprise/coderd/workspaces_test.go b/enterprise/coderd/workspaces_test.go index 7005c93ca36f5..226232f37bf7f 100644 --- a/enterprise/coderd/workspaces_test.go +++ b/enterprise/coderd/workspaces_test.go @@ -1659,6 +1659,119 @@ func TestTemplateDoesNotAllowUserAutostop(t *testing.T) { }) } +// TestWorkspaceTemplateParamsChange tests a workspace with a parameter that +// validation changes on apply. The params used in create workspace are invalid +// according to the static params on import. +// +// This is testing that dynamic params defers input validation to terraform. +// It does not try to do this in coder/coder. 
+func TestWorkspaceTemplateParamsChange(t *testing.T) { + mainTfTemplate := ` + terraform { + required_providers { + coder = { + source = "coder/coder" + } + } + } + provider "coder" {} + data "coder_workspace" "me" {} + data "coder_workspace_owner" "me" {} + + data "coder_parameter" "param_min" { + name = "param_min" + type = "number" + default = 10 + } + + data "coder_parameter" "param" { + name = "param" + type = "number" + default = 12 + validation { + min = data.coder_parameter.param_min.value + } + } + ` + tfCliConfigPath := downloadProviders(t, mainTfTemplate) + t.Setenv("TF_CLI_CONFIG_FILE", tfCliConfigPath) + + logger := slogtest.Make(t, &slogtest.Options{IgnoreErrors: false}) + dv := coderdtest.DeploymentValues(t) + dv.Experiments = []string{string(codersdk.ExperimentDynamicParameters)} + client, owner := coderdenttest.New(t, &coderdenttest.Options{ + Options: &coderdtest.Options{ + Logger: &logger, + // We intentionally do not run a built-in provisioner daemon here. + IncludeProvisionerDaemon: false, + DeploymentValues: dv, + }, + LicenseOptions: &coderdenttest.LicenseOptions{ + Features: license.Features{ + codersdk.FeatureExternalProvisionerDaemons: 1, + }, + }, + }) + templateAdmin, _ := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID, rbac.RoleTemplateAdmin()) + member, memberUser := coderdtest.CreateAnotherUser(t, client, owner.OrganizationID) + + _ = coderdenttest.NewExternalProvisionerDaemonTerraform(t, client, owner.OrganizationID, nil) + + // This can take a while, so set a relatively long timeout. 
+ ctx := testutil.Context(t, 2*testutil.WaitSuperLong) + + // Creating a template as a template admin must succeed + templateFiles := map[string]string{"main.tf": mainTfTemplate} + tarBytes := testutil.CreateTar(t, templateFiles) + fi, err := templateAdmin.Upload(ctx, "application/x-tar", bytes.NewReader(tarBytes)) + require.NoError(t, err, "failed to upload file") + + tv, err := templateAdmin.CreateTemplateVersion(ctx, owner.OrganizationID, codersdk.CreateTemplateVersionRequest{ + Name: testutil.GetRandomName(t), + FileID: fi.ID, + StorageMethod: codersdk.ProvisionerStorageMethodFile, + Provisioner: codersdk.ProvisionerTypeTerraform, + UserVariableValues: []codersdk.VariableValue{}, + }) + require.NoError(t, err, "failed to create template version") + coderdtest.AwaitTemplateVersionJobCompleted(t, templateAdmin, tv.ID) + tpl := coderdtest.CreateTemplate(t, templateAdmin, owner.OrganizationID, tv.ID) + require.False(t, tpl.UseClassicParameterFlow, "template to use dynamic parameters") + + // When: we create a workspace build using the above template but with + // parameter values that are different from those defined in the template. + // The new values are not valid according to the original plan, but are valid. + ws, err := member.CreateUserWorkspace(ctx, memberUser.Username, codersdk.CreateWorkspaceRequest{ + TemplateID: tpl.ID, + Name: coderdtest.RandomUsername(t), + RichParameterValues: []codersdk.WorkspaceBuildParameter{ + { + Name: "param_min", + Value: "5", + }, + { + Name: "param", + Value: "7", + }, + }, + EnableDynamicParameters: true, + }) + + // Then: the build should succeed. 
The updated value of param_min should be + // used to validate param instead of the value defined in the temp + require.NoError(t, err, "failed to create workspace") + createBuild := coderdtest.AwaitWorkspaceBuildJobCompleted(t, member, ws.LatestBuild.ID) + require.Equal(t, createBuild.Status, codersdk.WorkspaceStatusRunning) + + // Now delete the workspace + build, err := member.CreateWorkspaceBuild(ctx, ws.ID, codersdk.CreateWorkspaceBuildRequest{ + Transition: codersdk.WorkspaceTransitionDelete, + }) + require.NoError(t, err) + build = coderdtest.AwaitWorkspaceBuildJobCompleted(t, member, build.ID) + require.Equal(t, codersdk.WorkspaceStatusDeleted, build.Status) +} + // TestWorkspaceTagsTerraform tests that a workspace can be created with tags. // This is an end-to-end-style test, meaning that we actually run the // real Terraform provisioner and validate that the workspace is created diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 9a73fc9f3d6bf..d367302186870 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -490,6 +490,7 @@ export interface CreateWorkspaceBuildRequest { readonly rich_parameter_values?: readonly WorkspaceBuildParameter[]; readonly log_level?: ProvisionerLogLevel; readonly template_version_preset_id?: string; + readonly enable_dynamic_parameters?: boolean; } // From codersdk/workspaceproxy.go From a123900fe86ded9ddf3ac8f9dda3d8355945544a Mon Sep 17 00:00:00 2001 From: Steven Masley Date: Tue, 20 May 2025 10:45:12 -0500 Subject: [PATCH 32/42] chore: remove coder/preview dependency from codersdk (#17939) --- cli/parameterresolver.go | 2 +- coderd/database/db2sdk/db2sdk.go | 82 ++++++++++++ coderd/parameters.go | 9 +- coderd/parameters_test.go | 16 +-- codersdk/parameters.go | 118 ++++++++++++++++-- codersdk/templateversions.go | 16 --- enterprise/coderd/parameters_test.go | 12 +- go.mod | 8 +- go.sum | 16 +-- site/src/api/typesGenerated.ts | 96 +++++++++----- 
.../CreateWorkspacePageViewExperimental.tsx | 4 +- 11 files changed, 292 insertions(+), 87 deletions(-) diff --git a/cli/parameterresolver.go b/cli/parameterresolver.go index 41c61d5315a77..40625331fa6aa 100644 --- a/cli/parameterresolver.go +++ b/cli/parameterresolver.go @@ -226,7 +226,7 @@ func (pr *ParameterResolver) resolveWithInput(resolved []codersdk.WorkspaceBuild if p != nil { continue } - // Parameter has not been resolved yet, so CLI needs to determine if user should input it. + // PreviewParameter has not been resolved yet, so CLI needs to determine if user should input it. firstTimeUse := pr.isFirstTimeUse(tvp.Name) promptParameterOption := pr.isLastBuildParameterInvalidOption(tvp) diff --git a/coderd/database/db2sdk/db2sdk.go b/coderd/database/db2sdk/db2sdk.go index 18d1d8a6ac788..ed258a07820ab 100644 --- a/coderd/database/db2sdk/db2sdk.go +++ b/coderd/database/db2sdk/db2sdk.go @@ -12,6 +12,7 @@ import ( "time" "github.com/google/uuid" + "github.com/hashicorp/hcl/v2" "golang.org/x/xerrors" "tailscale.com/tailcfg" @@ -24,6 +25,7 @@ import ( "github.com/coder/coder/v2/codersdk" "github.com/coder/coder/v2/provisionersdk/proto" "github.com/coder/coder/v2/tailnet" + previewtypes "github.com/coder/preview/types" ) // List is a helper function to reduce boilerplate when converting slices of @@ -764,3 +766,83 @@ func Chat(chat database.Chat) codersdk.Chat { func Chats(chats []database.Chat) []codersdk.Chat { return List(chats, Chat) } + +func PreviewParameter(param previewtypes.Parameter) codersdk.PreviewParameter { + return codersdk.PreviewParameter{ + PreviewParameterData: codersdk.PreviewParameterData{ + Name: param.Name, + DisplayName: param.DisplayName, + Description: param.Description, + Type: codersdk.OptionType(param.Type), + FormType: codersdk.ParameterFormType(param.FormType), + Styling: codersdk.PreviewParameterStyling{ + Placeholder: param.Styling.Placeholder, + Disabled: param.Styling.Disabled, + Label: param.Styling.Label, + }, + Mutable: 
param.Mutable, + DefaultValue: PreviewHCLString(param.DefaultValue), + Icon: param.Icon, + Options: List(param.Options, PreviewParameterOption), + Validations: List(param.Validations, PreviewParameterValidation), + Required: param.Required, + Order: param.Order, + Ephemeral: param.Ephemeral, + }, + Value: PreviewHCLString(param.Value), + Diagnostics: PreviewDiagnostics(param.Diagnostics), + } +} + +func HCLDiagnostics(d hcl.Diagnostics) []codersdk.FriendlyDiagnostic { + return PreviewDiagnostics(previewtypes.Diagnostics(d)) +} + +func PreviewDiagnostics(d previewtypes.Diagnostics) []codersdk.FriendlyDiagnostic { + f := d.FriendlyDiagnostics() + return List(f, func(f previewtypes.FriendlyDiagnostic) codersdk.FriendlyDiagnostic { + return codersdk.FriendlyDiagnostic{ + Severity: codersdk.DiagnosticSeverityString(f.Severity), + Summary: f.Summary, + Detail: f.Detail, + Extra: codersdk.DiagnosticExtra{ + Code: f.Extra.Code, + }, + } + }) +} + +func PreviewHCLString(h previewtypes.HCLString) codersdk.NullHCLString { + n := h.NullHCLString() + return codersdk.NullHCLString{ + Value: n.Value, + Valid: n.Valid, + } +} + +func PreviewParameterOption(o *previewtypes.ParameterOption) codersdk.PreviewParameterOption { + if o == nil { + // This should never be sent + return codersdk.PreviewParameterOption{} + } + return codersdk.PreviewParameterOption{ + Name: o.Name, + Description: o.Description, + Value: PreviewHCLString(o.Value), + Icon: o.Icon, + } +} + +func PreviewParameterValidation(v *previewtypes.ParameterValidation) codersdk.PreviewParameterValidation { + if v == nil { + // This should never be sent + return codersdk.PreviewParameterValidation{} + } + return codersdk.PreviewParameterValidation{ + Error: v.Error, + Regex: v.Regex, + Min: v.Min, + Max: v.Max, + Monotonic: v.Monotonic, + } +} diff --git a/coderd/parameters.go b/coderd/parameters.go index 13b1346991c90..1a0c1f92ddbf9 100644 --- a/coderd/parameters.go +++ b/coderd/parameters.go @@ -13,6 +13,7 @@ import ( 
"golang.org/x/xerrors" "github.com/coder/coder/v2/coderd/database" + "github.com/coder/coder/v2/coderd/database/db2sdk" "github.com/coder/coder/v2/coderd/database/dbauthz" "github.com/coder/coder/v2/coderd/files" "github.com/coder/coder/v2/coderd/httpapi" @@ -286,10 +287,10 @@ func (api *API) handleParameterWebsocket(rw http.ResponseWriter, r *http.Request result, diagnostics := render(ctx, map[string]string{}) response := codersdk.DynamicParametersResponse{ ID: -1, // Always start with -1. - Diagnostics: previewtypes.Diagnostics(diagnostics), + Diagnostics: db2sdk.HCLDiagnostics(diagnostics), } if result != nil { - response.Parameters = result.Parameters + response.Parameters = db2sdk.List(result.Parameters, db2sdk.PreviewParameter) } err = stream.Send(response) if err != nil { @@ -314,10 +315,10 @@ func (api *API) handleParameterWebsocket(rw http.ResponseWriter, r *http.Request result, diagnostics := render(ctx, update.Inputs) response := codersdk.DynamicParametersResponse{ ID: update.ID, - Diagnostics: previewtypes.Diagnostics(diagnostics), + Diagnostics: db2sdk.HCLDiagnostics(diagnostics), } if result != nil { - response.Parameters = result.Parameters + response.Parameters = db2sdk.List(result.Parameters, db2sdk.PreviewParameter) } err = stream.Send(response) if err != nil { diff --git a/coderd/parameters_test.go b/coderd/parameters_test.go index e7fc77f141efc..8edadc9b7e797 100644 --- a/coderd/parameters_test.go +++ b/coderd/parameters_test.go @@ -68,8 +68,8 @@ func TestDynamicParametersOwnerSSHPublicKey(t *testing.T) { require.Equal(t, -1, preview.ID) require.Empty(t, preview.Diagnostics) require.Equal(t, "public_key", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, sshKey.PublicKey, preview.Parameters[0].Value.Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, sshKey.PublicKey, preview.Parameters[0].Value.Value) } func TestDynamicParametersWithTerraformValues(t 
*testing.T) { @@ -103,8 +103,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) { require.Len(t, preview.Parameters, 1) require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, "CL", preview.Parameters[0].Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, "CL", preview.Parameters[0].Value.Value) }) // OldProvisioners use the static parameters in the dynamic param flow @@ -154,8 +154,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) { require.Contains(t, preview.Diagnostics[0].Summary, "required metadata to support dynamic parameters") require.Len(t, preview.Parameters, 1) require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, defaultValue, preview.Parameters[0].Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, defaultValue, preview.Parameters[0].Value.Value) // Test some inputs for _, exp := range []string{defaultValue, "GO", "Invalid", defaultValue} { @@ -182,8 +182,8 @@ func TestDynamicParametersWithTerraformValues(t *testing.T) { require.Len(t, preview.Parameters[0].Diagnostics, 0) } require.Equal(t, "jetbrains_ide", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, exp, preview.Parameters[0].Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, exp, preview.Parameters[0].Value.Value) } }) diff --git a/codersdk/parameters.go b/codersdk/parameters.go index 881aaf99f573c..d81dc7cf55ca0 100644 --- a/codersdk/parameters.go +++ b/codersdk/parameters.go @@ -7,17 +7,121 @@ import ( "github.com/google/uuid" "github.com/coder/coder/v2/codersdk/wsjson" - previewtypes "github.com/coder/preview/types" "github.com/coder/websocket" ) -// FriendlyDiagnostic is included to guarantee it is generated in the output -// types. 
This is used as the type override for `previewtypes.Diagnostic`. -type FriendlyDiagnostic = previewtypes.FriendlyDiagnostic +type ParameterFormType string -// NullHCLString is included to guarantee it is generated in the output -// types. This is used as the type override for `previewtypes.HCLString`. -type NullHCLString = previewtypes.NullHCLString +const ( + ParameterFormTypeDefault ParameterFormType = "" + ParameterFormTypeRadio ParameterFormType = "radio" + ParameterFormTypeSlider ParameterFormType = "slider" + ParameterFormTypeInput ParameterFormType = "input" + ParameterFormTypeDropdown ParameterFormType = "dropdown" + ParameterFormTypeCheckbox ParameterFormType = "checkbox" + ParameterFormTypeSwitch ParameterFormType = "switch" + ParameterFormTypeMultiSelect ParameterFormType = "multi-select" + ParameterFormTypeTagSelect ParameterFormType = "tag-select" + ParameterFormTypeTextArea ParameterFormType = "textarea" + ParameterFormTypeError ParameterFormType = "error" +) + +type OptionType string + +const ( + OptionTypeString OptionType = "string" + OptionTypeNumber OptionType = "number" + OptionTypeBoolean OptionType = "bool" + OptionTypeListString OptionType = "list(string)" +) + +type DiagnosticSeverityString string + +const ( + DiagnosticSeverityError DiagnosticSeverityString = "error" + DiagnosticSeverityWarning DiagnosticSeverityString = "warning" +) + +// FriendlyDiagnostic == previewtypes.FriendlyDiagnostic +// Copied to avoid import deps +type FriendlyDiagnostic struct { + Severity DiagnosticSeverityString `json:"severity"` + Summary string `json:"summary"` + Detail string `json:"detail"` + + Extra DiagnosticExtra `json:"extra"` +} + +type DiagnosticExtra struct { + Code string `json:"code"` +} + +// NullHCLString == `previewtypes.NullHCLString`. 
+type NullHCLString struct { + Value string `json:"value"` + Valid bool `json:"valid"` +} + +type PreviewParameter struct { + PreviewParameterData + Value NullHCLString `json:"value"` + Diagnostics []FriendlyDiagnostic `json:"diagnostics"` +} + +type PreviewParameterData struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + Description string `json:"description"` + Type OptionType `json:"type"` + FormType ParameterFormType `json:"form_type"` + Styling PreviewParameterStyling `json:"styling"` + Mutable bool `json:"mutable"` + DefaultValue NullHCLString `json:"default_value"` + Icon string `json:"icon"` + Options []PreviewParameterOption `json:"options"` + Validations []PreviewParameterValidation `json:"validations"` + Required bool `json:"required"` + // legacy_variable_name was removed (= 14) + Order int64 `json:"order"` + Ephemeral bool `json:"ephemeral"` +} + +type PreviewParameterStyling struct { + Placeholder *string `json:"placeholder,omitempty"` + Disabled *bool `json:"disabled,omitempty"` + Label *string `json:"label,omitempty"` +} + +type PreviewParameterOption struct { + Name string `json:"name"` + Description string `json:"description"` + Value NullHCLString `json:"value"` + Icon string `json:"icon"` +} + +type PreviewParameterValidation struct { + Error string `json:"validation_error"` + + // All validation attributes are optional. + Regex *string `json:"validation_regex"` + Min *int64 `json:"validation_min"` + Max *int64 `json:"validation_max"` + Monotonic *string `json:"validation_monotonic"` +} + +type DynamicParametersRequest struct { + // ID identifies the request. The response contains the same + // ID so that the client can match it to the request. 
+ ID int `json:"id"` + Inputs map[string]string `json:"inputs"` +} + +type DynamicParametersResponse struct { + ID int `json:"id"` + Diagnostics []FriendlyDiagnostic `json:"diagnostics"` + Parameters []PreviewParameter `json:"parameters"` + // TODO: Workspace tags +} func (c *Client) TemplateVersionDynamicParameters(ctx context.Context, userID, version uuid.UUID) (*wsjson.Stream[DynamicParametersResponse, DynamicParametersRequest], error) { conn, err := c.Dial(ctx, fmt.Sprintf("/api/v2/users/%s/templateversions/%s/parameters", userID, version), nil) diff --git a/codersdk/templateversions.go b/codersdk/templateversions.go index 42b381fadebce..de8bb7b970957 100644 --- a/codersdk/templateversions.go +++ b/codersdk/templateversions.go @@ -9,8 +9,6 @@ import ( "time" "github.com/google/uuid" - - previewtypes "github.com/coder/preview/types" ) type TemplateVersionWarning string @@ -125,20 +123,6 @@ func (c *Client) CancelTemplateVersion(ctx context.Context, version uuid.UUID) e return nil } -type DynamicParametersRequest struct { - // ID identifies the request. The response contains the same - // ID so that the client can match it to the request. - ID int `json:"id"` - Inputs map[string]string `json:"inputs"` -} - -type DynamicParametersResponse struct { - ID int `json:"id"` - Diagnostics previewtypes.Diagnostics `json:"diagnostics"` - Parameters []previewtypes.Parameter `json:"parameters"` - // TODO: Workspace tags -} - // TemplateVersionParameters returns parameters a template version exposes. 
func (c *Client) TemplateVersionRichParameters(ctx context.Context, version uuid.UUID) ([]TemplateVersionParameter, error) { res, err := c.Request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/templateversions/%s/rich-parameters", version), nil) diff --git a/enterprise/coderd/parameters_test.go b/enterprise/coderd/parameters_test.go index e6bc564e43da2..76bd5a1eafdbb 100644 --- a/enterprise/coderd/parameters_test.go +++ b/enterprise/coderd/parameters_test.go @@ -70,8 +70,8 @@ func TestDynamicParametersOwnerGroups(t *testing.T) { require.Equal(t, -1, preview.ID) require.Empty(t, preview.Diagnostics) require.Equal(t, "group", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value) // Send a new value, and see it reflected err = stream.Send(codersdk.DynamicParametersRequest{ @@ -83,8 +83,8 @@ func TestDynamicParametersOwnerGroups(t *testing.T) { require.Equal(t, 1, preview.ID) require.Empty(t, preview.Diagnostics) require.Equal(t, "group", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, group.Name, preview.Parameters[0].Value.Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, group.Name, preview.Parameters[0].Value.Value) // Back to default err = stream.Send(codersdk.DynamicParametersRequest{ @@ -96,6 +96,6 @@ func TestDynamicParametersOwnerGroups(t *testing.T) { require.Equal(t, 3, preview.ID) require.Empty(t, preview.Diagnostics) require.Equal(t, "group", preview.Parameters[0].Name) - require.True(t, preview.Parameters[0].Value.Valid()) - require.Equal(t, database.EveryoneGroup, preview.Parameters[0].Value.Value.AsString()) + require.True(t, preview.Parameters[0].Value.Valid) + require.Equal(t, database.EveryoneGroup, 
preview.Parameters[0].Value.Value) } diff --git a/go.mod b/go.mod index c43feefefee4d..0c6b482b38f4e 100644 --- a/go.mod +++ b/go.mod @@ -96,12 +96,12 @@ require ( github.com/chromedp/chromedp v0.13.3 github.com/cli/safeexec v1.0.1 github.com/coder/flog v1.1.0 - github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b + github.com/coder/guts v1.5.0 github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 github.com/coder/quartz v0.1.3 github.com/coder/retry v1.5.1 github.com/coder/serpent v0.10.0 - github.com/coder/terraform-provider-coder/v2 v2.4.1 + github.com/coder/terraform-provider-coder/v2 v2.4.2 github.com/coder/websocket v1.8.13 github.com/coder/wgtunnel v0.1.13-0.20240522110300-ade90dfb2da0 github.com/coreos/go-oidc/v3 v3.14.1 @@ -204,7 +204,7 @@ require ( golang.org/x/sys v0.33.0 golang.org/x/term v0.32.0 golang.org/x/text v0.25.0 // indirect - golang.org/x/tools v0.32.0 + golang.org/x/tools v0.33.0 golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da google.golang.org/api v0.231.0 google.golang.org/grpc v1.72.0 @@ -485,7 +485,7 @@ require ( require ( github.com/anthropics/anthropic-sdk-go v0.2.0-beta.3 - github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 + github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d github.com/fsnotify/fsnotify v1.9.0 github.com/kylecarbs/aisdk-go v0.0.8 github.com/mark3labs/mcp-go v0.28.0 diff --git a/go.sum b/go.sum index 9ffd716b334de..0f5638614d275 100644 --- a/go.sum +++ b/go.sum @@ -905,14 +905,14 @@ github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322 h1:m0lPZjlQ7vdVp github.com/coder/go-httpstat v0.0.0-20230801153223-321c88088322/go.mod h1:rOLFDDVKVFiDqZFXoteXc97YXx7kFi9kYqR+2ETPkLQ= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136 h1:0RgB61LcNs24WOxc3PBvygSNTQurm0PYPujJjLLOzs0= github.com/coder/go-scim/pkg/v2 v2.0.0-20230221055123-1d63c1222136/go.mod h1:VkD1P761nykiq75dz+4iFqIQIZka189tx1BQLOp0Skc= -github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b 
h1:tfLKcE2s6D7YpFk7MUUCDE0Xbbmac+k2GqO8KMjv/Ug= -github.com/coder/guts v1.3.1-0.20250428170043-ad369017e95b/go.mod h1:31NO4z6MVTOD4WaCLqE/hUAHGgNok9sRbuMc/LZFopI= +github.com/coder/guts v1.5.0 h1:a94apf7xMf5jDdg1bIHzncbRiTn3+BvBZgrFSDbUnyI= +github.com/coder/guts v1.5.0/go.mod h1:0Sbv5Kp83u1Nl7MIQiV2zmacJ3o02I341bkWkjWXSUQ= github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048 h1:3jzYUlGH7ZELIH4XggXhnTnP05FCYiAFeQpoN+gNR5I= github.com/coder/pq v1.10.5-0.20240813183442-0c420cb5a048/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0 h1:3A0ES21Ke+FxEM8CXx9n47SZOKOpgSE1bbJzlE4qPVs= github.com/coder/pretty v0.0.0-20230908205945-e89ba86370e0/go.mod h1:5UuS2Ts+nTToAMeOjNlnHFkPahrtDkmpydBen/3wgZc= -github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319 h1:flPwcvOZ9RwENDYcLOnfYEClbKWfFvpQCddODdSS6Co= -github.com/coder/preview v0.0.2-0.20250516233606-a1da43489319/go.mod h1:GfkwIv5gQLpL01qeGU1/YoxoFtt5trzCqnWZLo77clU= +github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d h1:MxAAuqcno5hMM45Ihl3KAjVOXbyZyt/+tjSiq9XMTC0= +github.com/coder/preview v0.0.2-0.20250520134327-ac391431027d/go.mod h1:9bwyhQSVDjcxAWuFFaG6/qBqhaiW5oqF5PEQMhevKLs= github.com/coder/quartz v0.1.3 h1:hA2nI8uUA2fNN9uhXv2I4xZD4aHkA7oH3g2t03v4xf8= github.com/coder/quartz v0.1.3/go.mod h1:vsiCc+AHViMKH2CQpGIpFgdHIEQsxwm8yCscqKmzbRA= github.com/coder/retry v1.5.1 h1:iWu8YnD8YqHs3XwqrqsjoBTAVqT9ml6z9ViJ2wlMiqc= @@ -925,8 +925,8 @@ github.com/coder/tailscale v1.1.1-0.20250422090654-5090e715905e h1:nope/SZfoLB9M github.com/coder/tailscale v1.1.1-0.20250422090654-5090e715905e/go.mod h1:1ggFFdHTRjPRu9Yc1yA7nVHBYB50w9Ce7VIXNqcW6Ko= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e h1:JNLPDi2P73laR1oAclY6jWzAbucf70ASAvf5mh2cME0= github.com/coder/terraform-config-inspect v0.0.0-20250107175719-6d06d90c630e/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= -github.com/coder/terraform-provider-coder/v2 v2.4.1 
h1:+HxLJVENJ+kvGhibQ0jbr8Evi6M857d9691ytxNbv90= -github.com/coder/terraform-provider-coder/v2 v2.4.1/go.mod h1:2kaBpn5k9ZWtgKq5k4JbkVZG9DzEqR4mJSmpdshcO+s= +github.com/coder/terraform-provider-coder/v2 v2.4.2 h1:41SJkgwgiA555kwQzGIQcNS3bCm12sVMUmBSa5zGr+A= +github.com/coder/terraform-provider-coder/v2 v2.4.2/go.mod h1:2kaBpn5k9ZWtgKq5k4JbkVZG9DzEqR4mJSmpdshcO+s= github.com/coder/trivy v0.0.0-20250409153844-e6b004bc465a h1:yryP7e+IQUAArlycH4hQrjXQ64eRNbxsV5/wuVXHgME= github.com/coder/trivy v0.0.0-20250409153844-e6b004bc465a/go.mod h1:dDvq9axp3kZsT63gY2Znd1iwzfqDq3kXbQnccIrjRYY= github.com/coder/websocket v1.8.13 h1:f3QZdXy7uGVz+4uCJy2nTZyM0yTBj8yANEHhqlXZ9FE= @@ -2412,8 +2412,8 @@ golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= -golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= +golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc= +golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index d367302186870..4e337bd7c65f0 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -349,7 +349,7 @@ export interface ConvertLoginRequest { // From codersdk/chat.go export interface 
CreateChatMessageRequest { readonly model: string; - // embedded anonymous struct, please fix by naming it + // external type "github.com/kylecarbs/aisdk-go.Message", to include this type the package must be explicitly included in the parsing readonly message: unknown; readonly thinking: boolean; } @@ -741,6 +741,19 @@ export interface DeploymentValues { readonly address?: string; } +// From codersdk/parameters.go +export interface DiagnosticExtra { + readonly code: string; +} + +// From codersdk/parameters.go +export type DiagnosticSeverityString = "error" | "warning"; + +export const DiagnosticSeverityStrings: DiagnosticSeverityString[] = [ + "error", + "warning", +]; + // From codersdk/workspaceagents.go export type DisplayApp = | "port_forwarding_helper" @@ -757,16 +770,16 @@ export const DisplayApps: DisplayApp[] = [ "web_terminal", ]; -// From codersdk/templateversions.go +// From codersdk/parameters.go export interface DynamicParametersRequest { readonly id: number; readonly inputs: Record; } -// From codersdk/templateversions.go +// From codersdk/parameters.go export interface DynamicParametersResponse { readonly id: number; - readonly diagnostics: PreviewDiagnostics; + readonly diagnostics: readonly FriendlyDiagnostic[]; readonly parameters: readonly PreviewParameter[]; } @@ -969,10 +982,10 @@ export const FormatZip = "zip"; // From codersdk/parameters.go export interface FriendlyDiagnostic { - readonly severity: PreviewDiagnosticSeverityString; + readonly severity: DiagnosticSeverityString; readonly summary: string; readonly detail: string; - readonly extra: PreviewDiagnosticExtra; + readonly extra: DiagnosticExtra; } // From codersdk/apikey.go @@ -1596,6 +1609,16 @@ export interface OIDCConfig { readonly skip_issuer_checks: boolean; } +// From codersdk/parameters.go +export type OptionType = "bool" | "list(string)" | "number" | "string"; + +export const OptionTypes: OptionType[] = [ + "bool", + "list(string)", + "number", + "string", +]; + // From 
codersdk/organizations.go export interface Organization extends MinimalOrganization { readonly description: string; @@ -1663,6 +1686,34 @@ export interface Pagination { readonly offset?: number; } +// From codersdk/parameters.go +export type ParameterFormType = + | "checkbox" + | "" + | "dropdown" + | "error" + | "input" + | "multi-select" + | "radio" + | "slider" + | "switch" + | "tag-select" + | "textarea"; + +export const ParameterFormTypes: ParameterFormType[] = [ + "checkbox", + "", + "dropdown", + "error", + "input", + "multi-select", + "radio", + "slider", + "switch", + "tag-select", + "textarea", +]; + // From codersdk/idpsync.go export interface PatchGroupIDPSyncConfigRequest { readonly field: string; @@ -1778,33 +1829,19 @@ export interface PresetParameter { readonly Value: string; } -// From types/diagnostics.go -export interface PreviewDiagnosticExtra { - readonly code: string; - // empty interface{} type, falling back to unknown - readonly Wrapped: unknown; -} - -// From types/diagnostics.go -export type PreviewDiagnosticSeverityString = string; - -// From types/diagnostics.go -export type PreviewDiagnostics = readonly FriendlyDiagnostic[]; - -// From types/parameter.go +// From codersdk/parameters.go export interface PreviewParameter extends PreviewParameterData { readonly value: NullHCLString; - readonly diagnostics: PreviewDiagnostics; + readonly diagnostics: readonly FriendlyDiagnostic[]; } -// From types/parameter.go +// From codersdk/parameters.go export interface PreviewParameterData { readonly name: string; readonly display_name: string; readonly description: string; - readonly type: PreviewParameterType; - // this is likely an enum in an external package "github.com/coder/terraform-provider-coder/v2/provider.ParameterFormType" - readonly form_type: string; + readonly type: OptionType; + readonly form_type: ParameterFormType; readonly styling: PreviewParameterStyling; readonly mutable: boolean; readonly default_value: NullHCLString; @@ -1816,7 
+1853,7 @@ export interface PreviewParameterData { readonly ephemeral: boolean; } -// From types/parameter.go +// From codersdk/parameters.go export interface PreviewParameterOption { readonly name: string; readonly description: string; @@ -1824,17 +1861,14 @@ export interface PreviewParameterOption { readonly icon: string; } -// From types/parameter.go +// From codersdk/parameters.go export interface PreviewParameterStyling { readonly placeholder?: string; readonly disabled?: boolean; readonly label?: string; } -// From types/enum.go -export type PreviewParameterType = string; - -// From types/parameter.go +// From codersdk/parameters.go export interface PreviewParameterValidation { readonly validation_error: string; readonly validation_regex: string | null; diff --git a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx index 630faf8e806d2..cb4451b53acd7 100644 --- a/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx +++ b/site/src/pages/CreateWorkspacePage/CreateWorkspacePageViewExperimental.tsx @@ -1,5 +1,5 @@ import type * as TypesGen from "api/typesGenerated"; -import type { PreviewDiagnostics, PreviewParameter } from "api/typesGenerated"; +import type { FriendlyDiagnostic, PreviewParameter } from "api/typesGenerated"; import { Alert } from "components/Alert/Alert"; import { ErrorAlert } from "components/Alert/ErrorAlert"; import { Avatar } from "components/Avatar/Avatar"; @@ -51,7 +51,7 @@ export interface CreateWorkspacePageViewExperimentalProps { creatingWorkspace: boolean; defaultName?: string | null; defaultOwner: TypesGen.User; - diagnostics: PreviewDiagnostics; + diagnostics: readonly FriendlyDiagnostic[]; disabledParams?: string[]; error: unknown; externalAuth: TypesGen.TemplateVersionExternalAuth[]; From b51c902e4859919dba9c30f804cce3642a6735a1 Mon Sep 17 00:00:00 2001 From: Edward Angert Date: Tue, 20 May 2025 12:46:07 -0400 
Subject: [PATCH 33/42] docs: add early access badge to devcontainers admin (#17937) [preview](https://coder.com/docs/@dev-container-tweaks/admin/templates/extending-templates/devcontainers) --------- Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com> --- docs/admin/templates/extending-templates/devcontainers.md | 2 ++ docs/manifest.json | 3 ++- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/admin/templates/extending-templates/devcontainers.md b/docs/admin/templates/extending-templates/devcontainers.md index 4894a012476a1..d4284bf48efde 100644 --- a/docs/admin/templates/extending-templates/devcontainers.md +++ b/docs/admin/templates/extending-templates/devcontainers.md @@ -122,3 +122,5 @@ resource "docker_container" "workspace" { ## Next Steps - [Dev Containers Integration](../../../user-guides/devcontainers/index.md) +- [Working with Dev Containers](../../../user-guides/devcontainers/working-with-dev-containers.md) +- [Troubleshooting Dev Containers](../../../user-guides/devcontainers/troubleshooting-dev-containers.md) diff --git a/docs/manifest.json b/docs/manifest.json index 3af0cc7505057..6c85934017ebb 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -506,7 +506,8 @@ { "title": "Configure a template for dev containers", "description": "How to use configure your template for dev containers", - "path": "./admin/templates/extending-templates/devcontainers.md" + "path": "./admin/templates/extending-templates/devcontainers.md", + "state": ["early access"] }, { "title": "Process Logging", From 55313cffbccb03246de8c46554e464c40ec77a30 Mon Sep 17 00:00:00 2001 From: Julio <13398285+ggjulio@users.noreply.github.com> Date: Tue, 20 May 2025 19:19:38 +0200 Subject: [PATCH 34/42] chore: add vsphere icon (#17936) --- site/src/theme/icons.json | 1 + site/static/icon/vsphere.svg | 14 ++++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 site/static/icon/vsphere.svg diff --git a/site/src/theme/icons.json 
b/site/src/theme/icons.json index 96f3abb704ef9..8e92dd9a48198 100644 --- a/site/src/theme/icons.json +++ b/site/src/theme/icons.json @@ -102,6 +102,7 @@ "typescript.svg", "ubuntu.svg", "vault.svg", + "vsphere.svg", "webstorm.svg", "widgets.svg", "windsurf.svg", diff --git a/site/static/icon/vsphere.svg b/site/static/icon/vsphere.svg new file mode 100644 index 0000000000000..e50dd3ca83c69 --- /dev/null +++ b/site/static/icon/vsphere.svg @@ -0,0 +1,14 @@ + + + + + + + + From b551a062d7a418ff0c6c83164759f9d055bf0b35 Mon Sep 17 00:00:00 2001 From: Thomas Kosiewski Date: Tue, 20 May 2025 19:35:19 +0200 Subject: [PATCH 35/42] fix: correct environment variable name for MCP app status slug (#17948) Fixed environment variable name for app status slug in Claude MCP configuration from `CODER_MCP_CLAUDE_APP_STATUS_SLUG` to `CODER_MCP_APP_STATUS_SLUG` to maintain consistency with other MCP environment variables. This also caused the User level Claude.md to not contain instructions to report its progress, so it did not receive status reports. 
--- cli/exp_mcp.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cli/exp_mcp.go b/cli/exp_mcp.go index 6174f0cffbf0e..fb866666daf4a 100644 --- a/cli/exp_mcp.go +++ b/cli/exp_mcp.go @@ -255,7 +255,7 @@ func (*RootCmd) mcpConfigureClaudeCode() *serpent.Command { { Name: "app-status-slug", Description: "The app status slug to use when running the Coder MCP server.", - Env: "CODER_MCP_CLAUDE_APP_STATUS_SLUG", + Env: "CODER_MCP_APP_STATUS_SLUG", Flag: "claude-app-status-slug", Value: serpent.StringOf(&appStatusSlug), }, From 1f54c363753c22927db88abaa4d7f0ffa502a6ce Mon Sep 17 00:00:00 2001 From: Edward Angert Date: Tue, 20 May 2025 15:10:52 -0400 Subject: [PATCH 36/42] docs: rename external-auth heading in setup doc (#17868) to help point searchers to the correct doc [preview](https://coder.com/docs/@setup-ext-auth/admin/setup#continue-your-setup-with-external-authentication) --------- Co-authored-by: EdwardAngert <17991901+EdwardAngert@users.noreply.github.com> --- docs/admin/setup/index.md | 2 +- docs/manifest.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/admin/setup/index.md b/docs/admin/setup/index.md index 96000292266e2..1a34920e733e8 100644 --- a/docs/admin/setup/index.md +++ b/docs/admin/setup/index.md @@ -140,7 +140,7 @@ To configure Coder behind a corporate proxy, set the environment variables `HTTP_PROXY` and `HTTPS_PROXY`. Be sure to restart the server. Lowercase values (e.g. `http_proxy`) are also respected in this case. -## External Authentication +## Continue your setup with external authentication Coder supports external authentication via OAuth2.0. This allows enabling integrations with Git providers, such as GitHub, GitLab, and Bitbucket. 
diff --git a/docs/manifest.json b/docs/manifest.json index 6c85934017ebb..c191eda07c425 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -551,7 +551,7 @@ ] }, { - "title": "External Auth", + "title": "External Authentication", "description": "Learn how to configure external authentication", "path": "./admin/external-auth.md", "icon_path": "./images/icons/plug.svg" From d2d21898f24e559e910011e800615dc14c19b5fc Mon Sep 17 00:00:00 2001 From: Danny Kopping Date: Tue, 20 May 2025 22:16:23 +0200 Subject: [PATCH 37/42] chore: reduce `ignore_changes` suggestion scope (#17947) We probably shouldn't be suggesting `ignore_changes = all`. Only the attributes which cause drift in prebuilds should be ignored; everything else can behave as normal. --------- Signed-off-by: Danny Kopping Co-authored-by: Edward Angert --- .../extending-templates/prebuilt-workspaces.md | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/docs/admin/templates/extending-templates/prebuilt-workspaces.md b/docs/admin/templates/extending-templates/prebuilt-workspaces.md index 3fd82d62d1943..57f3dc0b3109f 100644 --- a/docs/admin/templates/extending-templates/prebuilt-workspaces.md +++ b/docs/admin/templates/extending-templates/prebuilt-workspaces.md @@ -142,7 +142,7 @@ To prevent this, add a `lifecycle` block with `ignore_changes`: ```hcl resource "docker_container" "workspace" { lifecycle { - ignore_changes = all + ignore_changes = [env, image] # include all fields which caused drift } count = data.coder_workspace.me.start_count @@ -151,19 +151,8 @@ resource "docker_container" "workspace" { } ``` -For more targeted control, specify which attributes to ignore: - -```hcl -resource "docker_container" "workspace" { - lifecycle { - ignore_changes = [name] - } - - count = data.coder_workspace.me.start_count - name = "coder-${data.coder_workspace_owner.me.name}-${lower(data.coder_workspace.me.name)}" - ... 
-} -``` +Limit the scope of `ignore_changes` to include only the fields specified in the notification. +If you include too many fields, Terraform might ignore changes that wouldn't otherwise cause drift. Learn more about `ignore_changes` in the [Terraform documentation](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes). From 3e7ff9d9e1c359285a8c39a15947231de6ee74c0 Mon Sep 17 00:00:00 2001 From: Danielle Maywood Date: Tue, 20 May 2025 21:20:56 +0100 Subject: [PATCH 38/42] chore(coderd/rbac): add `Action{Create,Delete}Agent` to `ResourceWorkspace` (#17932) --- coderd/apidoc/docs.go | 4 ++ coderd/apidoc/swagger.json | 4 ++ coderd/database/dbauthz/dbauthz.go | 25 ++++++++-- coderd/database/dbauthz/dbauthz_test.go | 33 ++++++++++++- coderd/database/dbmem/dbmem.go | 27 +++++++++++ coderd/database/dbmetrics/querymetrics.go | 7 +++ coderd/database/dbmock/dbmock.go | 15 ++++++ coderd/database/querier.go | 1 + coderd/database/queries.sql.go | 59 +++++++++++++++++++++++ coderd/database/queries/workspaces.sql | 24 +++++++++ coderd/rbac/object_gen.go | 6 +++ coderd/rbac/policy/policy.go | 6 +++ coderd/rbac/roles.go | 16 ++++-- coderd/rbac/roles_test.go | 11 ++++- codersdk/rbacresources_gen.go | 6 ++- docs/reference/api/members.md | 10 ++++ docs/reference/api/schemas.md | 2 + site/src/api/rbacresourcesGenerated.ts | 4 ++ site/src/api/typesGenerated.ts | 4 ++ 19 files changed, 253 insertions(+), 11 deletions(-) diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 95e2cc0f48ac8..e98197d3b5bb2 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -14901,7 +14901,9 @@ const docTemplate = `{ "application_connect", "assign", "create", + "create_agent", "delete", + "delete_agent", "read", "read_personal", "ssh", @@ -14917,7 +14919,9 @@ const docTemplate = `{ "ActionApplicationConnect", "ActionAssign", "ActionCreate", + "ActionCreateAgent", "ActionDelete", + "ActionDeleteAgent", "ActionRead", "ActionReadPersonal", 
"ActionSSH", diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 02212d9944415..fa103f55fbe9f 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -13509,7 +13509,9 @@ "application_connect", "assign", "create", + "create_agent", "delete", + "delete_agent", "read", "read_personal", "ssh", @@ -13525,7 +13527,9 @@ "ActionApplicationConnect", "ActionAssign", "ActionCreate", + "ActionCreateAgent", "ActionDelete", + "ActionDeleteAgent", "ActionRead", "ActionReadPersonal", "ActionSSH", diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 20afcf66c7867..ab3781452dd2d 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -177,7 +177,7 @@ var ( // Unsure why provisionerd needs update and read personal rbac.ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, rbac.ResourceWorkspaceDormant.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStop}, - rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop}, + rbac.ResourceWorkspace.Type: {policy.ActionDelete, policy.ActionRead, policy.ActionUpdate, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionCreateAgent}, rbac.ResourceApiKey.Type: {policy.WildcardSymbol}, // When org scoped provisioner credentials are implemented, // this can be reduced to read a specific org. 
@@ -339,7 +339,7 @@ var ( rbac.ResourceProvisionerDaemon.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate}, rbac.ResourceUser.Type: rbac.ResourceUser.AvailableActions(), rbac.ResourceWorkspaceDormant.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStop}, - rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH}, + rbac.ResourceWorkspace.Type: {policy.ActionUpdate, policy.ActionDelete, policy.ActionWorkspaceStart, policy.ActionWorkspaceStop, policy.ActionSSH, policy.ActionCreateAgent, policy.ActionDeleteAgent}, rbac.ResourceWorkspaceProxy.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceDeploymentConfig.Type: {policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, rbac.ResourceNotificationMessage.Type: {policy.ActionCreate, policy.ActionRead, policy.ActionUpdate, policy.ActionDelete}, @@ -3180,6 +3180,10 @@ func (q *querier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg database return fetch(q.log, q.auth, q.db.GetWorkspaceByOwnerIDAndName)(ctx, arg) } +func (q *querier) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) { + return fetch(q.log, q.auth, q.db.GetWorkspaceByResourceID)(ctx, resourceID) +} + func (q *querier) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { return fetch(q.log, q.auth, q.db.GetWorkspaceByWorkspaceAppID)(ctx, workspaceAppID) } @@ -3713,9 +3717,24 @@ func (q *querier) InsertWorkspace(ctx context.Context, arg database.InsertWorksp } func (q *querier) InsertWorkspaceAgent(ctx context.Context, arg database.InsertWorkspaceAgentParams) (database.WorkspaceAgent, error) { - if err := q.authorizeContext(ctx, policy.ActionCreate, rbac.ResourceSystem); err != nil { + // NOTE(DanielleMaywood): + // Currently, the only way to link a Resource back to a Workspace is by 
following this chain: + // + // WorkspaceResource -> WorkspaceBuild -> Workspace + // + // It is possible for this function to be called without there existing + // a `WorkspaceBuild` to link back to. This means that we want to allow + // execution to continue if there isn't a workspace found to allow this + // behavior to continue. + workspace, err := q.db.GetWorkspaceByResourceID(ctx, arg.ResourceID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { return database.WorkspaceAgent{}, err } + + if err := q.authorizeContext(ctx, policy.ActionCreateAgent, workspace); err != nil { + return database.WorkspaceAgent{}, err + } + return q.db.InsertWorkspaceAgent(ctx, arg) } diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 1e4b4ea879b77..e8b90afbc396d 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -1928,6 +1928,22 @@ func (s *MethodTestSuite) TestWorkspace() { }) check.Args(ws.ID).Asserts(ws, policy.ActionRead) })) + s.Run("GetWorkspaceByResourceID", s.Subtest(func(db database.Store, check *expects) { + u := dbgen.User(s.T(), db, database.User{}) + o := dbgen.Organization(s.T(), db, database.Organization{}) + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{Type: database.ProvisionerJobTypeWorkspaceBuild}) + tpl := dbgen.Template(s.T(), db, database.Template{CreatedBy: u.ID, OrganizationID: o.ID}) + tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, + JobID: j.ID, + OrganizationID: o.ID, + CreatedBy: u.ID, + }) + ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: tpl.ID, OrganizationID: o.ID}) + _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: j.ID, TemplateVersionID: tv.ID}) + res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: j.ID}) + check.Args(res.ID).Asserts(ws, policy.ActionRead) 
+ })) s.Run("GetWorkspaces", s.Subtest(func(_ database.Store, check *expects) { // No asserts here because SQLFilter. check.Args(database.GetWorkspacesParams{}).Asserts() @@ -4018,12 +4034,25 @@ func (s *MethodTestSuite) TestSystemFunctions() { Returns(slice.New(a, b)) })) s.Run("InsertWorkspaceAgent", s.Subtest(func(db database.Store, check *expects) { - dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) + u := dbgen.User(s.T(), db, database.User{}) + o := dbgen.Organization(s.T(), db, database.Organization{}) + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{Type: database.ProvisionerJobTypeWorkspaceBuild}) + tpl := dbgen.Template(s.T(), db, database.Template{CreatedBy: u.ID, OrganizationID: o.ID}) + tv := dbgen.TemplateVersion(s.T(), db, database.TemplateVersion{ + TemplateID: uuid.NullUUID{UUID: tpl.ID, Valid: true}, + JobID: j.ID, + OrganizationID: o.ID, + CreatedBy: u.ID, + }) + ws := dbgen.Workspace(s.T(), db, database.WorkspaceTable{OwnerID: u.ID, TemplateID: tpl.ID, OrganizationID: o.ID}) + _ = dbgen.WorkspaceBuild(s.T(), db, database.WorkspaceBuild{WorkspaceID: ws.ID, JobID: j.ID, TemplateVersionID: tv.ID}) + res := dbgen.WorkspaceResource(s.T(), db, database.WorkspaceResource{JobID: j.ID}) check.Args(database.InsertWorkspaceAgentParams{ ID: uuid.New(), + ResourceID: res.ID, Name: "dev", APIKeyScope: database.AgentKeyScopeEnumAll, - }).Asserts(rbac.ResourceSystem, policy.ActionCreate) + }).Asserts(ws, policy.ActionCreateAgent) })) s.Run("InsertWorkspaceApp", s.Subtest(func(db database.Store, check *expects) { dbtestutil.DisableForeignKeysAndTriggers(s.T(), db) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index 3ab2895876ac5..75c56b9c2324d 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -8053,6 +8053,33 @@ func (q *FakeQuerier) GetWorkspaceByOwnerIDAndName(_ context.Context, arg databa return database.Workspace{}, sql.ErrNoRows } +func (q *FakeQuerier) 
GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + for _, resource := range q.workspaceResources { + if resource.ID != resourceID { + continue + } + + for _, build := range q.workspaceBuilds { + if build.JobID != resource.JobID { + continue + } + + for _, workspace := range q.workspaces { + if workspace.ID != build.WorkspaceID { + continue + } + + return q.extendWorkspace(workspace), nil + } + } + } + + return database.Workspace{}, sql.ErrNoRows +} + func (q *FakeQuerier) GetWorkspaceByWorkspaceAppID(_ context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { if err := validateDatabaseType(workspaceAppID); err != nil { return database.Workspace{}, err diff --git a/coderd/database/dbmetrics/querymetrics.go b/coderd/database/dbmetrics/querymetrics.go index 9122cedbf786c..47ec185915660 100644 --- a/coderd/database/dbmetrics/querymetrics.go +++ b/coderd/database/dbmetrics/querymetrics.go @@ -1887,6 +1887,13 @@ func (m queryMetricsStore) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg return workspace, err } +func (m queryMetricsStore) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) { + start := time.Now() + r0, r1 := m.s.GetWorkspaceByResourceID(ctx, resourceID) + m.queryLatencies.WithLabelValues("GetWorkspaceByResourceID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m queryMetricsStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { start := time.Now() workspace, err := m.s.GetWorkspaceByWorkspaceAppID(ctx, workspaceAppID) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index e7af9ecd8fee8..e3a9a14698e42 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -3963,6 +3963,21 @@ func (mr *MockStoreMockRecorder) GetWorkspaceByOwnerIDAndName(ctx, arg any) *gom return 
mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceByOwnerIDAndName", reflect.TypeOf((*MockStore)(nil).GetWorkspaceByOwnerIDAndName), ctx, arg) } +// GetWorkspaceByResourceID mocks base method. +func (m *MockStore) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (database.Workspace, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetWorkspaceByResourceID", ctx, resourceID) + ret0, _ := ret[0].(database.Workspace) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetWorkspaceByResourceID indicates an expected call of GetWorkspaceByResourceID. +func (mr *MockStoreMockRecorder) GetWorkspaceByResourceID(ctx, resourceID any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetWorkspaceByResourceID", reflect.TypeOf((*MockStore)(nil).GetWorkspaceByResourceID), ctx, resourceID) +} + // GetWorkspaceByWorkspaceAppID mocks base method. func (m *MockStore) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (database.Workspace, error) { m.ctrl.T.Helper() diff --git a/coderd/database/querier.go b/coderd/database/querier.go index 78a88426349da..d248780397ead 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -422,6 +422,7 @@ type sqlcQuerier interface { GetWorkspaceByAgentID(ctx context.Context, agentID uuid.UUID) (Workspace, error) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Workspace, error) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWorkspaceByOwnerIDAndNameParams) (Workspace, error) + GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (Workspace, error) GetWorkspaceByWorkspaceAppID(ctx context.Context, workspaceAppID uuid.UUID) (Workspace, error) GetWorkspaceModulesByJobID(ctx context.Context, jobID uuid.UUID) ([]WorkspaceModule, error) GetWorkspaceModulesCreatedAfter(ctx context.Context, createdAt time.Time) ([]WorkspaceModule, error) diff --git a/coderd/database/queries.sql.go 
b/coderd/database/queries.sql.go index b956fc1db5f91..99a8bf4603b57 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -18143,6 +18143,65 @@ func (q *sqlQuerier) GetWorkspaceByOwnerIDAndName(ctx context.Context, arg GetWo return i, err } +const getWorkspaceByResourceID = `-- name: GetWorkspaceByResourceID :one +SELECT + id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description +FROM + workspaces_expanded as workspaces +WHERE + workspaces.id = ( + SELECT + workspace_id + FROM + workspace_builds + WHERE + workspace_builds.job_id = ( + SELECT + job_id + FROM + workspace_resources + WHERE + workspace_resources.id = $1 + ) + ) +LIMIT + 1 +` + +func (q *sqlQuerier) GetWorkspaceByResourceID(ctx context.Context, resourceID uuid.UUID) (Workspace, error) { + row := q.db.QueryRowContext(ctx, getWorkspaceByResourceID, resourceID) + var i Workspace + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.OwnerID, + &i.OrganizationID, + &i.TemplateID, + &i.Deleted, + &i.Name, + &i.AutostartSchedule, + &i.Ttl, + &i.LastUsedAt, + &i.DormantAt, + &i.DeletingAt, + &i.AutomaticUpdates, + &i.Favorite, + &i.NextStartAt, + &i.OwnerAvatarUrl, + &i.OwnerUsername, + &i.OrganizationName, + &i.OrganizationDisplayName, + &i.OrganizationIcon, + &i.OrganizationDescription, + &i.TemplateName, + &i.TemplateDisplayName, + &i.TemplateIcon, + &i.TemplateDescription, + ) + return i, err +} + const getWorkspaceByWorkspaceAppID = `-- name: GetWorkspaceByWorkspaceAppID :one SELECT id, created_at, updated_at, owner_id, organization_id, template_id, deleted, name, autostart_schedule, ttl, last_used_at, dormant_at, deleting_at, 
automatic_updates, favorite, next_start_at, owner_avatar_url, owner_username, organization_name, organization_display_name, organization_icon, organization_description, template_name, template_display_name, template_icon, template_description diff --git a/coderd/database/queries/workspaces.sql b/coderd/database/queries/workspaces.sql index 4ec74c066fe41..44b7dcbf0387d 100644 --- a/coderd/database/queries/workspaces.sql +++ b/coderd/database/queries/workspaces.sql @@ -8,6 +8,30 @@ WHERE LIMIT 1; +-- name: GetWorkspaceByResourceID :one +SELECT + * +FROM + workspaces_expanded as workspaces +WHERE + workspaces.id = ( + SELECT + workspace_id + FROM + workspace_builds + WHERE + workspace_builds.job_id = ( + SELECT + job_id + FROM + workspace_resources + WHERE + workspace_resources.id = @resource_id + ) + ) +LIMIT + 1; + -- name: GetWorkspaceByWorkspaceAppID :one SELECT * diff --git a/coderd/rbac/object_gen.go b/coderd/rbac/object_gen.go index ad1a510fd44bd..f19d90894dd55 100644 --- a/coderd/rbac/object_gen.go +++ b/coderd/rbac/object_gen.go @@ -308,7 +308,9 @@ var ( // Valid Actions // - "ActionApplicationConnect" :: connect to workspace apps via browser // - "ActionCreate" :: create a new workspace + // - "ActionCreateAgent" :: create a new workspace agent // - "ActionDelete" :: delete workspace + // - "ActionDeleteAgent" :: delete an existing workspace agent // - "ActionRead" :: read workspace data to view on the UI // - "ActionSSH" :: ssh into a given workspace // - "ActionWorkspaceStart" :: allows starting a workspace @@ -338,7 +340,9 @@ var ( // Valid Actions // - "ActionApplicationConnect" :: connect to workspace apps via browser // - "ActionCreate" :: create a new workspace + // - "ActionCreateAgent" :: create a new workspace agent // - "ActionDelete" :: delete workspace + // - "ActionDeleteAgent" :: delete an existing workspace agent // - "ActionRead" :: read workspace data to view on the UI // - "ActionSSH" :: ssh into a given workspace // - 
"ActionWorkspaceStart" :: allows starting a workspace @@ -406,7 +410,9 @@ func AllActions() []policy.Action { policy.ActionApplicationConnect, policy.ActionAssign, policy.ActionCreate, + policy.ActionCreateAgent, policy.ActionDelete, + policy.ActionDeleteAgent, policy.ActionRead, policy.ActionReadPersonal, policy.ActionSSH, diff --git a/coderd/rbac/policy/policy.go b/coderd/rbac/policy/policy.go index c37e84c48f964..160062283f857 100644 --- a/coderd/rbac/policy/policy.go +++ b/coderd/rbac/policy/policy.go @@ -24,6 +24,9 @@ const ( ActionReadPersonal Action = "read_personal" ActionUpdatePersonal Action = "update_personal" + + ActionCreateAgent Action = "create_agent" + ActionDeleteAgent Action = "delete_agent" ) type PermissionDefinition struct { @@ -67,6 +70,9 @@ var workspaceActions = map[Action]ActionDefinition{ // Running a workspace ActionSSH: actDef("ssh into a given workspace"), ActionApplicationConnect: actDef("connect to workspace apps via browser"), + + ActionCreateAgent: actDef("create a new workspace agent"), + ActionDeleteAgent: actDef("delete an existing workspace agent"), } // RBACPermissions is indexed by the type diff --git a/coderd/rbac/roles.go b/coderd/rbac/roles.go index 0b94a74201b16..89f86b567a48d 100644 --- a/coderd/rbac/roles.go +++ b/coderd/rbac/roles.go @@ -272,7 +272,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { // This adds back in the Workspace permissions. 
Permissions(map[string][]policy.Action{ ResourceWorkspace.Type: ownerWorkspaceActions, - ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, })...), Org: map[string][]Permission{}, User: []Permission{}, @@ -291,7 +291,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { User: append(allPermsExcept(ResourceWorkspaceDormant, ResourceUser, ResourceOrganizationMember), Permissions(map[string][]policy.Action{ // Reduced permission set on dormant workspaces. No build, ssh, or exec - ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, // Users cannot do create/update/delete on themselves, but they // can read their own details. ResourceUser.Type: {policy.ActionRead, policy.ActionReadPersonal, policy.ActionUpdatePersonal}, @@ -412,7 +412,7 @@ func ReloadBuiltinRoles(opts *RoleOptions) { Org: map[string][]Permission{ // Org admins should not have workspace exec perms. 
organizationID.String(): append(allPermsExcept(ResourceWorkspace, ResourceWorkspaceDormant, ResourceAssignRole), Permissions(map[string][]policy.Action{ - ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop}, + ResourceWorkspaceDormant.Type: {policy.ActionRead, policy.ActionDelete, policy.ActionCreate, policy.ActionUpdate, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent}, ResourceWorkspace.Type: slice.Omit(ResourceWorkspace.AvailableActions(), policy.ActionApplicationConnect, policy.ActionSSH), })...), }, @@ -529,6 +529,16 @@ func ReloadBuiltinRoles(opts *RoleOptions) { ResourceType: ResourceWorkspace.Type, Action: policy.ActionDelete, }, + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionCreateAgent, + }, + { + Negate: true, + ResourceType: ResourceWorkspace.Type, + Action: policy.ActionDeleteAgent, + }, }, }, User: []Permission{}, diff --git a/coderd/rbac/roles_test.go b/coderd/rbac/roles_test.go index 6d42a01474d1a..4dfbc8fa2ab31 100644 --- a/coderd/rbac/roles_test.go +++ b/coderd/rbac/roles_test.go @@ -226,6 +226,15 @@ func TestRolePermissions(t *testing.T) { false: {setOtherOrg, setOrgNotMe, memberMe, templateAdmin, userAdmin}, }, }, + { + Name: "CreateDeleteWorkspaceAgent", + Actions: []policy.Action{policy.ActionCreateAgent, policy.ActionDeleteAgent}, + Resource: rbac.ResourceWorkspace.WithID(workspaceID).InOrg(orgID).WithOwner(currentUser.String()), + AuthorizeMap: map[bool][]hasAuthSubjects{ + true: {owner, orgMemberMe, orgAdmin}, + false: {setOtherOrg, memberMe, userAdmin, templateAdmin, orgTemplateAdmin, orgUserAdmin, orgAuditor, orgMemberMeBanWorkspace}, + }, + }, { Name: "Templates", Actions: []policy.Action{policy.ActionCreate, policy.ActionUpdate, policy.ActionDelete}, @@ -462,7 +471,7 @@ func TestRolePermissions(t *testing.T) { }, { Name: "WorkspaceDormant", - Actions: append(crud, 
policy.ActionWorkspaceStop), + Actions: append(crud, policy.ActionWorkspaceStop, policy.ActionCreateAgent, policy.ActionDeleteAgent), Resource: rbac.ResourceWorkspaceDormant.WithID(uuid.New()).InOrg(orgID).WithOwner(memberMe.Actor.ID), AuthorizeMap: map[bool][]hasAuthSubjects{ true: {orgMemberMe, orgAdmin, owner}, diff --git a/codersdk/rbacresources_gen.go b/codersdk/rbacresources_gen.go index 6157281f21356..95792bb8e2a7b 100644 --- a/codersdk/rbacresources_gen.go +++ b/codersdk/rbacresources_gen.go @@ -49,7 +49,9 @@ const ( ActionApplicationConnect RBACAction = "application_connect" ActionAssign RBACAction = "assign" ActionCreate RBACAction = "create" + ActionCreateAgent RBACAction = "create_agent" ActionDelete RBACAction = "delete" + ActionDeleteAgent RBACAction = "delete_agent" ActionRead RBACAction = "read" ActionReadPersonal RBACAction = "read_personal" ActionSSH RBACAction = "ssh" @@ -97,9 +99,9 @@ var RBACResourceActions = map[RBACResource][]RBACAction{ ResourceTemplate: {ActionCreate, ActionDelete, ActionRead, ActionUpdate, ActionUse, ActionViewInsights}, ResourceUser: {ActionCreate, ActionDelete, ActionRead, ActionReadPersonal, ActionUpdate, ActionUpdatePersonal}, ResourceWebpushSubscription: {ActionCreate, ActionDelete, ActionRead}, - ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionDelete, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, + ResourceWorkspace: {ActionApplicationConnect, ActionCreate, ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, ResourceWorkspaceAgentDevcontainers: {ActionCreate}, ResourceWorkspaceAgentResourceMonitor: {ActionCreate, ActionRead, ActionUpdate}, - ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, ActionDelete, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, + ResourceWorkspaceDormant: {ActionApplicationConnect, ActionCreate, 
ActionCreateAgent, ActionDelete, ActionDeleteAgent, ActionRead, ActionSSH, ActionWorkspaceStart, ActionWorkspaceStop, ActionUpdate}, ResourceWorkspaceProxy: {ActionCreate, ActionDelete, ActionRead, ActionUpdate}, } diff --git a/docs/reference/api/members.md b/docs/reference/api/members.md index a58a597d1ea2a..6b5d124753bc0 100644 --- a/docs/reference/api/members.md +++ b/docs/reference/api/members.md @@ -169,7 +169,9 @@ Status Code **200** | `action` | `application_connect` | | `action` | `assign` | | `action` | `create` | +| `action` | `create_agent` | | `action` | `delete` | +| `action` | `delete_agent` | | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | @@ -336,7 +338,9 @@ Status Code **200** | `action` | `application_connect` | | `action` | `assign` | | `action` | `create` | +| `action` | `create_agent` | | `action` | `delete` | +| `action` | `delete_agent` | | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | @@ -503,7 +507,9 @@ Status Code **200** | `action` | `application_connect` | | `action` | `assign` | | `action` | `create` | +| `action` | `create_agent` | | `action` | `delete` | +| `action` | `delete_agent` | | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | @@ -639,7 +645,9 @@ Status Code **200** | `action` | `application_connect` | | `action` | `assign` | | `action` | `create` | +| `action` | `create_agent` | | `action` | `delete` | +| `action` | `delete_agent` | | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | @@ -997,7 +1005,9 @@ Status Code **200** | `action` | `application_connect` | | `action` | `assign` | | `action` | `create` | +| `action` | `create_agent` | | `action` | `delete` | +| `action` | `delete_agent` | | `action` | `read` | | `action` | `read_personal` | | `action` | `ssh` | diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index 9325d751bc352..86cc4644c2685 100644 --- a/docs/reference/api/schemas.md +++ 
b/docs/reference/api/schemas.md @@ -5913,7 +5913,9 @@ Git clone makes use of this by parsing the URL from: 'Username for "https://gith | `application_connect` | | `assign` | | `create` | +| `create_agent` | | `delete` | +| `delete_agent` | | `read` | | `read_personal` | | `ssh` | diff --git a/site/src/api/rbacresourcesGenerated.ts b/site/src/api/rbacresourcesGenerated.ts index 3acb86c079908..885f603c1eb82 100644 --- a/site/src/api/rbacresourcesGenerated.ts +++ b/site/src/api/rbacresourcesGenerated.ts @@ -173,7 +173,9 @@ export const RBACResourceActions: Partial< workspace: { application_connect: "connect to workspace apps via browser", create: "create a new workspace", + create_agent: "create a new workspace agent", delete: "delete workspace", + delete_agent: "delete an existing workspace agent", read: "read workspace data to view on the UI", ssh: "ssh into a given workspace", start: "allows starting a workspace", @@ -191,7 +193,9 @@ export const RBACResourceActions: Partial< workspace_dormant: { application_connect: "connect to workspace apps via browser", create: "create a new workspace", + create_agent: "create a new workspace agent", delete: "delete workspace", + delete_agent: "delete an existing workspace agent", read: "read workspace data to view on the UI", ssh: "ssh into a given workspace", start: "allows starting a workspace", diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts index 4e337bd7c65f0..35cd006ec6c55 100644 --- a/site/src/api/typesGenerated.ts +++ b/site/src/api/typesGenerated.ts @@ -2131,7 +2131,9 @@ export type RBACAction = | "application_connect" | "assign" | "create" + | "create_agent" | "delete" + | "delete_agent" | "read" | "read_personal" | "ssh" @@ -2147,7 +2149,9 @@ export const RBACActions: RBACAction[] = [ "application_connect", "assign", "create", + "create_agent", "delete", + "delete_agent", "read", "read_personal", "ssh", From 36224f263f2b7c5f9af1250e6daa8e612b476ff5 Mon Sep 17 00:00:00 2001 From: Bruno 
Quaresma Date: Tue, 20 May 2025 22:24:17 -0300 Subject: [PATCH 39/42] chore: replace MUI icons with Lucide icons - 17 (#17957) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 1. ExpandMoreOutlined → ChevronDownIcon 2. Error/ErrorIcon → CircleAlertIcon 3. CheckCircle → CircleCheckIcon 4. Warning → TriangleAlertIcon --- .../WorkspaceAppStatus/WorkspaceAppStatus.tsx | 12 ++++++------ .../workspaces/WorkspaceTiming/StagesChart.tsx | 6 +++--- site/src/pages/ChatPage/ChatToolInvocation.tsx | 14 ++++++++++---- .../TemplateInsightsPage/IntervalMenu.tsx | 4 ++-- .../TemplateInsightsPage/WeekPicker.tsx | 4 ++-- .../ProvisionerTagsPopover.tsx | 4 ++-- site/src/pages/WorkspacePage/AppStatuses.tsx | 12 ++++++------ 7 files changed, 31 insertions(+), 25 deletions(-) diff --git a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx index 412df60d9203e..95123ce8734df 100644 --- a/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx +++ b/site/src/modules/workspaces/WorkspaceAppStatus/WorkspaceAppStatus.tsx @@ -1,10 +1,7 @@ import type { Theme } from "@emotion/react"; import { useTheme } from "@emotion/react"; import AppsIcon from "@mui/icons-material/Apps"; -import CheckCircle from "@mui/icons-material/CheckCircle"; -import ErrorIcon from "@mui/icons-material/Error"; import InsertDriveFile from "@mui/icons-material/InsertDriveFile"; -import Warning from "@mui/icons-material/Warning"; import CircularProgress from "@mui/material/CircularProgress"; import type { WorkspaceAppStatus as APIWorkspaceAppStatus, @@ -12,6 +9,9 @@ import type { WorkspaceAgent, WorkspaceApp, } from "api/typesGenerated"; +import { CircleCheckIcon } from "lucide-react"; +import { CircleAlertIcon } from "lucide-react"; +import { TriangleAlertIcon } from "lucide-react"; import { ExternalLinkIcon } from "lucide-react"; import { useAppLink } from 
"modules/apps/useAppLink"; import type { FC } from "react"; @@ -46,13 +46,13 @@ const getStatusIcon = (theme: Theme, state: APIWorkspaceAppStatus["state"]) => { const color = getStatusColor(theme, state); switch (state) { case "complete": - return ; + return ; case "failure": - return ; + return ; case "working": return ; default: - return ; + return ; } }; diff --git a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx index 6bf18b084b02b..6ca814bb39afd 100644 --- a/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx +++ b/site/src/modules/workspaces/WorkspaceTiming/StagesChart.tsx @@ -1,6 +1,6 @@ import type { Interpolation, Theme } from "@emotion/react"; -import ErrorSharp from "@mui/icons-material/ErrorSharp"; import type { TimingStage } from "api/typesGenerated"; +import { CircleAlertIcon } from "lucide-react"; import { InfoIcon } from "lucide-react"; import type { FC } from "react"; import { Bar, ClickableBar } from "./Chart/Bar"; @@ -159,9 +159,9 @@ export const StagesChart: FC = ({ }} > {t.error && ( - = ({ )} {toolInvocation.state === "result" ? ( hasError ? ( - + ) : ( - + ) ) : null}
= ({ value, onChange }) => { variant="outline" > {insightsIntervals[value].label} - + = ({ value, onChange }) => { aria-haspopup="true" aria-expanded={open ? "true" : undefined} onClick={() => setOpen(true)} - endIcon={} + endIcon={} > Last {numberOfWeeks} weeks diff --git a/site/src/pages/TemplateVersionEditorPage/ProvisionerTagsPopover.tsx b/site/src/pages/TemplateVersionEditorPage/ProvisionerTagsPopover.tsx index 2d76db8f9243d..bb0e3f439ed49 100644 --- a/site/src/pages/TemplateVersionEditorPage/ProvisionerTagsPopover.tsx +++ b/site/src/pages/TemplateVersionEditorPage/ProvisionerTagsPopover.tsx @@ -1,4 +1,3 @@ -import ExpandMoreOutlined from "@mui/icons-material/ExpandMoreOutlined"; import Link from "@mui/material/Link"; import useTheme from "@mui/system/useTheme"; import type { ProvisionerDaemon } from "api/typesGenerated"; @@ -9,6 +8,7 @@ import { PopoverContent, PopoverTrigger, } from "components/deprecated/Popover/Popover"; +import { ChevronDownIcon } from "lucide-react"; import { ProvisionerTagsField } from "modules/provisioners/ProvisionerTagsField"; import type { FC } from "react"; import { docs } from "utils/docs"; @@ -31,7 +31,7 @@ export const ProvisionerTagsPopover: FC = ({ color="neutral" css={{ paddingLeft: 0, paddingRight: 0, minWidth: "28px !important" }} > - + Expand provisioner tags diff --git a/site/src/pages/WorkspacePage/AppStatuses.tsx b/site/src/pages/WorkspacePage/AppStatuses.tsx index 60e4a8cecf22e..22dc5257f0e00 100644 --- a/site/src/pages/WorkspacePage/AppStatuses.tsx +++ b/site/src/pages/WorkspacePage/AppStatuses.tsx @@ -1,10 +1,7 @@ import type { Theme } from "@emotion/react"; import { useTheme } from "@emotion/react"; import AppsIcon from "@mui/icons-material/Apps"; -import CheckCircle from "@mui/icons-material/CheckCircle"; -import ErrorIcon from "@mui/icons-material/Error"; import InsertDriveFile from "@mui/icons-material/InsertDriveFile"; -import Warning from "@mui/icons-material/Warning"; import CircularProgress from 
"@mui/material/CircularProgress"; import Link from "@mui/material/Link"; import Tooltip from "@mui/material/Tooltip"; @@ -15,6 +12,9 @@ import type { WorkspaceApp, } from "api/typesGenerated"; import { formatDistance, formatDistanceToNow } from "date-fns"; +import { CircleCheckIcon } from "lucide-react"; +import { CircleAlertIcon } from "lucide-react"; +import { TriangleAlertIcon } from "lucide-react"; import { ExternalLinkIcon } from "lucide-react"; import { HourglassIcon } from "lucide-react"; import { CircleHelpIcon } from "lucide-react"; @@ -49,9 +49,9 @@ const getStatusIcon = ( : theme.palette.text.disabled; switch (state) { case "complete": - return ; + return ; case "failure": - return ; + return ; case "working": // Use Hourglass for past "working" states, spinner for the current one return isLatest ? ( @@ -60,7 +60,7 @@ const getStatusIcon = ( ); default: - return ; + return ; } }; From cbbbb4492a5a75b4b143e663cd3aaa9c997cea15 Mon Sep 17 00:00:00 2001 From: Spike Curtis Date: Wed, 21 May 2025 09:28:31 +0400 Subject: [PATCH 40/42] docs: explain coder:// link for RDP (#17901) fixes https://github.com/coder/internal/issues/627 Adds docs for `coder://` URLs for Windows Remote Desktop (RDP). Note that we might want to hold of merging since the URI handling is unreleased in Coder Desktop for Windows. --- .../workspace-access/remote-desktops.md | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/docs/user-guides/workspace-access/remote-desktops.md b/docs/user-guides/workspace-access/remote-desktops.md index ef8488f5889ff..2fe512b686763 100644 --- a/docs/user-guides/workspace-access/remote-desktops.md +++ b/docs/user-guides/workspace-access/remote-desktops.md @@ -47,6 +47,38 @@ Or use your favorite RDP client to connect to `localhost:3399`. The default username is `Administrator` and password is `coderRDP!`. 
+### Coder Desktop URI Handling (Beta) + +[Coder Desktop](../desktop) can use a URI handler to directly launch an RDP session without setting up port-forwarding. +The URI format is: + +```text +coder:///v0/open/ws//agent//rdp?username=&password= +``` + +For example: + +```text +coder://coder.example.com/v0/open/ws/myworkspace/agent/main/rdp?username=Administrator&password=coderRDP! +``` + +To include a Coder Desktop button to the workspace dashboard page, add a `coder_app` resource to the template: + +```tf +locals { + server_name = regex("https?:\\/\\/([^\\/]+)", data.coder_workspace.me.access_url)[0] +} + +resource "coder_app" "rdp-coder-desktop" { + agent_id = resource.coder_agent.main.id + slug = "rdp-desktop" + display_name = "RDP with Coder Desktop" + url = "coder://${local.server_name}/v0/open/ws/${data.coder_workspace.me.name}/agent/main/rdp?username=Administrator&password=coderRDP!" + icon = "/icon/desktop.svg" + external = true +} +``` + ## RDP Web Our [WebRDP](https://registry.coder.com/modules/windows-rdp) module in the Coder From 3654a49fb57dd911b9e6feb74b74d3bbbfb8d2b4 Mon Sep 17 00:00:00 2001 From: Michael Suchacz <203725896+ibetitsmike@users.noreply.github.com> Date: Wed, 21 May 2025 09:16:00 +0200 Subject: [PATCH 41/42] feat: add Claude.md initial draft (#17785) --- .cursorrules | 28 +++++++------- CLAUDE.md | 104 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 119 insertions(+), 13 deletions(-) create mode 100644 CLAUDE.md diff --git a/.cursorrules b/.cursorrules index ce4412b83f6e9..54966b1dcc89e 100644 --- a/.cursorrules +++ b/.cursorrules @@ -4,7 +4,7 @@ This project is called "Coder" - an application for managing remote development Coder provides a platform for creating, managing, and using remote development environments (also known as Cloud Development Environments or CDEs). It leverages Terraform to define and provision these environments, which are referred to as "workspaces" within the project. 
The system is designed to be extensible, secure, and provide developers with a seamless remote development experience. -# Core Architecture +## Core Architecture The heart of Coder is a control plane that orchestrates the creation and management of workspaces. This control plane interacts with separate Provisioner processes over gRPC to handle workspace builds. The Provisioners consume workspace definitions and use Terraform to create the actual infrastructure. @@ -12,17 +12,17 @@ The CLI package serves dual purposes - it can be used to launch the control plan The database layer uses PostgreSQL with SQLC for generating type-safe database code. Database migrations are carefully managed to ensure both forward and backward compatibility through paired `.up.sql` and `.down.sql` files. -# API Design +## API Design Coder's API architecture combines REST and gRPC approaches. The REST API is defined in `coderd/coderd.go` and uses Chi for HTTP routing. This provides the primary interface for the frontend and external integrations. Internal communication with Provisioners occurs over gRPC, with service definitions maintained in `.proto` files. This separation allows for efficient binary communication with the components responsible for infrastructure management while providing a standard REST interface for human-facing applications. -# Network Architecture +## Network Architecture Coder implements a secure networking layer based on Tailscale's Wireguard implementation. The `tailnet` package provides connectivity between workspace agents and clients through DERP (Designated Encrypted Relay for Packets) servers when direct connections aren't possible. This creates a secure overlay network allowing access to workspaces regardless of network topology, firewalls, or NAT configurations. -## Tailnet and DERP System +### Tailnet and DERP System The networking system has three key components: @@ -35,7 +35,7 @@ The networking system has three key components: 3. 
**Direct Connections**: When possible, the system establishes peer-to-peer connections between clients and workspaces using STUN for NAT traversal. This requires both endpoints to send UDP traffic on ephemeral ports. -## Workspace Proxies +### Workspace Proxies Workspace proxies (in the Enterprise edition) provide regional relay points for browser-based connections, reducing latency for geo-distributed teams. Key characteristics: @@ -45,9 +45,10 @@ Workspace proxies (in the Enterprise edition) provide regional relay points for - Managed through the `coder wsproxy` commands - Implemented primarily in the `enterprise/wsproxy/` package -# Agent System +## Agent System The workspace agent runs within each provisioned workspace and provides core functionality including: + - SSH access to workspaces via the `agentssh` package - Port forwarding - Terminal connectivity via the `pty` package for pseudo-terminal support @@ -57,7 +58,7 @@ The workspace agent runs within each provisioned workspace and provides core fun Agents communicate with the control plane using the tailnet system and authenticate using secure tokens. -# Workspace Applications +## Workspace Applications Workspace applications (or "apps") provide browser-based access to services running within workspaces. The system supports: @@ -69,17 +70,17 @@ Workspace applications (or "apps") provide browser-based access to services runn The implementation is primarily in the `coderd/workspaceapps/` directory with components for URL generation, proxying connections, and managing application state. -# Implementation Details +## Implementation Details The project structure separates frontend and backend concerns. React components and pages are organized in the `site/src/` directory, with Jest used for testing. The backend is primarily written in Go, with a strong emphasis on error handling patterns and test coverage. 
Database interactions are carefully managed through migrations in `coderd/database/migrations/` and queries in `coderd/database/queries/`. All new queries require proper database authorization (dbauthz) implementation to ensure that only users with appropriate permissions can access specific resources. -# Authorization System +## Authorization System The database authorization (dbauthz) system enforces fine-grained access control across all database operations. It uses role-based access control (RBAC) to validate user permissions before executing database operations. The `dbauthz` package wraps the database store and performs authorization checks before returning data. All database operations must pass through this layer to ensure security. -# Testing Framework +## Testing Framework The codebase has a comprehensive testing approach with several key components: @@ -91,7 +92,7 @@ The codebase has a comprehensive testing approach with several key components: 4. **Enterprise Testing**: Enterprise features have dedicated test utilities in the `coderdenttest` package. 
-# Open Source and Enterprise Components +## Open Source and Enterprise Components The repository contains both open source and enterprise components: @@ -100,9 +101,10 @@ The repository contains both open source and enterprise components: - The boundary between open source and enterprise is managed through a licensing system - The same core codebase supports both editions, with enterprise features conditionally enabled -# Development Philosophy +## Development Philosophy Coder emphasizes clear error handling, with specific patterns required: + - Concise error messages that avoid phrases like "failed to" - Wrapping errors with `%w` to maintain error chains - Using sentinel errors with the "err" prefix (e.g., `errNotFound`) @@ -111,7 +113,7 @@ All tests should run in parallel using `t.Parallel()` to ensure efficient testin Git contributions follow a standard format with commit messages structured as `type: `, where type is one of `feat`, `fix`, or `chore`. -# Development Workflow +## Development Workflow Development can be initiated using `scripts/develop.sh` to start the application after making changes. Database schema updates should be performed through the migration system using `create_migration.sh ` to generate migration files, with each `.up.sql` migration paired with a corresponding `.down.sql` that properly reverts all changes. diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000000000..90d91c9966df7 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,104 @@ +# Coder Development Guidelines + +Read [cursor rules](.cursorrules). 
+ +## Build/Test/Lint Commands + +### Main Commands + +- `make build` or `make build-fat` - Build all "fat" binaries (includes "server" functionality) +- `make build-slim` - Build "slim" binaries +- `make test` - Run Go tests +- `make test RUN=TestFunctionName` or `go test -v ./path/to/package -run TestFunctionName` - Test single +- `make test-postgres` - Run tests with Postgres database +- `make test-race` - Run tests with Go race detector +- `make test-e2e` - Run end-to-end tests +- `make lint` - Run all linters +- `make fmt` - Format all code +- `make gen` - Generates mocks, database queries and other auto-generated files + +### Frontend Commands (site directory) + +- `pnpm build` - Build frontend +- `pnpm dev` - Run development server +- `pnpm check` - Run code checks +- `pnpm format` - Format frontend code +- `pnpm lint` - Lint frontend code +- `pnpm test` - Run frontend tests + +## Code Style Guidelines + +### Go + +- Follow [Effective Go](https://go.dev/doc/effective_go) and [Go's Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments) +- Use `gofumpt` for formatting +- Create packages when used during implementation +- Validate abstractions against implementations + +### Error Handling + +- Use descriptive error messages +- Wrap errors with context +- Propagate errors appropriately +- Use proper error types +- (`xerrors.Errorf("failed to X: %w", err)`) + +### Naming + +- Use clear, descriptive names +- Abbreviate only when obvious +- Follow Go and TypeScript naming conventions + +### Comments + +- Document exported functions, types, and non-obvious logic +- Follow JSDoc format for TypeScript +- Use godoc format for Go code + +## Commit Style + +- Follow [Conventional Commits 1.0.0](https://www.conventionalcommits.org/en/v1.0.0/) +- Format: `type(scope): message` +- Types: `feat`, `fix`, `docs`, `style`, `refactor`, `test`, `chore` +- Keep message titles concise (~70 characters) +- Use imperative, present tense in commit titles + +## 
Database queries
+
+- MUST DO! Any changes to database - adding queries, modifying queries should be done in the `coderd/database/queries/*.sql` files. Use `make gen` to generate necessary changes after.
+- MUST DO! Queries are grouped in files relating to context - e.g. `prebuilds.sql`, `users.sql`, `provisionerjobs.sql`.
+- After making changes to any `coderd/database/queries/*.sql` files you must run `make gen` to generate respective ORM changes.
+
+## Architecture
+
+### Core Components
+
+- **coderd**: Main API service connecting workspaces, provisioners, and users
+- **provisionerd**: Execution context for infrastructure-modifying providers
+- **Agents**: Services in remote workspaces providing features like SSH and port forwarding
+- **Workspaces**: Cloud resources defined by Terraform
+
+## Sub-modules
+
+### Template System
+
+- Templates define infrastructure for workspaces using Terraform
+- Environment variables pass context between Coder and templates
+- Official modules extend development environments
+
+### RBAC System
+
+- Permissions defined at site, organization, and user levels
+- Object-Action model protects resources
+- Built-in roles: owner, member, auditor, templateAdmin
+- Permission format: `<sign>?<level>.<object>.<id>.<action>`
+
+### Database
+
+- PostgreSQL 13+ recommended for production
+- Migrations managed with `migrate`
+- Database authorization through `dbauthz` package
+
+## Frontend
+
+For building the frontend, refer to [this document](docs/contributing/frontend.md)

From 818d4d03f4297333e870b96f782a933870b78c9a Mon Sep 17 00:00:00 2001
From: Spike Curtis <spike@coder.com>
Date: Wed, 21 May 2025 11:29:25 +0400
Subject: [PATCH 42/42] chore: ignore 'session shutdown' yamux error in tests
 (#17964)

Fixes flake seen here:
https://github.com/coder/coder/actions/runs/15154327939/job/42606133069?pr=17960

Error log dropped when the dRPC server is being shut down right as we
are (re)dialing.
--- testutil/logger.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/testutil/logger.go b/testutil/logger.go index 47cb835aa16aa..88b6e20bada51 100644 --- a/testutil/logger.go +++ b/testutil/logger.go @@ -5,6 +5,7 @@ import ( "strings" "testing" + "github.com/hashicorp/yamux" "golang.org/x/xerrors" "cdr.dev/slog" @@ -24,6 +25,11 @@ func IgnoreLoggedError(entry slog.SinkEntry) bool { if !ok { return false } + // Yamux sessions get shut down when we are shutting down tests, so ignoring + // them should reduce flakiness. + if xerrors.Is(err, yamux.ErrSessionShutdown) { + return true + } // Canceled queries usually happen when we're shutting down tests, and so // ignoring them should reduce flakiness. This also includes // context.Canceled and context.DeadlineExceeded errors, even if they are