diff --git a/coderd/apidoc/docs.go b/coderd/apidoc/docs.go index 597043dfd0915..6ef218f3beb69 100644 --- a/coderd/apidoc/docs.go +++ b/coderd/apidoc/docs.go @@ -8213,6 +8213,41 @@ const docTemplate = `{ } } }, + "/workspaces/{workspace}/timings": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": [ + "application/json" + ], + "tags": [ + "Workspaces" + ], + "summary": "Get workspace timings by ID", + "operationId": "get-workspace-timings-by-id", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.WorkspaceTimings" + } + } + } + } + }, "/workspaces/{workspace}/ttl": { "put": { "security": [ @@ -11622,6 +11657,35 @@ const docTemplate = `{ "ProvisionerStorageMethodFile" ] }, + "codersdk.ProvisionerTiming": { + "type": "object", + "properties": { + "action": { + "type": "string" + }, + "ended_at": { + "type": "string", + "format": "date-time" + }, + "job_id": { + "type": "string", + "format": "uuid" + }, + "resource": { + "type": "string" + }, + "source": { + "type": "string" + }, + "stage": { + "type": "string" + }, + "started_at": { + "type": "string", + "format": "date-time" + } + } + }, "codersdk.ProxyHealthReport": { "type": "object", "properties": { @@ -14443,6 +14507,17 @@ const docTemplate = `{ "WorkspaceStatusDeleted" ] }, + "codersdk.WorkspaceTimings": { + "type": "object", + "properties": { + "provisioner_timings": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.ProvisionerTiming" + } + } + } + }, "codersdk.WorkspaceTransition": { "type": "string", "enum": [ diff --git a/coderd/apidoc/swagger.json b/coderd/apidoc/swagger.json index 1ec7773cdd497..df82814aa139d 100644 --- a/coderd/apidoc/swagger.json +++ b/coderd/apidoc/swagger.json @@ -7267,6 +7267,37 @@ } } }, + "/workspaces/{workspace}/timings": { + "get": { + "security": [ + { + "CoderSessionToken": [] + } + ], + "produces": ["application/json"], + "tags": ["Workspaces"], + "summary": "Get workspace timings by ID", + "operationId": "get-workspace-timings-by-id", + "parameters": [ + { + "type": "string", + "format": "uuid", + "description": "Workspace ID", + "name": "workspace", + "in": "path", + "required": true + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/codersdk.WorkspaceTimings" + } + } + } + } + }, "/workspaces/{workspace}/ttl": { "put": { "security": [ @@ -10488,6 +10519,35 @@ "enum": ["file"], "x-enum-varnames": ["ProvisionerStorageMethodFile"] }, + "codersdk.ProvisionerTiming": { + "type": "object", + "properties": { + "action": { + "type": "string" + }, + "ended_at": { + "type": "string", + "format": "date-time" + }, + "job_id": { + "type": "string", + "format": "uuid" + }, + "resource": { + "type": "string" + }, + "source": { + "type": "string" + }, + "stage": { + "type": "string" + }, + "started_at": { + "type": "string", + "format": "date-time" + } + } + }, "codersdk.ProxyHealthReport": { "type": "object", "properties": { @@ -13160,6 +13220,17 @@ "WorkspaceStatusDeleted" ] }, + "codersdk.WorkspaceTimings": { + "type": "object", + "properties": { + "provisioner_timings": { + "type": "array", + "items": { + "$ref": "#/definitions/codersdk.ProvisionerTiming" + } + } + } + }, "codersdk.WorkspaceTransition": { "type": "string", "enum": ["start", "stop", "delete"], diff --git a/coderd/coderd.go b/coderd/coderd.go 
index e04f13d367c6e..de6d098c42ae2 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -1157,6 +1157,7 @@ func New(options *Options) *API { r.Post("/", api.postWorkspaceAgentPortShare) r.Delete("/", api.deleteWorkspaceAgentPortShare) }) + r.Get("/timings", api.workspaceTimings) }) }) r.Route("/workspacebuilds/{workspacebuild}", func(r chi.Router) { diff --git a/coderd/database/dbauthz/dbauthz.go b/coderd/database/dbauthz/dbauthz.go index 077d704be1300..9ae60bec90a0f 100644 --- a/coderd/database/dbauthz/dbauthz.go +++ b/coderd/database/dbauthz/dbauthz.go @@ -1791,6 +1791,10 @@ func (q *querier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (data return job, nil } +func (q *querier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { + return q.db.GetProvisionerJobTimingsByJobID(ctx, jobID) +} + // TODO: we need to add a provisioner job resource func (q *querier) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { // if err := q.authorizeContext(ctx, policy.ActionRead, rbac.ResourceSystem); err != nil { diff --git a/coderd/database/dbauthz/dbauthz_test.go b/coderd/database/dbauthz/dbauthz_test.go index 4b4874f34247c..ebe4674be7de4 100644 --- a/coderd/database/dbauthz/dbauthz_test.go +++ b/coderd/database/dbauthz/dbauthz_test.go @@ -551,6 +551,23 @@ func (s *MethodTestSuite) TestProvisionerJob() { check.Args(database.UpdateProvisionerJobWithCancelByIDParams{ID: j.ID}). Asserts(v.RBACObject(tpl), []policy.Action{policy.ActionRead, policy.ActionUpdate}).Returns() })) + s.Run("GetProvisionerJobTimingsByJobID", s.Subtest(func(db database.Store, check *expects) { + jobID := uuid.New() + j := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{ID: jobID}) + t := dbgen.ProvisionerJobTimings(s.T(), db, database.InsertProvisionerJobTimingsParams{ + JobID: jobID, + StartedAt: []time.Time{dbtime.Now(), dbtime.Now()}, + EndedAt: []time.Time{dbtime.Now(), dbtime.Now()}, + Stage: []database.ProvisionerJobTimingStage{ + database.ProvisionerJobTimingStageInit, + database.ProvisionerJobTimingStagePlan, + }, + Source: []string{"source1", "source2"}, + Action: []string{"action1", "action2"}, + Resource: []string{"resource1", "resource2"}, + }) + check.Args(j.ID).Asserts().Returns(t) + })) s.Run("GetProvisionerJobsByIDs", s.Subtest(func(db database.Store, check *expects) { a := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) b := dbgen.ProvisionerJob(s.T(), db, nil, database.ProvisionerJob{}) diff --git a/coderd/database/dbgen/dbgen.go b/coderd/database/dbgen/dbgen.go index 79aee59d97dbe..9bfc05a041da2 100644 --- a/coderd/database/dbgen/dbgen.go +++ b/coderd/database/dbgen/dbgen.go @@ -893,6 +893,12 @@ func CustomRole(t testing.TB, db database.Store, seed database.CustomRole) datab return role } +func ProvisionerJobTimings(t testing.TB, db database.Store, seed database.InsertProvisionerJobTimingsParams) []database.ProvisionerJobTiming { + timings, err := db.InsertProvisionerJobTimings(genCtx, seed) + require.NoError(t, err, "insert provisioner job timings") + return timings +} + func must[V any](v V, err error) V { if err != nil { panic(err) diff --git a/coderd/database/dbmem/dbmem.go b/coderd/database/dbmem/dbmem.go index ed766d48ecd43..f661581e8dd54 100644 --- a/coderd/database/dbmem/dbmem.go +++ b/coderd/database/dbmem/dbmem.go @@ -195,6 +195,7 @@ type data struct { workspaces []database.Workspace workspaceProxies []database.WorkspaceProxy customRoles 
[]database.CustomRole + provisionerJobTimings []database.ProvisionerJobTiming runtimeConfig map[string]string // Locks is a map of lock names. Any keys within the map are currently // locked. @@ -3284,6 +3285,26 @@ func (q *FakeQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) ( return q.getProvisionerJobByIDNoLock(ctx, id) } +func (q *FakeQuerier) GetProvisionerJobTimingsByJobID(_ context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { + q.mutex.RLock() + defer q.mutex.RUnlock() + + timings := make([]database.ProvisionerJobTiming, 0) + for _, timing := range q.provisionerJobTimings { + if timing.JobID == jobID { + timings = append(timings, timing) + } + } + if len(timings) == 0 { + return nil, sql.ErrNoRows + } + sort.Slice(timings, func(i, j int) bool { + return timings[i].StartedAt.Before(timings[j].StartedAt) + }) + + return timings, nil +} + func (q *FakeQuerier) GetProvisionerJobsByIDs(_ context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { q.mutex.RLock() defer q.mutex.RUnlock() @@ -6777,13 +6798,31 @@ func (q *FakeQuerier) InsertProvisionerJobLogs(_ context.Context, arg database.I return logs, nil } -func (*FakeQuerier) InsertProvisionerJobTimings(_ context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { +func (q *FakeQuerier) InsertProvisionerJobTimings(_ context.Context, arg database.InsertProvisionerJobTimingsParams) ([]database.ProvisionerJobTiming, error) { err := validateDatabaseType(arg) if err != nil { return nil, err } - return nil, nil + q.mutex.Lock() + defer q.mutex.Unlock() + + insertedTimings := make([]database.ProvisionerJobTiming, 0, len(arg.StartedAt)) + for i := range arg.StartedAt { + timing := database.ProvisionerJobTiming{ + JobID: arg.JobID, + StartedAt: arg.StartedAt[i], + EndedAt: arg.EndedAt[i], + Stage: arg.Stage[i], + Source: arg.Source[i], + Action: arg.Action[i], + Resource: arg.Resource[i], + } + q.provisionerJobTimings = append(q.provisionerJobTimings, timing) + insertedTimings = append(insertedTimings, timing) + } + + return insertedTimings, nil } func (q *FakeQuerier) InsertProvisionerKey(_ context.Context, arg database.InsertProvisionerKeyParams) (database.ProvisionerKey, error) { diff --git a/coderd/database/dbmetrics/dbmetrics.go b/coderd/database/dbmetrics/dbmetrics.go index 0ec70c1736d43..56dd9dbf0949d 100644 --- a/coderd/database/dbmetrics/dbmetrics.go +++ b/coderd/database/dbmetrics/dbmetrics.go @@ -921,6 +921,13 @@ func (m metricsStore) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) ( return job, err } +func (m metricsStore) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]database.ProvisionerJobTiming, error) { + start := time.Now() + r0, r1 := m.s.GetProvisionerJobTimingsByJobID(ctx, jobID) + m.queryLatencies.WithLabelValues("GetProvisionerJobTimingsByJobID").Observe(time.Since(start).Seconds()) + return r0, r1 +} + func (m metricsStore) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]database.ProvisionerJob, error) { start := time.Now() jobs, err := m.s.GetProvisionerJobsByIDs(ctx, ids) diff --git a/coderd/database/dbmock/dbmock.go b/coderd/database/dbmock/dbmock.go index c5d579e1c2656..634d02f7a9f41 100644 --- a/coderd/database/dbmock/dbmock.go +++ b/coderd/database/dbmock/dbmock.go @@ -1868,6 +1868,21 @@ func (mr *MockStoreMockRecorder) GetProvisionerJobByID(arg0, arg1 any) *gomock.C return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobByID", 
reflect.TypeOf((*MockStore)(nil).GetProvisionerJobByID), arg0, arg1) } +// GetProvisionerJobTimingsByJobID mocks base method. +func (m *MockStore) GetProvisionerJobTimingsByJobID(arg0 context.Context, arg1 uuid.UUID) ([]database.ProvisionerJobTiming, error) { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "GetProvisionerJobTimingsByJobID", arg0, arg1) + ret0, _ := ret[0].([]database.ProvisionerJobTiming) + ret1, _ := ret[1].(error) + return ret0, ret1 +} + +// GetProvisionerJobTimingsByJobID indicates an expected call of GetProvisionerJobTimingsByJobID. +func (mr *MockStoreMockRecorder) GetProvisionerJobTimingsByJobID(arg0, arg1 any) *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetProvisionerJobTimingsByJobID", reflect.TypeOf((*MockStore)(nil).GetProvisionerJobTimingsByJobID), arg0, arg1) +} + // GetProvisionerJobsByIDs mocks base method. func (m *MockStore) GetProvisionerJobsByIDs(arg0 context.Context, arg1 []uuid.UUID) ([]database.ProvisionerJob, error) { m.ctrl.T.Helper() diff --git a/coderd/database/querier.go b/coderd/database/querier.go index ee9a64f12076d..b6a1eb5e155f2 100644 --- a/coderd/database/querier.go +++ b/coderd/database/querier.go @@ -189,6 +189,7 @@ type sqlcQuerier interface { GetProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, error) GetProvisionerDaemonsByOrganization(ctx context.Context, organizationID uuid.UUID) ([]ProvisionerDaemon, error) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) + GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]ProvisionerJobTiming, error) GetProvisionerJobsByIDs(ctx context.Context, ids []uuid.UUID) ([]ProvisionerJob, error) GetProvisionerJobsByIDsWithQueuePosition(ctx context.Context, ids []uuid.UUID) ([]GetProvisionerJobsByIDsWithQueuePositionRow, error) GetProvisionerJobsCreatedAfter(ctx context.Context, createdAt time.Time) ([]ProvisionerJob, error) diff --git a/coderd/database/queries.sql.go b/coderd/database/queries.sql.go index 6831415907b67..c9ba238f29ef3 100644 --- a/coderd/database/queries.sql.go +++ b/coderd/database/queries.sql.go @@ -5412,6 +5412,43 @@ func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (P return i, err } +const getProvisionerJobTimingsByJobID = `-- name: GetProvisionerJobTimingsByJobID :many +SELECT job_id, started_at, ended_at, stage, source, action, resource FROM provisioner_job_timings +WHERE job_id = $1 +ORDER BY started_at ASC +` + +func (q *sqlQuerier) GetProvisionerJobTimingsByJobID(ctx context.Context, jobID uuid.UUID) ([]ProvisionerJobTiming, error) { + rows, err := q.db.QueryContext(ctx, getProvisionerJobTimingsByJobID, jobID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ProvisionerJobTiming + for rows.Next() { + var i ProvisionerJobTiming + if err := rows.Scan( + &i.JobID, + &i.StartedAt, + &i.EndedAt, + &i.Stage, + &i.Source, + &i.Action, + &i.Resource, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getProvisionerJobsByIDs = `-- name: GetProvisionerJobsByIDs :many SELECT id, created_at, updated_at, started_at, canceled_at, completed_at, error, organization_id, initiator_id, provisioner, storage_method, type, input, worker_id, file_id, tags, error_code, trace_metadata, job_status diff --git a/coderd/database/queries/provisionerjobs.sql 
b/coderd/database/queries/provisionerjobs.sql index 687176d3c255b..95a84fcd3c824 100644 --- a/coderd/database/queries/provisionerjobs.sql +++ b/coderd/database/queries/provisionerjobs.sql @@ -156,3 +156,8 @@ SELECT unnest(@action::text[]), unnest(@resource::text[]) RETURNING *; + +-- name: GetProvisionerJobTimingsByJobID :many +SELECT * FROM provisioner_job_timings +WHERE job_id = $1 +ORDER BY started_at ASC; \ No newline at end of file diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 62193b6d673f0..188ec92818c72 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -1740,6 +1740,55 @@ func (api *API) watchWorkspace(rw http.ResponseWriter, r *http.Request) { } } +// @Summary Get workspace timings by ID +// @ID get-workspace-timings-by-id +// @Security CoderSessionToken +// @Produce json +// @Tags Workspaces +// @Param workspace path string true "Workspace ID" format(uuid) +// @Success 200 {object} codersdk.WorkspaceTimings +// @Router /workspaces/{workspace}/timings [get] +func (api *API) workspaceTimings(rw http.ResponseWriter, r *http.Request) { + var ( + ctx = r.Context() + workspace = httpmw.WorkspaceParam(r) + ) + + build, err := api.Database.GetLatestWorkspaceBuildByWorkspaceID(ctx, workspace.ID) + if err != nil { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspace build.", + Detail: err.Error(), + }) + return + } + + provisionerTimings, err := api.Database.GetProvisionerJobTimingsByJobID(ctx, build.JobID) + if err != nil && !errors.Is(err, sql.ErrNoRows) { + httpapi.Write(ctx, rw, http.StatusInternalServerError, codersdk.Response{ + Message: "Internal error fetching workspace timings.", + Detail: err.Error(), + }) + return + } + + res := codersdk.WorkspaceTimings{ + ProvisionerTimings: make([]codersdk.ProvisionerTiming, 0, len(provisionerTimings)), + } + for _, t := range provisionerTimings { + res.ProvisionerTimings = append(res.ProvisionerTimings, codersdk.ProvisionerTiming{ + JobID: t.JobID, + Stage: string(t.Stage), + Source: t.Source, + Action: t.Action, + Resource: t.Resource, + StartedAt: t.StartedAt, + EndedAt: t.EndedAt, + }) + } + httpapi.Write(ctx, rw, http.StatusOK, res) +} + type workspaceData struct { templates []database.Template builds []codersdk.WorkspaceBuild diff --git a/coderd/workspaces_test.go b/coderd/workspaces_test.go index 98f36c3b9a13e..4f5064de48cbe 100644 --- a/coderd/workspaces_test.go +++ b/coderd/workspaces_test.go @@ -3556,3 +3556,169 @@ func TestWorkspaceNotifications(t *testing.T) { }) }) } + +func TestWorkspaceTimings(t *testing.T) { + t.Parallel() + + // Setup a base template for the workspaces + db, pubsub := dbtestutil.NewDB(t) + client := coderdtest.New(t, &coderdtest.Options{ + Database: db, + Pubsub: pubsub, + }) + owner := coderdtest.CreateFirstUser(t, client) + file := dbgen.File(t, db, database.File{ + CreatedBy: owner.UserID, + }) + versionJob := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{ + OrganizationID: owner.OrganizationID, + InitiatorID: owner.UserID, + WorkerID: uuid.NullUUID{}, + FileID: file.ID, + Tags: database.StringMap{ + "custom": "true", + }, + }) + version := dbgen.TemplateVersion(t, db, database.TemplateVersion{ + OrganizationID: owner.OrganizationID, + JobID: versionJob.ID, + CreatedBy: owner.UserID, + }) + template := dbgen.Template(t, db, database.Template{ + OrganizationID: owner.OrganizationID, + ActiveVersionID: version.ID, + CreatedBy: owner.UserID, + }) + + // Since the tests run in parallel, we need to 
create a new workspace for
+	// each test to avoid fetching the wrong latest build.
+	type workspaceWithBuild struct {
+		database.Workspace
+		build database.WorkspaceBuild
+	}
+	makeWorkspace := func() workspaceWithBuild {
+		ws := dbgen.Workspace(t, db, database.Workspace{
+			OwnerID:        owner.UserID,
+			OrganizationID: owner.OrganizationID,
+			TemplateID:     template.ID,
+			// Generate a unique name for the workspace
+			Name: "test-workspace-" + uuid.New().String(),
+		})
+		jobID := uuid.New()
+		job := dbgen.ProvisionerJob(t, db, pubsub, database.ProvisionerJob{
+			ID:             jobID,
+			OrganizationID: owner.OrganizationID,
+			Type:           database.ProvisionerJobTypeWorkspaceBuild,
+			Tags:           database.StringMap{jobID.String(): "true"},
+		})
+		build := dbgen.WorkspaceBuild(t, db, database.WorkspaceBuild{
+			WorkspaceID:       ws.ID,
+			TemplateVersionID: version.ID,
+			BuildNumber:       1,
+			Transition:        database.WorkspaceTransitionStart,
+			InitiatorID:       owner.UserID,
+			JobID:             job.ID,
+		})
+		return workspaceWithBuild{
+			Workspace: ws,
+			build:     build,
+		}
+	}
+
+	makeProvisionerTimings := func(jobID uuid.UUID, count int) []database.ProvisionerJobTiming {
+		// Use the database.ProvisionerJobTiming struct to mock timings data instead
+		// of directly creating database.InsertProvisionerJobTimingsParams. This
+		// approach makes the mock data easier to understand, as
+		// database.InsertProvisionerJobTimingsParams requires slices of each field
+		// for batch inserts.
+		timings := make([]database.ProvisionerJobTiming, count)
+		now := time.Now()
+		for i := range count {
+			startedAt := now.Add(-time.Hour + time.Duration(i)*time.Minute)
+			endedAt := startedAt.Add(time.Minute)
+			timings[i] = database.ProvisionerJobTiming{
+				StartedAt: startedAt,
+				EndedAt:   endedAt,
+				Stage:     database.ProvisionerJobTimingStageInit,
+				Action:    string(database.AuditActionCreate),
+				Source:    "source",
+				Resource:  fmt.Sprintf("resource[%d]", i),
+			}
+		}
+		insertParams := database.InsertProvisionerJobTimingsParams{
+			JobID: jobID,
+		}
+		for _, timing := range timings {
+			insertParams.StartedAt = append(insertParams.StartedAt, timing.StartedAt)
+			insertParams.EndedAt = append(insertParams.EndedAt, timing.EndedAt)
+			insertParams.Stage = append(insertParams.Stage, timing.Stage)
+			insertParams.Action = append(insertParams.Action, timing.Action)
+			insertParams.Source = append(insertParams.Source, timing.Source)
+			insertParams.Resource = append(insertParams.Resource, timing.Resource)
+		}
+		return dbgen.ProvisionerJobTimings(t, db, insertParams)
+	}
+
+	// Given
+	testCases := []struct {
+		name               string
+		provisionerTimings int
+		workspace          workspaceWithBuild
+		error              bool
+	}{
+		{
+			name:               "workspace with 5 provisioner timings",
+			provisionerTimings: 5,
+			workspace:          makeWorkspace(),
+		},
+		{
+			name:               "workspace with 2 provisioner timings",
+			provisionerTimings: 2,
+			workspace:          makeWorkspace(),
+		},
+		{
+			name:               "workspace with 0 provisioner timings",
+			provisionerTimings: 0,
+			workspace:          makeWorkspace(),
+		},
+		{
+			name:               "workspace not found",
+			provisionerTimings: 0,
+			workspace:          workspaceWithBuild{},
+			error:              true,
+		},
+	}
+
+	for _, tc := range testCases {
+		tc := tc
+		t.Run(tc.name, func(t *testing.T) {
+			t.Parallel()
+
+			// Generate timings based on the test config
+			generatedTimings := makeProvisionerTimings(tc.workspace.build.JobID, tc.provisionerTimings)
+			res, err := client.WorkspaceTimings(context.Background(), tc.workspace.ID)
+
+			// When an error is expected, then an error is returned
+			if tc.error {
+				require.Error(t, err)
+				return
+			}
+
+			// When success is expected, then no
error is returned and the length and + // fields are correctly returned + require.NoError(t, err) + require.Len(t, res.ProvisionerTimings, tc.provisionerTimings) + for i := range res.ProvisionerTimings { + timingRes := res.ProvisionerTimings[i] + genTiming := generatedTimings[i] + require.Equal(t, genTiming.Resource, timingRes.Resource) + require.Equal(t, genTiming.Action, timingRes.Action) + require.Equal(t, string(genTiming.Stage), timingRes.Stage) + require.Equal(t, genTiming.JobID.String(), timingRes.JobID.String()) + require.Equal(t, genTiming.Source, timingRes.Source) + require.Equal(t, genTiming.StartedAt.UnixMilli(), timingRes.StartedAt.UnixMilli()) + require.Equal(t, genTiming.EndedAt.UnixMilli(), timingRes.EndedAt.UnixMilli()) + } + }) + } +} diff --git a/codersdk/client.go b/codersdk/client.go index cf013a25c3ce8..d267355d37096 100644 --- a/codersdk/client.go +++ b/codersdk/client.go @@ -192,6 +192,9 @@ func prefixLines(prefix, s []byte) []byte { // Request performs a HTTP request with the body provided. The caller is // responsible for closing the response body. func (c *Client) Request(ctx context.Context, method, path string, body interface{}, opts ...RequestOption) (*http.Response, error) { + if ctx == nil { + return nil, xerrors.Errorf("context should not be nil") + } ctx, span := tracing.StartSpanWithName(ctx, tracing.FuncNameSkip(1)) defer span.End() diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index 4e4b98fe8c243..658af09cdda61 100644 --- a/codersdk/workspaces.go +++ b/codersdk/workspaces.go @@ -626,6 +626,35 @@ func (c *Client) UnfavoriteWorkspace(ctx context.Context, workspaceID uuid.UUID) return nil } +type ProvisionerTiming struct { + JobID uuid.UUID `json:"job_id" format:"uuid"` + StartedAt time.Time `json:"started_at" format:"date-time"` + EndedAt time.Time `json:"ended_at" format:"date-time"` + Stage string `json:"stage"` + Source string `json:"source"` + Action string `json:"action"` + Resource string `json:"resource"` +} + +type WorkspaceTimings struct { + ProvisionerTimings []ProvisionerTiming `json:"provisioner_timings"` + // TODO: Add AgentScriptTimings when it is done https://github.com/coder/coder/issues/14630 +} + +func (c *Client) WorkspaceTimings(ctx context.Context, id uuid.UUID) (WorkspaceTimings, error) { + path := fmt.Sprintf("/api/v2/workspaces/%s/timings", id.String()) + res, err := c.Request(ctx, http.MethodGet, path, nil) + if err != nil { + return WorkspaceTimings{}, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return WorkspaceTimings{}, ReadBodyAsError(res) + } + var timings WorkspaceTimings + return timings, json.NewDecoder(res.Body).Decode(&timings) +} + // WorkspaceNotifyChannel is the PostgreSQL NOTIFY // channel to listen for updates on. The payload is empty, // because the size of a workspace payload can be very large. diff --git a/docs/reference/api/schemas.md b/docs/reference/api/schemas.md index c021e18953f60..ca57ad4f60a35 100644 --- a/docs/reference/api/schemas.md +++ b/docs/reference/api/schemas.md @@ -4177,6 +4177,32 @@ CreateWorkspaceRequest provides options for creating a new workspace. 
Only one o | ------ | | `file` | +## codersdk.ProvisionerTiming + +```json +{ + "action": "string", + "ended_at": "2019-08-24T14:15:22Z", + "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f", + "resource": "string", + "source": "string", + "stage": "string", + "started_at": "2019-08-24T14:15:22Z" +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| ------------ | ------ | -------- | ------------ | ----------- | +| `action` | string | false | | | +| `ended_at` | string | false | | | +| `job_id` | string | false | | | +| `resource` | string | false | | | +| `source` | string | false | | | +| `stage` | string | false | | | +| `started_at` | string | false | | | + ## codersdk.ProxyHealthReport ```json @@ -7454,6 +7480,30 @@ If the schedule is empty, the user will be updated to use the default schedule.| | `deleting` | | `deleted` | +## codersdk.WorkspaceTimings + +```json +{ + "provisioner_timings": [ + { + "action": "string", + "ended_at": "2019-08-24T14:15:22Z", + "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f", + "resource": "string", + "source": "string", + "stage": "string", + "started_at": "2019-08-24T14:15:22Z" + } + ] +} +``` + +### Properties + +| Name | Type | Required | Restrictions | Description | +| --------------------- | ----------------------------------------------------------------- | -------- | ------------ | ----------- | +| `provisioner_timings` | array of [codersdk.ProvisionerTiming](#codersdkprovisionertiming) | false | | | + ## codersdk.WorkspaceTransition ```json diff --git a/docs/reference/api/workspaces.md b/docs/reference/api/workspaces.md index 6b7d2dd985de3..92ce677e6ece9 100644 --- a/docs/reference/api/workspaces.md +++ b/docs/reference/api/workspaces.md @@ -1604,6 +1604,53 @@ curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/resolve-autos To perform this operation, you must be authenticated. [Learn more](authentication.md). +## Get workspace timings by ID + +### Code samples + +```shell +# Example request using curl +curl -X GET http://coder-server:8080/api/v2/workspaces/{workspace}/timings \ + -H 'Accept: application/json' \ + -H 'Coder-Session-Token: API_KEY' +``` + +`GET /workspaces/{workspace}/timings` + +### Parameters + +| Name | In | Type | Required | Description | +| ----------- | ---- | ------------ | -------- | ------------ | +| `workspace` | path | string(uuid) | true | Workspace ID | + +### Example responses + +> 200 Response + +```json +{ + "provisioner_timings": [ + { + "action": "string", + "ended_at": "2019-08-24T14:15:22Z", + "job_id": "453bd7d7-5355-4d6d-a38e-d9e7eb218c3f", + "resource": "string", + "source": "string", + "stage": "string", + "started_at": "2019-08-24T14:15:22Z" + } + ] +} +``` + +### Responses + +| Status | Meaning | Description | Schema | +| ------ | ------------------------------------------------------- | ----------- | ---------------------------------------------------------------- | +| 200 | [OK](https://tools.ietf.org/html/rfc7231#section-6.3.1) | OK | [codersdk.WorkspaceTimings](schemas.md#codersdkworkspacetimings) | + +To perform this operation, you must be authenticated. [Learn more](authentication.md). 
+
 ## Update workspace TTL by ID
 
 ### Code samples
diff --git a/site/src/api/typesGenerated.ts b/site/src/api/typesGenerated.ts
index 64bdb2d262852..9054876e55741 100644
--- a/site/src/api/typesGenerated.ts
+++ b/site/src/api/typesGenerated.ts
@@ -1050,6 +1050,17 @@ export interface ProvisionerKey {
 	readonly tags: Record<string, string>;
 }
 
+// From codersdk/workspaces.go
+export interface ProvisionerTiming {
+	readonly job_id: string;
+	readonly started_at: string;
+	readonly ended_at: string;
+	readonly stage: string;
+	readonly source: string;
+	readonly action: string;
+	readonly resource: string;
+}
+
 // From codersdk/workspaceproxy.go
 export interface ProxyHealthReport {
 	readonly errors: Readonly<Array<string>>;
@@ -1999,6 +2010,11 @@ export interface WorkspaceResourceMetadata {
 	readonly sensitive: boolean;
 }
 
+// From codersdk/workspaces.go
+export interface WorkspaceTimings {
+	readonly provisioner_timings: Readonly<Array<ProvisionerTiming>>;
+}
+
 // From codersdk/workspaces.go
 export interface WorkspacesRequest extends Pagination {
 	readonly q?: string;
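
For reviewers who want to try the new endpoint end to end, below is a minimal sketch that calls it through the `WorkspaceTimings` SDK method added in `codersdk/workspaces.go`. The deployment URL, session token, and workspace ID are placeholders, and it assumes the standard `codersdk.New` client constructor; it is illustrative only and not part of this change.

```go
package main

import (
	"context"
	"fmt"
	"net/url"

	"github.com/google/uuid"

	"github.com/coder/coder/v2/codersdk"
)

func main() {
	// Placeholder deployment URL and session token.
	deploymentURL, err := url.Parse("https://coder.example.com")
	if err != nil {
		panic(err)
	}
	client := codersdk.New(deploymentURL)
	client.SetSessionToken("session-token-placeholder")

	// Placeholder workspace ID.
	workspaceID := uuid.MustParse("00000000-0000-0000-0000-000000000000")

	// Calls GET /api/v2/workspaces/{workspace}/timings via the new SDK method.
	timings, err := client.WorkspaceTimings(context.Background(), workspaceID)
	if err != nil {
		panic(err)
	}
	// Print the duration of each provisioner stage from the latest build.
	for _, t := range timings.ProvisionerTimings {
		fmt.Printf("%s %s %s: %s\n", t.Stage, t.Resource, t.Action, t.EndedAt.Sub(t.StartedAt))
	}
}
```

The response currently contains only `provisioner_timings`; per the TODO on `WorkspaceTimings`, agent script timings are expected to be added to the same payload later.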