From 48527f7de48140ec08fe528d4e8c047019f87618 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 15:24:27 +0000 Subject: [PATCH 01/17] feat: Add parameter and jobs database schema This modifies a prior migration which is typically forbidden, but because we're pre-production deployment I felt grouping would be helpful to future contributors. This adds database functions that are required for the provisioner daemon and job queue logic. --- database/databasefake/databasefake.go | 215 +++++++- database/dump.sql | 82 +++- database/migrations/000002_projects.up.sql | 12 +- database/migrations/000004_jobs.down.sql | 0 database/migrations/000004_jobs.up.sql | 53 ++ database/migrations/create_migration.sh | 2 +- database/models.go | 146 +++++- database/querier.go | 13 + database/query.sql | 155 +++++- database/query.sql.go | 545 ++++++++++++++++++++- 10 files changed, 1161 insertions(+), 62 deletions(-) create mode 100644 database/migrations/000004_jobs.down.sql create mode 100644 database/migrations/000004_jobs.up.sql diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index b7cd6de3c57e8..f0e97101321f5 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -18,13 +18,16 @@ func New() database.Store { organizationMembers: make([]database.OrganizationMember, 0), users: make([]database.User, 0), - project: make([]database.Project, 0), - projectHistory: make([]database.ProjectHistory, 0), - projectParameter: make([]database.ProjectParameter, 0), - workspace: make([]database.Workspace, 0), - workspaceResource: make([]database.WorkspaceResource, 0), - workspaceHistory: make([]database.WorkspaceHistory, 0), - workspaceAgent: make([]database.WorkspaceAgent, 0), + parameterValue: make([]database.ParameterValue, 0), + project: make([]database.Project, 0), + projectHistory: make([]database.ProjectHistory, 0), + projectParameter: make([]database.ProjectParameter, 0), + provisionerDaemons: make([]database.ProvisionerDaemon, 0), + provisionerJobs: make([]database.ProvisionerJob, 0), + workspace: make([]database.Workspace, 0), + workspaceResource: make([]database.WorkspaceResource, 0), + workspaceHistory: make([]database.WorkspaceHistory, 0), + workspaceAgent: make([]database.WorkspaceAgent, 0), } } @@ -37,13 +40,16 @@ type fakeQuerier struct { users []database.User // New tables - project []database.Project - projectHistory []database.ProjectHistory - projectParameter []database.ProjectParameter - workspace []database.Workspace - workspaceResource []database.WorkspaceResource - workspaceHistory []database.WorkspaceHistory - workspaceAgent []database.WorkspaceAgent + parameterValue []database.ParameterValue + project []database.Project + projectHistory []database.ProjectHistory + projectParameter []database.ProjectParameter + provisionerDaemons []database.ProvisionerDaemon + provisionerJobs []database.ProvisionerJob + workspace []database.Workspace + workspaceResource []database.WorkspaceResource + workspaceHistory []database.WorkspaceHistory + workspaceAgent []database.WorkspaceAgent } // InTx doesn't rollback data properly for in-memory yet. 
@@ -51,6 +57,31 @@ func (q *fakeQuerier) InTx(fn func(database.Store) error) error { return fn(q) } +func (q *fakeQuerier) AcquireProvisionerJob(_ context.Context, arg database.AcquireProvisionerJobParams) (database.ProvisionerJob, error) { + for index, provisionerJob := range q.provisionerJobs { + if provisionerJob.StartedAt.Valid { + continue + } + found := false + for _, provisionerType := range arg.Types { + if provisionerJob.Provisioner != provisionerType { + continue + } + found = true + break + } + if !found { + continue + } + provisionerJob.StartedAt = arg.StartedAt + provisionerJob.UpdatedAt = arg.StartedAt.Time + provisionerJob.WorkerID = arg.WorkerID + q.provisionerJobs[index] = provisionerJob + return provisionerJob, nil + } + return database.ProvisionerJob{}, sql.ErrNoRows +} + func (q *fakeQuerier) GetAPIKeyByID(_ context.Context, id string) (database.APIKey, error) { for _, apiKey := range q.apiKeys { if apiKey.ID == id { @@ -97,6 +128,15 @@ func (q *fakeQuerier) GetWorkspaceAgentsByResourceIDs(_ context.Context, ids []u return agents, nil } +func (q *fakeQuerier) GetWorkspaceByID(_ context.Context, id uuid.UUID) (database.Workspace, error) { + for _, workspace := range q.workspace { + if workspace.ID.String() == id.String() { + return workspace, nil + } + } + return database.Workspace{}, sql.ErrNoRows +} + func (q *fakeQuerier) GetWorkspaceByUserIDAndName(_ context.Context, arg database.GetWorkspaceByUserIDAndNameParams) (database.Workspace, error) { for _, workspace := range q.workspace { if workspace.OwnerID != arg.OwnerID { @@ -123,6 +163,15 @@ func (q *fakeQuerier) GetWorkspaceResourcesByHistoryID(_ context.Context, worksp return resources, nil } +func (q *fakeQuerier) GetWorkspaceHistoryByID(_ context.Context, id uuid.UUID) (database.WorkspaceHistory, error) { + for _, history := range q.workspaceHistory { + if history.ID.String() == id.String() { + return history, nil + } + } + return database.WorkspaceHistory{}, sql.ErrNoRows +} + func (q *fakeQuerier) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(_ context.Context, workspaceID uuid.UUID) (database.WorkspaceHistory, error) { for _, workspaceHistory := range q.workspaceHistory { if workspaceHistory.WorkspaceID.String() != workspaceID.String() { @@ -179,6 +228,15 @@ func (q *fakeQuerier) GetWorkspacesByUserID(_ context.Context, ownerID string) ( return workspaces, nil } +func (q *fakeQuerier) GetOrganizationByID(_ context.Context, id string) (database.Organization, error) { + for _, organization := range q.organizations { + if organization.ID == id { + return organization, nil + } + } + return database.Organization{}, sql.ErrNoRows +} + func (q *fakeQuerier) GetOrganizationByName(_ context.Context, name string) (database.Organization, error) { for _, organization := range q.organizations { if organization.Name == name { @@ -207,6 +265,23 @@ func (q *fakeQuerier) GetOrganizationsByUserID(_ context.Context, userID string) return organizations, nil } +func (q *fakeQuerier) GetParameterValuesByScope(_ context.Context, arg database.GetParameterValuesByScopeParams) ([]database.ParameterValue, error) { + parameterValues := make([]database.ParameterValue, 0) + for _, parameterValue := range q.parameterValue { + if parameterValue.Scope != arg.Scope { + continue + } + if parameterValue.ScopeID != arg.ScopeID { + continue + } + parameterValues = append(parameterValues, parameterValue) + } + if len(parameterValues) == 0 { + return nil, sql.ErrNoRows + } + return parameterValues, nil +} + func (q *fakeQuerier) GetProjectByID(_ 
context.Context, id uuid.UUID) (database.Project, error) { for _, project := range q.project { if project.ID.String() == id.String() { @@ -253,6 +328,20 @@ func (q *fakeQuerier) GetProjectHistoryByID(_ context.Context, projectHistoryID return database.ProjectHistory{}, sql.ErrNoRows } +func (q *fakeQuerier) GetProjectParametersByHistoryID(_ context.Context, projectHistoryID uuid.UUID) ([]database.ProjectParameter, error) { + parameters := make([]database.ProjectParameter, 0) + for _, projectParameter := range q.projectParameter { + if projectParameter.ProjectHistoryID.String() != projectHistoryID.String() { + continue + } + parameters = append(parameters, projectParameter) + } + if len(parameters) == 0 { + return nil, sql.ErrNoRows + } + return parameters, nil +} + func (q *fakeQuerier) GetProjectsByOrganizationIDs(_ context.Context, ids []string) ([]database.Project, error) { projects := make([]database.Project, 0) for _, project := range q.project { @@ -282,6 +371,26 @@ func (q *fakeQuerier) GetOrganizationMemberByUserID(_ context.Context, arg datab return database.OrganizationMember{}, sql.ErrNoRows } +func (q *fakeQuerier) GetProvisionerDaemonByID(_ context.Context, id uuid.UUID) (database.ProvisionerDaemon, error) { + for _, provisionerDaemon := range q.provisionerDaemons { + if provisionerDaemon.ID.String() != id.String() { + continue + } + return provisionerDaemon, nil + } + return database.ProvisionerDaemon{}, sql.ErrNoRows +} + +func (q *fakeQuerier) GetProvisionerJobByID(_ context.Context, id uuid.UUID) (database.ProvisionerJob, error) { + for _, provisionerJob := range q.provisionerJobs { + if provisionerJob.ID.String() != id.String() { + continue + } + return provisionerJob, nil + } + return database.ProvisionerJob{}, sql.ErrNoRows +} + func (q *fakeQuerier) InsertAPIKey(_ context.Context, arg database.InsertAPIKeyParams) (database.APIKey, error) { //nolint:gosimple key := database.APIKey{ @@ -329,6 +438,24 @@ func (q *fakeQuerier) InsertOrganizationMember(_ context.Context, arg database.I return organizationMember, nil } +func (q *fakeQuerier) InsertParameterValue(_ context.Context, arg database.InsertParameterValueParams) (database.ParameterValue, error) { + //nolint:gosimple + parameterValue := database.ParameterValue{ + ID: arg.ID, + Name: arg.Name, + CreatedAt: arg.CreatedAt, + UpdatedAt: arg.UpdatedAt, + Scope: arg.Scope, + ScopeID: arg.ScopeID, + SourceScheme: arg.SourceScheme, + SourceValue: arg.SourceValue, + DestinationScheme: arg.DestinationScheme, + DestinationValue: arg.DestinationValue, + } + q.parameterValue = append(q.parameterValue, parameterValue) + return parameterValue, nil +} + func (q *fakeQuerier) InsertProject(_ context.Context, arg database.InsertProjectParams) (database.Project, error) { project := database.Project{ ID: arg.ID, @@ -367,9 +494,11 @@ func (q *fakeQuerier) InsertProjectParameter(_ context.Context, arg database.Ins ProjectHistoryID: arg.ProjectHistoryID, Name: arg.Name, Description: arg.Description, - DefaultSource: arg.DefaultSource, + DefaultSourceScheme: arg.DefaultSourceScheme, + DefaultSourceValue: arg.DefaultSourceValue, AllowOverrideSource: arg.AllowOverrideSource, - DefaultDestination: arg.DefaultDestination, + DefaultDestinationScheme: arg.DefaultDestinationScheme, + DefaultDestinationValue: arg.DefaultDestinationValue, AllowOverrideDestination: arg.AllowOverrideDestination, DefaultRefresh: arg.DefaultRefresh, RedisplayValue: arg.RedisplayValue, @@ -382,6 +511,32 @@ func (q *fakeQuerier) InsertProjectParameter(_ context.Context, 
arg database.Ins return param, nil } +func (q *fakeQuerier) InsertProvisionerDaemon(_ context.Context, arg database.InsertProvisionerDaemonParams) (database.ProvisionerDaemon, error) { + daemon := database.ProvisionerDaemon{ + ID: arg.ID, + CreatedAt: arg.CreatedAt, + Name: arg.Name, + Provisioners: arg.Provisioners, + } + q.provisionerDaemons = append(q.provisionerDaemons, daemon) + return daemon, nil +} + +func (q *fakeQuerier) InsertProvisionerJob(_ context.Context, arg database.InsertProvisionerJobParams) (database.ProvisionerJob, error) { + job := database.ProvisionerJob{ + ID: arg.ID, + CreatedAt: arg.CreatedAt, + UpdatedAt: arg.UpdatedAt, + InitiatorID: arg.InitiatorID, + Provisioner: arg.Provisioner, + ProjectID: arg.ProjectID, + Type: arg.Type, + Input: arg.Input, + } + q.provisionerJobs = append(q.provisionerJobs, job) + return job, nil +} + func (q *fakeQuerier) InsertUser(_ context.Context, arg database.InsertUserParams) (database.User, error) { user := database.User{ ID: arg.ID, @@ -470,6 +625,34 @@ func (q *fakeQuerier) UpdateAPIKeyByID(_ context.Context, arg database.UpdateAPI return sql.ErrNoRows } +func (q *fakeQuerier) UpdateProvisionerDaemonByID(_ context.Context, arg database.UpdateProvisionerDaemonByIDParams) error { + for index, daemon := range q.provisionerDaemons { + if arg.ID.String() != daemon.ID.String() { + continue + } + daemon.UpdatedAt = arg.UpdatedAt + daemon.Provisioners = arg.Provisioners + q.provisionerDaemons[index] = daemon + return nil + } + return sql.ErrNoRows +} + +func (q *fakeQuerier) UpdateProvisionerJobByID(_ context.Context, arg database.UpdateProvisionerJobByIDParams) error { + for index, job := range q.provisionerJobs { + if arg.ID.String() != job.ID.String() { + continue + } + job.CompletedAt = arg.CompletedAt + job.CancelledAt = arg.CancelledAt + job.UpdatedAt = arg.UpdatedAt + job.Error = arg.Error + q.provisionerJobs[index] = job + return nil + } + return sql.ErrNoRows +} + func (q *fakeQuerier) UpdateWorkspaceHistoryByID(_ context.Context, arg database.UpdateWorkspaceHistoryByIDParams) error { for index, workspaceHistory := range q.workspaceHistory { if workspaceHistory.ID.String() != arg.ID.String() { diff --git a/database/dump.sql b/database/dump.sql index 9ba40007c5285..af4874e96db94 100644 --- a/database/dump.sql +++ b/database/dump.sql @@ -15,6 +15,22 @@ CREATE TYPE login_type AS ENUM ( 'oidc' ); +CREATE TYPE parameter_destination_scheme AS ENUM ( + 'environment_variable', + 'provisioner_variable' +); + +CREATE TYPE parameter_scope AS ENUM ( + 'organization', + 'project', + 'user', + 'workspace' +); + +CREATE TYPE parameter_source_scheme AS ENUM ( + 'data' +); + CREATE TYPE parameter_type_system AS ENUM ( 'hcl' ); @@ -23,6 +39,11 @@ CREATE TYPE project_storage_method AS ENUM ( 'inline-archive' ); +CREATE TYPE provisioner_job_type AS ENUM ( + 'project_import', + 'workspace_provision' +); + CREATE TYPE provisioner_type AS ENUM ( 'terraform', 'cdr-basic' @@ -86,6 +107,19 @@ CREATE TABLE organizations ( workspace_auto_off boolean DEFAULT false NOT NULL ); +CREATE TABLE parameter_value ( + id uuid NOT NULL, + name character varying(64) NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + scope parameter_scope NOT NULL, + scope_id text NOT NULL, + source_scheme parameter_source_scheme NOT NULL, + source_value text NOT NULL, + destination_scheme parameter_destination_scheme NOT NULL, + destination_value text NOT NULL +); + CREATE TABLE project ( id uuid NOT NULL, created_at timestamp 
with time zone NOT NULL, @@ -114,9 +148,11 @@ CREATE TABLE project_parameter ( project_history_id uuid NOT NULL, name character varying(64) NOT NULL, description character varying(8192) DEFAULT ''::character varying NOT NULL, - default_source text, + default_source_scheme parameter_source_scheme, + default_source_value text, allow_override_source boolean NOT NULL, - default_destination text, + default_destination_scheme parameter_destination_scheme, + default_destination_value text, allow_override_destination boolean NOT NULL, default_refresh text NOT NULL, redisplay_value boolean NOT NULL, @@ -126,6 +162,30 @@ CREATE TABLE project_parameter ( validation_value_type character varying(64) NOT NULL ); +CREATE TABLE provisioner_daemon ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone, + name character varying(64) NOT NULL, + provisioners provisioner_type[] NOT NULL +); + +CREATE TABLE provisioner_job ( + id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + started_at timestamp with time zone, + cancelled_at timestamp with time zone, + completed_at timestamp with time zone, + error text, + initiator_id text NOT NULL, + provisioner provisioner_type NOT NULL, + type provisioner_job_type NOT NULL, + project_id uuid NOT NULL, + input jsonb NOT NULL, + worker_id uuid +); + CREATE TABLE users ( id text NOT NULL, email text NOT NULL, @@ -200,6 +260,12 @@ CREATE TABLE workspace_resource ( workspace_agent_id uuid ); +ALTER TABLE ONLY parameter_value + ADD CONSTRAINT parameter_value_id_key UNIQUE (id); + +ALTER TABLE ONLY parameter_value + ADD CONSTRAINT parameter_value_name_scope_scope_id_key UNIQUE (name, scope, scope_id); + ALTER TABLE ONLY project_history ADD CONSTRAINT project_history_id_key UNIQUE (id); @@ -218,6 +284,15 @@ ALTER TABLE ONLY project_parameter ALTER TABLE ONLY project_parameter ADD CONSTRAINT project_parameter_project_history_id_name_key UNIQUE (project_history_id, name); +ALTER TABLE ONLY provisioner_daemon + ADD CONSTRAINT provisioner_daemon_id_key UNIQUE (id); + +ALTER TABLE ONLY provisioner_daemon + ADD CONSTRAINT provisioner_daemon_name_key UNIQUE (name); + +ALTER TABLE ONLY provisioner_job + ADD CONSTRAINT provisioner_job_id_key UNIQUE (id); + ALTER TABLE ONLY workspace_agent ADD CONSTRAINT workspace_agent_id_key UNIQUE (id); @@ -244,6 +319,9 @@ ALTER TABLE ONLY project_history ALTER TABLE ONLY project_parameter ADD CONSTRAINT project_parameter_project_history_id_fkey FOREIGN KEY (project_history_id) REFERENCES project_history(id) ON DELETE CASCADE; +ALTER TABLE ONLY provisioner_job + ADD CONSTRAINT provisioner_job_project_id_fkey FOREIGN KEY (project_id) REFERENCES project(id) ON DELETE CASCADE; + ALTER TABLE ONLY workspace_agent ADD CONSTRAINT workspace_agent_workspace_resource_id_fkey FOREIGN KEY (workspace_resource_id) REFERENCES workspace_resource(id) ON DELETE CASCADE; diff --git a/database/migrations/000002_projects.up.sql b/database/migrations/000002_projects.up.sql index 3483dcd9ff858..251b368ef3701 100644 --- a/database/migrations/000002_projects.up.sql +++ b/database/migrations/000002_projects.up.sql @@ -48,6 +48,12 @@ CREATE TABLE project_history ( -- Types of parameters the automator supports. CREATE TYPE parameter_type_system AS ENUM ('hcl'); +-- Supported schemes for a parameter source. +CREATE TYPE parameter_source_scheme AS ENUM('data'); + +-- Supported schemes for a parameter destination. 
+CREATE TYPE parameter_destination_scheme AS ENUM('environment_variable', 'provisioner_variable'); + -- Stores project version parameters parsed on import. -- No secrets are stored here. -- @@ -65,11 +71,13 @@ CREATE TABLE project_parameter ( -- 8KB limit description varchar(8192) NOT NULL DEFAULT '', -- eg. data://inlinevalue - default_source text, + default_source_scheme parameter_source_scheme, + default_source_value text, -- Allows the user to override the source. allow_override_source boolean NOT null, -- eg. env://SOME_VARIABLE, tfvars://example - default_destination text, + default_destination_scheme parameter_destination_scheme, + default_destination_value text, -- Allows the user to override the destination. allow_override_destination boolean NOT null, default_refresh text NOT NULL, diff --git a/database/migrations/000004_jobs.down.sql b/database/migrations/000004_jobs.down.sql new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/database/migrations/000004_jobs.up.sql b/database/migrations/000004_jobs.up.sql new file mode 100644 index 0000000000000..7f767e7dfd983 --- /dev/null +++ b/database/migrations/000004_jobs.up.sql @@ -0,0 +1,53 @@ +CREATE TABLE IF NOT EXISTS provisioner_daemon ( + id uuid NOT NULL UNIQUE, + created_at timestamptz NOT NULL, + updated_at timestamptz, + -- Name is generated for ease of differentiation. + -- eg. WowBananas16 + name varchar(64) NOT NULL UNIQUE, + provisioners provisioner_type [ ] NOT NULL +); + +CREATE TYPE provisioner_job_type AS ENUM ( + 'project_import', + 'workspace_provision' +); + +CREATE TABLE IF NOT EXISTS provisioner_job ( + id uuid NOT NULL UNIQUE, + created_at timestamptz NOT NULL, + updated_at timestamptz NOT NULL, + started_at timestamptz, + cancelled_at timestamptz, + completed_at timestamptz, + error text, + initiator_id text NOT NULL, + provisioner provisioner_type NOT NULL, + type provisioner_job_type NOT NULL, + project_id uuid NOT NULL REFERENCES project(id) ON DELETE CASCADE, + input jsonb NOT NULL, + worker_id uuid +); + +CREATE TYPE parameter_scope AS ENUM ( + 'organization', + 'project', + 'user', + 'workspace' +); + +-- Parameters are provided to jobs for provisioning and to workspaces. +CREATE TABLE parameter_value ( + id uuid NOT NULL UNIQUE, + name varchar(64) NOT NULL, + created_at timestamptz NOT NULL, + updated_at timestamptz NOT NULL, + scope parameter_scope NOT NULL, + scope_id text NOT NULL, + source_scheme parameter_source_scheme NOT NULL, + source_value text NOT NULL, + destination_scheme parameter_destination_scheme NOT NULL, + destination_value text NOT NULL, + -- Prevents duplicates for parameters in the same scope. + UNIQUE(name, scope, scope_id) +); \ No newline at end of file diff --git a/database/migrations/create_migration.sh b/database/migrations/create_migration.sh index 68c17eb3a62a1..d063ea1eec562 100755 --- a/database/migrations/create_migration.sh +++ b/database/migrations/create_migration.sh @@ -8,4 +8,4 @@ fi migrate create -ext sql -dir . -seq $1 -echo "After making adjustments, run \"make database/generate\" to generate models." +echo "Run \"make gen\" to generate models." 
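The 000004_jobs migration above is the backing store for the job queue mentioned in the commit message: a pending job is a provisioner_job row whose started_at is still NULL, AcquireProvisionerJob (added to the queries later in this patch) claims one such row atomically and stamps started_at and worker_id, and UpdateProvisionerJobByID records completion or an error. As a rough sketch of the intended usage, assuming the generated database package from this change, a daemon loop could look like the following; the loop shape, polling interval, and run callback are illustrative only and not part of the patch.

// Sketch: how a provisioner daemon might poll the job queue defined above.
// The loop shape, polling interval, and run callback are assumptions layered
// on top of the generated database package from this change.
package provisionerd

import (
	"context"
	"database/sql"
	"errors"
	"time"

	"github.com/google/uuid"

	"github.com/coder/coder/database"
)

// pollJobs claims pending provisioner_job rows one at a time. A pending job is
// a row with started_at IS NULL; AcquireProvisionerJob stamps started_at and
// worker_id in a single UPDATE so concurrent daemons never claim the same row.
func pollJobs(ctx context.Context, store database.Store, workerID uuid.UUID,
	types []database.ProvisionerType,
	run func(context.Context, database.ProvisionerJob) error) error {
	for {
		job, err := store.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{
			StartedAt: sql.NullTime{Time: time.Now(), Valid: true},
			WorkerID:  uuid.NullUUID{UUID: workerID, Valid: true},
			Types:     types,
		})
		if errors.Is(err, sql.ErrNoRows) {
			// Nothing queued for our provisioner types; wait and poll again.
			select {
			case <-ctx.Done():
				return ctx.Err()
			case <-time.After(time.Second):
				continue
			}
		}
		if err != nil {
			return err
		}
		// Execute the job, then record the outcome on the same row.
		update := database.UpdateProvisionerJobByIDParams{
			ID:          job.ID,
			UpdatedAt:   time.Now(),
			CompletedAt: sql.NullTime{Time: time.Now(), Valid: true},
		}
		if runErr := run(ctx, job); runErr != nil {
			update.Error = sql.NullString{String: runErr.Error(), Valid: true}
		}
		if err := store.UpdateProvisionerJobByID(ctx, update); err != nil {
			return err
		}
	}
}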
diff --git a/database/models.go b/database/models.go index 58f0e05f28b9f..3b9fdfcd83668 100644 --- a/database/models.go +++ b/database/models.go @@ -54,6 +54,64 @@ func (e *LoginType) Scan(src interface{}) error { return nil } +type ParameterDestinationScheme string + +const ( + ParameterDestinationSchemeEnvironmentVariable ParameterDestinationScheme = "environment_variable" + ParameterDestinationSchemeProvisionerVariable ParameterDestinationScheme = "provisioner_variable" +) + +func (e *ParameterDestinationScheme) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = ParameterDestinationScheme(s) + case string: + *e = ParameterDestinationScheme(s) + default: + return fmt.Errorf("unsupported scan type for ParameterDestinationScheme: %T", src) + } + return nil +} + +type ParameterScope string + +const ( + ParameterScopeOrganization ParameterScope = "organization" + ParameterScopeProject ParameterScope = "project" + ParameterScopeUser ParameterScope = "user" + ParameterScopeWorkspace ParameterScope = "workspace" +) + +func (e *ParameterScope) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = ParameterScope(s) + case string: + *e = ParameterScope(s) + default: + return fmt.Errorf("unsupported scan type for ParameterScope: %T", src) + } + return nil +} + +type ParameterSourceScheme string + +const ( + ParameterSourceSchemeData ParameterSourceScheme = "data" +) + +func (e *ParameterSourceScheme) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = ParameterSourceScheme(s) + case string: + *e = ParameterSourceScheme(s) + default: + return fmt.Errorf("unsupported scan type for ParameterSourceScheme: %T", src) + } + return nil +} + type ParameterTypeSystem string const ( @@ -90,6 +148,25 @@ func (e *ProjectStorageMethod) Scan(src interface{}) error { return nil } +type ProvisionerJobType string + +const ( + ProvisionerJobTypeProjectImport ProvisionerJobType = "project_import" + ProvisionerJobTypeWorkspaceProvision ProvisionerJobType = "workspace_provision" +) + +func (e *ProvisionerJobType) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = ProvisionerJobType(s) + case string: + *e = ProvisionerJobType(s) + default: + return fmt.Errorf("unsupported scan type for ProvisionerJobType: %T", src) + } + return nil +} + type ProvisionerType string const ( @@ -195,6 +272,19 @@ type OrganizationMember struct { Roles []string `db:"roles" json:"roles"` } +type ParameterValue struct { + ID uuid.UUID `db:"id" json:"id"` + Name string `db:"name" json:"name"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + Scope ParameterScope `db:"scope" json:"scope"` + ScopeID string `db:"scope_id" json:"scope_id"` + SourceScheme ParameterSourceScheme `db:"source_scheme" json:"source_scheme"` + SourceValue string `db:"source_value" json:"source_value"` + DestinationScheme ParameterDestinationScheme `db:"destination_scheme" json:"destination_scheme"` + DestinationValue string `db:"destination_value" json:"destination_value"` +} + type Project struct { ID uuid.UUID `db:"id" json:"id"` CreatedAt time.Time `db:"created_at" json:"created_at"` @@ -218,21 +308,47 @@ type ProjectHistory struct { } type ProjectParameter struct { - ID uuid.UUID `db:"id" json:"id"` - CreatedAt time.Time `db:"created_at" json:"created_at"` - ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` - Name string `db:"name" json:"name"` - Description string 
`db:"description" json:"description"` - DefaultSource sql.NullString `db:"default_source" json:"default_source"` - AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` - DefaultDestination sql.NullString `db:"default_destination" json:"default_destination"` - AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` - DefaultRefresh string `db:"default_refresh" json:"default_refresh"` - RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` - ValidationError string `db:"validation_error" json:"validation_error"` - ValidationCondition string `db:"validation_condition" json:"validation_condition"` - ValidationTypeSystem ParameterTypeSystem `db:"validation_type_system" json:"validation_type_system"` - ValidationValueType string `db:"validation_value_type" json:"validation_value_type"` + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` + Name string `db:"name" json:"name"` + Description string `db:"description" json:"description"` + DefaultSourceScheme ParameterSourceScheme `db:"default_source_scheme" json:"default_source_scheme"` + DefaultSourceValue sql.NullString `db:"default_source_value" json:"default_source_value"` + AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` + DefaultDestinationScheme ParameterDestinationScheme `db:"default_destination_scheme" json:"default_destination_scheme"` + DefaultDestinationValue sql.NullString `db:"default_destination_value" json:"default_destination_value"` + AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` + DefaultRefresh string `db:"default_refresh" json:"default_refresh"` + RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` + ValidationError string `db:"validation_error" json:"validation_error"` + ValidationCondition string `db:"validation_condition" json:"validation_condition"` + ValidationTypeSystem ParameterTypeSystem `db:"validation_type_system" json:"validation_type_system"` + ValidationValueType string `db:"validation_value_type" json:"validation_value_type"` +} + +type ProvisionerDaemon struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt sql.NullTime `db:"updated_at" json:"updated_at"` + Name string `db:"name" json:"name"` + Provisioners []ProvisionerType `db:"provisioners" json:"provisioners"` +} + +type ProvisionerJob struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + StartedAt sql.NullTime `db:"started_at" json:"started_at"` + CancelledAt sql.NullTime `db:"cancelled_at" json:"cancelled_at"` + CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"` + Error sql.NullString `db:"error" json:"error"` + InitiatorID string `db:"initiator_id" json:"initiator_id"` + Provisioner ProvisionerType `db:"provisioner" json:"provisioner"` + Type ProvisionerJobType `db:"type" json:"type"` + ProjectID uuid.UUID `db:"project_id" json:"project_id"` + Input json.RawMessage `db:"input" json:"input"` + WorkerID uuid.NullUUID `db:"worker_id" json:"worker_id"` } type User struct { diff --git a/database/querier.go b/database/querier.go index 64b26cbdaf4da..1c908c186c544 100644 --- a/database/querier.go +++ b/database/querier.go @@ -9,20 +9,28 @@ import ( ) type querier interface { + 
AcquireProvisionerJob(ctx context.Context, arg AcquireProvisionerJobParams) (ProvisionerJob, error) GetAPIKeyByID(ctx context.Context, id string) (APIKey, error) + GetOrganizationByID(ctx context.Context, id string) (Organization, error) GetOrganizationByName(ctx context.Context, name string) (Organization, error) GetOrganizationMemberByUserID(ctx context.Context, arg GetOrganizationMemberByUserIDParams) (OrganizationMember, error) GetOrganizationsByUserID(ctx context.Context, userID string) ([]Organization, error) + GetParameterValuesByScope(ctx context.Context, arg GetParameterValuesByScopeParams) ([]ParameterValue, error) GetProjectByID(ctx context.Context, id uuid.UUID) (Project, error) GetProjectByOrganizationAndName(ctx context.Context, arg GetProjectByOrganizationAndNameParams) (Project, error) GetProjectHistoryByID(ctx context.Context, id uuid.UUID) (ProjectHistory, error) GetProjectHistoryByProjectID(ctx context.Context, projectID uuid.UUID) ([]ProjectHistory, error) + GetProjectParametersByHistoryID(ctx context.Context, projectHistoryID uuid.UUID) ([]ProjectParameter, error) GetProjectsByOrganizationIDs(ctx context.Context, ids []string) ([]Project, error) + GetProvisionerDaemonByID(ctx context.Context, id uuid.UUID) (ProvisionerDaemon, error) + GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) GetUserByEmailOrUsername(ctx context.Context, arg GetUserByEmailOrUsernameParams) (User, error) GetUserByID(ctx context.Context, id string) (User, error) GetUserCount(ctx context.Context) (int64, error) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids []uuid.UUID) ([]WorkspaceAgent, error) + GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Workspace, error) GetWorkspaceByUserIDAndName(ctx context.Context, arg GetWorkspaceByUserIDAndNameParams) (Workspace, error) + GetWorkspaceHistoryByID(ctx context.Context, id uuid.UUID) (WorkspaceHistory, error) GetWorkspaceHistoryByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceHistory, error) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(ctx context.Context, workspaceID uuid.UUID) (WorkspaceHistory, error) GetWorkspaceResourcesByHistoryID(ctx context.Context, workspaceHistoryID uuid.UUID) ([]WorkspaceResource, error) @@ -31,15 +39,20 @@ type querier interface { InsertAPIKey(ctx context.Context, arg InsertAPIKeyParams) (APIKey, error) InsertOrganization(ctx context.Context, arg InsertOrganizationParams) (Organization, error) InsertOrganizationMember(ctx context.Context, arg InsertOrganizationMemberParams) (OrganizationMember, error) + InsertParameterValue(ctx context.Context, arg InsertParameterValueParams) (ParameterValue, error) InsertProject(ctx context.Context, arg InsertProjectParams) (Project, error) InsertProjectHistory(ctx context.Context, arg InsertProjectHistoryParams) (ProjectHistory, error) InsertProjectParameter(ctx context.Context, arg InsertProjectParameterParams) (ProjectParameter, error) + InsertProvisionerDaemon(ctx context.Context, arg InsertProvisionerDaemonParams) (ProvisionerDaemon, error) + InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) InsertUser(ctx context.Context, arg InsertUserParams) (User, error) InsertWorkspace(ctx context.Context, arg InsertWorkspaceParams) (Workspace, error) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspaceAgentParams) (WorkspaceAgent, error) InsertWorkspaceHistory(ctx context.Context, arg InsertWorkspaceHistoryParams) (WorkspaceHistory, error) InsertWorkspaceResource(ctx 
context.Context, arg InsertWorkspaceResourceParams) (WorkspaceResource, error) UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error + UpdateProvisionerDaemonByID(ctx context.Context, arg UpdateProvisionerDaemonByIDParams) error + UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error UpdateWorkspaceHistoryByID(ctx context.Context, arg UpdateWorkspaceHistoryByIDParams) error } diff --git a/database/query.sql b/database/query.sql index 6ed73a070edcd..b361bbe3094d0 100644 --- a/database/query.sql +++ b/database/query.sql @@ -4,6 +4,32 @@ -- Run "make gen" to generate models and query functions. ; +-- name: AcquireProvisionerJob :one +UPDATE + provisioner_job +SET + started_at = @started_at, + updated_at = @started_at, + worker_id = @worker_id +WHERE + id = ( + SELECT + id + FROM + provisioner_job AS nested + WHERE + nested.started_at IS NULL + AND nested.cancelled_at IS NULL + AND nested.completed_at IS NULL + AND nested.provisioner = ANY(@types :: provisioner_type [ ]) + ORDER BY + nested.created FOR + UPDATE + SKIP LOCKED + LIMIT + 1 + ) RETURNING *; + -- name: GetAPIKeyByID :one SELECT * @@ -41,6 +67,14 @@ SELECT FROM users; +-- name: GetOrganizationByID :one +SELECT + * +FROM + organizations +WHERE + id = $1; + -- name: GetOrganizationByName :one SELECT * @@ -77,6 +111,15 @@ WHERE LIMIT 1; +-- name: GetParameterValuesByScope :many +SELECT + * +FROM + parameter_value +WHERE + scope = $1 + AND scope_id = $2; + -- name: GetProjectByID :one SELECT * @@ -106,6 +149,14 @@ FROM WHERE organization_id = ANY(@ids :: text [ ]); +-- name: GetProjectParametersByHistoryID :many +SELECT + * +FROM + project_parameter +WHERE + project_history_id = $1; + -- name: GetProjectHistoryByProjectID :many SELECT * @@ -122,6 +173,32 @@ FROM WHERE id = $1; +-- name: GetProvisionerDaemonByID :one +SELECT + * +FROM + provisioner_daemon +WHERE + id = $1; + +-- name: GetProvisionerJobByID :one +SELECT + * +FROM + provisioner_job +WHERE + id = $1; + +-- name: GetWorkspaceByID :one +SELECT + * +FROM + workspace +WHERE + id = $1 +LIMIT + 1; + -- name: GetWorkspacesByUserID :many SELECT * @@ -148,6 +225,16 @@ WHERE owner_id = $1 AND project_id = $2; +-- name: GetWorkspaceHistoryByID :one +SELECT + * +FROM + workspace_history +WHERE + id = $1 +LIMIT + 1; + -- name: GetWorkspaceHistoryByWorkspaceID :many SELECT * @@ -239,6 +326,23 @@ INSERT INTO VALUES ($1, $2, $3, $4, $5) RETURNING *; +-- name: InsertParameterValue :one +INSERT INTO + parameter_value ( + id, + name, + created_at, + updated_at, + scope, + scope_id, + source_scheme, + source_value, + destination_scheme, + destination_value + ) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING *; + -- name: InsertProject :one INSERT INTO project ( @@ -276,9 +380,11 @@ INSERT INTO project_history_id, name, description, - default_source, + default_source_scheme, + default_source_value, allow_override_source, - default_destination, + default_destination_scheme, + default_destination_value, allow_override_destination, default_refresh, redisplay_value, @@ -303,9 +409,32 @@ VALUES $12, $13, $14, - $15 + $15, + $16, + $17 ) RETURNING *; +-- name: InsertProvisionerDaemon :one +INSERT INTO + provisioner_daemon (id, created_at, name, provisioners) +VALUES + ($1, $2, $3, $4) RETURNING *; + +-- name: InsertProvisionerJob :one +INSERT INTO + provisioner_job ( + id, + created_at, + updated_at, + initiator_id, + provisioner, + type, + project_id, + input + ) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING *; + -- name: 
InsertUser :one INSERT INTO users ( @@ -389,6 +518,26 @@ SET WHERE id = $1; +-- name: UpdateProvisionerDaemonByID :exec +UPDATE + provisioner_daemon +SET + updated_at = $2, + provisioners = $3 +WHERE + id = $1; + +-- name: UpdateProvisionerJobByID :exec +UPDATE + provisioner_job +SET + updated_at = $2, + cancelled_at = $3, + completed_at = $4, + error = $5 +WHERE + id = $1; + -- name: UpdateWorkspaceHistoryByID :exec UPDATE workspace_history diff --git a/database/query.sql.go b/database/query.sql.go index abb5aea348521..cb49f86eb67f0 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -13,6 +13,60 @@ import ( "github.com/lib/pq" ) +const acquireProvisionerJob = `-- name: AcquireProvisionerJob :one +UPDATE + provisioner_job +SET + started_at = $1, + updated_at = $1, + worker_id = $2 +WHERE + id = ( + SELECT + id + FROM + provisioner_job AS nested + WHERE + nested.started_at IS NULL + AND nested.cancelled_at IS NULL + AND nested.completed_at IS NULL + AND nested.provisioner = ANY($3 :: provisioner_type [ ]) + ORDER BY + nested.created FOR + UPDATE + SKIP LOCKED + LIMIT + 1 + ) RETURNING id, created_at, updated_at, started_at, cancelled_at, completed_at, error, initiator_id, provisioner, type, project_id, input, worker_id +` + +type AcquireProvisionerJobParams struct { + StartedAt sql.NullTime `db:"started_at" json:"started_at"` + WorkerID uuid.NullUUID `db:"worker_id" json:"worker_id"` + Types []ProvisionerType `db:"types" json:"types"` +} + +func (q *sqlQuerier) AcquireProvisionerJob(ctx context.Context, arg AcquireProvisionerJobParams) (ProvisionerJob, error) { + row := q.db.QueryRowContext(ctx, acquireProvisionerJob, arg.StartedAt, arg.WorkerID, pq.Array(arg.Types)) + var i ProvisionerJob + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.StartedAt, + &i.CancelledAt, + &i.CompletedAt, + &i.Error, + &i.InitiatorID, + &i.Provisioner, + &i.Type, + &i.ProjectID, + &i.Input, + &i.WorkerID, + ) + return i, err +} + const getAPIKeyByID = `-- name: GetAPIKeyByID :one SELECT id, hashed_secret, user_id, application, name, last_used, expires_at, created_at, updated_at, login_type, oidc_access_token, oidc_refresh_token, oidc_id_token, oidc_expiry, devurl_token @@ -47,6 +101,33 @@ func (q *sqlQuerier) GetAPIKeyByID(ctx context.Context, id string) (APIKey, erro return i, err } +const getOrganizationByID = `-- name: GetOrganizationByID :one +SELECT + id, name, description, created_at, updated_at, "default", auto_off_threshold, cpu_provisioning_rate, memory_provisioning_rate, workspace_auto_off +FROM + organizations +WHERE + id = $1 +` + +func (q *sqlQuerier) GetOrganizationByID(ctx context.Context, id string) (Organization, error) { + row := q.db.QueryRowContext(ctx, getOrganizationByID, id) + var i Organization + err := row.Scan( + &i.ID, + &i.Name, + &i.Description, + &i.CreatedAt, + &i.UpdatedAt, + &i.Default, + &i.AutoOffThreshold, + &i.CpuProvisioningRate, + &i.MemoryProvisioningRate, + &i.WorkspaceAutoOff, + ) + return i, err +} + const getOrganizationByName = `-- name: GetOrganizationByName :one SELECT id, name, description, created_at, updated_at, "default", auto_off_threshold, cpu_provisioning_rate, memory_provisioning_rate, workspace_auto_off @@ -156,6 +237,55 @@ func (q *sqlQuerier) GetOrganizationsByUserID(ctx context.Context, userID string return items, nil } +const getParameterValuesByScope = `-- name: GetParameterValuesByScope :many +SELECT + id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme, destination_value 
+FROM + parameter_value +WHERE + scope = $1 + AND scope_id = $2 +` + +type GetParameterValuesByScopeParams struct { + Scope ParameterScope `db:"scope" json:"scope"` + ScopeID string `db:"scope_id" json:"scope_id"` +} + +func (q *sqlQuerier) GetParameterValuesByScope(ctx context.Context, arg GetParameterValuesByScopeParams) ([]ParameterValue, error) { + rows, err := q.db.QueryContext(ctx, getParameterValuesByScope, arg.Scope, arg.ScopeID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ParameterValue + for rows.Next() { + var i ParameterValue + if err := rows.Scan( + &i.ID, + &i.Name, + &i.CreatedAt, + &i.UpdatedAt, + &i.Scope, + &i.ScopeID, + &i.SourceScheme, + &i.SourceValue, + &i.DestinationScheme, + &i.DestinationValue, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getProjectByID = `-- name: GetProjectByID :one SELECT id, created_at, updated_at, organization_id, name, provisioner, active_version_id @@ -282,6 +412,56 @@ func (q *sqlQuerier) GetProjectHistoryByProjectID(ctx context.Context, projectID return items, nil } +const getProjectParametersByHistoryID = `-- name: GetProjectParametersByHistoryID :many +SELECT + id, created_at, project_history_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, default_destination_value, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type +FROM + project_parameter +WHERE + project_history_id = $1 +` + +func (q *sqlQuerier) GetProjectParametersByHistoryID(ctx context.Context, projectHistoryID uuid.UUID) ([]ProjectParameter, error) { + rows, err := q.db.QueryContext(ctx, getProjectParametersByHistoryID, projectHistoryID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ProjectParameter + for rows.Next() { + var i ProjectParameter + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.ProjectHistoryID, + &i.Name, + &i.Description, + &i.DefaultSourceScheme, + &i.DefaultSourceValue, + &i.AllowOverrideSource, + &i.DefaultDestinationScheme, + &i.DefaultDestinationValue, + &i.AllowOverrideDestination, + &i.DefaultRefresh, + &i.RedisplayValue, + &i.ValidationError, + &i.ValidationCondition, + &i.ValidationTypeSystem, + &i.ValidationValueType, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getProjectsByOrganizationIDs = `-- name: GetProjectsByOrganizationIDs :many SELECT id, created_at, updated_at, organization_id, name, provisioner, active_version_id @@ -322,6 +502,58 @@ func (q *sqlQuerier) GetProjectsByOrganizationIDs(ctx context.Context, ids []str return items, nil } +const getProvisionerDaemonByID = `-- name: GetProvisionerDaemonByID :one +SELECT + id, created_at, updated_at, name, provisioners +FROM + provisioner_daemon +WHERE + id = $1 +` + +func (q *sqlQuerier) GetProvisionerDaemonByID(ctx context.Context, id uuid.UUID) (ProvisionerDaemon, error) { + row := q.db.QueryRowContext(ctx, getProvisionerDaemonByID, id) + var i ProvisionerDaemon + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.Name, + pq.Array(&i.Provisioners), + ) + return i, err +} + +const getProvisionerJobByID = `-- 
name: GetProvisionerJobByID :one +SELECT + id, created_at, updated_at, started_at, cancelled_at, completed_at, error, initiator_id, provisioner, type, project_id, input, worker_id +FROM + provisioner_job +WHERE + id = $1 +` + +func (q *sqlQuerier) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) { + row := q.db.QueryRowContext(ctx, getProvisionerJobByID, id) + var i ProvisionerJob + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.StartedAt, + &i.CancelledAt, + &i.CompletedAt, + &i.Error, + &i.InitiatorID, + &i.Provisioner, + &i.Type, + &i.ProjectID, + &i.Input, + &i.WorkerID, + ) + return i, err +} + const getUserByEmailOrUsername = `-- name: GetUserByEmailOrUsername :one SELECT id, email, name, revoked, login_type, hashed_password, created_at, updated_at, temporary_password, avatar_hash, ssh_key_regenerated_at, username, dotfiles_git_uri, roles, status, relatime, gpg_key_regenerated_at, _decomissioned, shell @@ -457,6 +689,31 @@ func (q *sqlQuerier) GetWorkspaceAgentsByResourceIDs(ctx context.Context, ids [] return items, nil } +const getWorkspaceByID = `-- name: GetWorkspaceByID :one +SELECT + id, created_at, updated_at, owner_id, project_id, name +FROM + workspace +WHERE + id = $1 +LIMIT + 1 +` + +func (q *sqlQuerier) GetWorkspaceByID(ctx context.Context, id uuid.UUID) (Workspace, error) { + row := q.db.QueryRowContext(ctx, getWorkspaceByID, id) + var i Workspace + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.OwnerID, + &i.ProjectID, + &i.Name, + ) + return i, err +} + const getWorkspaceByUserIDAndName = `-- name: GetWorkspaceByUserIDAndName :one SELECT id, created_at, updated_at, owner_id, project_id, name @@ -486,6 +743,37 @@ func (q *sqlQuerier) GetWorkspaceByUserIDAndName(ctx context.Context, arg GetWor return i, err } +const getWorkspaceHistoryByID = `-- name: GetWorkspaceHistoryByID :one +SELECT + id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id +FROM + workspace_history +WHERE + id = $1 +LIMIT + 1 +` + +func (q *sqlQuerier) GetWorkspaceHistoryByID(ctx context.Context, id uuid.UUID) (WorkspaceHistory, error) { + row := q.db.QueryRowContext(ctx, getWorkspaceHistoryByID, id) + var i WorkspaceHistory + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CompletedAt, + &i.WorkspaceID, + &i.ProjectHistoryID, + &i.BeforeID, + &i.AfterID, + &i.Transition, + &i.Initiator, + &i.ProvisionerState, + &i.ProvisionJobID, + ) + return i, err +} + const getWorkspaceHistoryByWorkspaceID = `-- name: GetWorkspaceHistoryByWorkspaceID :many SELECT id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id @@ -862,6 +1150,66 @@ func (q *sqlQuerier) InsertOrganizationMember(ctx context.Context, arg InsertOrg return i, err } +const insertParameterValue = `-- name: InsertParameterValue :one +INSERT INTO + parameter_value ( + id, + name, + created_at, + updated_at, + scope, + scope_id, + source_scheme, + source_value, + destination_scheme, + destination_value + ) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, name, created_at, updated_at, scope, scope_id, source_scheme, source_value, destination_scheme, destination_value +` + +type InsertParameterValueParams struct { + ID uuid.UUID `db:"id" json:"id"` + Name string `db:"name" json:"name"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + UpdatedAt 
time.Time `db:"updated_at" json:"updated_at"` + Scope ParameterScope `db:"scope" json:"scope"` + ScopeID string `db:"scope_id" json:"scope_id"` + SourceScheme ParameterSourceScheme `db:"source_scheme" json:"source_scheme"` + SourceValue string `db:"source_value" json:"source_value"` + DestinationScheme ParameterDestinationScheme `db:"destination_scheme" json:"destination_scheme"` + DestinationValue string `db:"destination_value" json:"destination_value"` +} + +func (q *sqlQuerier) InsertParameterValue(ctx context.Context, arg InsertParameterValueParams) (ParameterValue, error) { + row := q.db.QueryRowContext(ctx, insertParameterValue, + arg.ID, + arg.Name, + arg.CreatedAt, + arg.UpdatedAt, + arg.Scope, + arg.ScopeID, + arg.SourceScheme, + arg.SourceValue, + arg.DestinationScheme, + arg.DestinationValue, + ) + var i ParameterValue + err := row.Scan( + &i.ID, + &i.Name, + &i.CreatedAt, + &i.UpdatedAt, + &i.Scope, + &i.ScopeID, + &i.SourceScheme, + &i.SourceValue, + &i.DestinationScheme, + &i.DestinationValue, + ) + return i, err +} + const insertProject = `-- name: InsertProject :one INSERT INTO project ( @@ -971,9 +1319,11 @@ INSERT INTO project_history_id, name, description, - default_source, + default_source_scheme, + default_source_value, allow_override_source, - default_destination, + default_destination_scheme, + default_destination_value, allow_override_destination, default_refresh, redisplay_value, @@ -998,26 +1348,30 @@ VALUES $12, $13, $14, - $15 - ) RETURNING id, created_at, project_history_id, name, description, default_source, allow_override_source, default_destination, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type + $15, + $16, + $17 + ) RETURNING id, created_at, project_history_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, default_destination_value, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type ` type InsertProjectParameterParams struct { - ID uuid.UUID `db:"id" json:"id"` - CreatedAt time.Time `db:"created_at" json:"created_at"` - ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` - Name string `db:"name" json:"name"` - Description string `db:"description" json:"description"` - DefaultSource sql.NullString `db:"default_source" json:"default_source"` - AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` - DefaultDestination sql.NullString `db:"default_destination" json:"default_destination"` - AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` - DefaultRefresh string `db:"default_refresh" json:"default_refresh"` - RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` - ValidationError string `db:"validation_error" json:"validation_error"` - ValidationCondition string `db:"validation_condition" json:"validation_condition"` - ValidationTypeSystem ParameterTypeSystem `db:"validation_type_system" json:"validation_type_system"` - ValidationValueType string `db:"validation_value_type" json:"validation_value_type"` + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` + Name string `db:"name" json:"name"` + Description string `db:"description" json:"description"` + 
DefaultSourceScheme ParameterSourceScheme `db:"default_source_scheme" json:"default_source_scheme"` + DefaultSourceValue sql.NullString `db:"default_source_value" json:"default_source_value"` + AllowOverrideSource bool `db:"allow_override_source" json:"allow_override_source"` + DefaultDestinationScheme ParameterDestinationScheme `db:"default_destination_scheme" json:"default_destination_scheme"` + DefaultDestinationValue sql.NullString `db:"default_destination_value" json:"default_destination_value"` + AllowOverrideDestination bool `db:"allow_override_destination" json:"allow_override_destination"` + DefaultRefresh string `db:"default_refresh" json:"default_refresh"` + RedisplayValue bool `db:"redisplay_value" json:"redisplay_value"` + ValidationError string `db:"validation_error" json:"validation_error"` + ValidationCondition string `db:"validation_condition" json:"validation_condition"` + ValidationTypeSystem ParameterTypeSystem `db:"validation_type_system" json:"validation_type_system"` + ValidationValueType string `db:"validation_value_type" json:"validation_value_type"` } func (q *sqlQuerier) InsertProjectParameter(ctx context.Context, arg InsertProjectParameterParams) (ProjectParameter, error) { @@ -1027,9 +1381,11 @@ func (q *sqlQuerier) InsertProjectParameter(ctx context.Context, arg InsertProje arg.ProjectHistoryID, arg.Name, arg.Description, - arg.DefaultSource, + arg.DefaultSourceScheme, + arg.DefaultSourceValue, arg.AllowOverrideSource, - arg.DefaultDestination, + arg.DefaultDestinationScheme, + arg.DefaultDestinationValue, arg.AllowOverrideDestination, arg.DefaultRefresh, arg.RedisplayValue, @@ -1045,9 +1401,11 @@ func (q *sqlQuerier) InsertProjectParameter(ctx context.Context, arg InsertProje &i.ProjectHistoryID, &i.Name, &i.Description, - &i.DefaultSource, + &i.DefaultSourceScheme, + &i.DefaultSourceValue, &i.AllowOverrideSource, - &i.DefaultDestination, + &i.DefaultDestinationScheme, + &i.DefaultDestinationValue, &i.AllowOverrideDestination, &i.DefaultRefresh, &i.RedisplayValue, @@ -1059,6 +1417,95 @@ func (q *sqlQuerier) InsertProjectParameter(ctx context.Context, arg InsertProje return i, err } +const insertProvisionerDaemon = `-- name: InsertProvisionerDaemon :one +INSERT INTO + provisioner_daemon (id, created_at, name, provisioners) +VALUES + ($1, $2, $3, $4) RETURNING id, created_at, updated_at, name, provisioners +` + +type InsertProvisionerDaemonParams struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + Name string `db:"name" json:"name"` + Provisioners []ProvisionerType `db:"provisioners" json:"provisioners"` +} + +func (q *sqlQuerier) InsertProvisionerDaemon(ctx context.Context, arg InsertProvisionerDaemonParams) (ProvisionerDaemon, error) { + row := q.db.QueryRowContext(ctx, insertProvisionerDaemon, + arg.ID, + arg.CreatedAt, + arg.Name, + pq.Array(arg.Provisioners), + ) + var i ProvisionerDaemon + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.Name, + pq.Array(&i.Provisioners), + ) + return i, err +} + +const insertProvisionerJob = `-- name: InsertProvisionerJob :one +INSERT INTO + provisioner_job ( + id, + created_at, + updated_at, + initiator_id, + provisioner, + type, + project_id, + input + ) +VALUES + ($1, $2, $3, $4, $5, $6, $7, $8) RETURNING id, created_at, updated_at, started_at, cancelled_at, completed_at, error, initiator_id, provisioner, type, project_id, input, worker_id +` + +type InsertProvisionerJobParams struct { + ID uuid.UUID `db:"id" json:"id"` + CreatedAt time.Time 
`db:"created_at" json:"created_at"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + InitiatorID string `db:"initiator_id" json:"initiator_id"` + Provisioner ProvisionerType `db:"provisioner" json:"provisioner"` + Type ProvisionerJobType `db:"type" json:"type"` + ProjectID uuid.UUID `db:"project_id" json:"project_id"` + Input json.RawMessage `db:"input" json:"input"` +} + +func (q *sqlQuerier) InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) { + row := q.db.QueryRowContext(ctx, insertProvisionerJob, + arg.ID, + arg.CreatedAt, + arg.UpdatedAt, + arg.InitiatorID, + arg.Provisioner, + arg.Type, + arg.ProjectID, + arg.Input, + ) + var i ProvisionerJob + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.StartedAt, + &i.CancelledAt, + &i.CompletedAt, + &i.Error, + &i.InitiatorID, + &i.Provisioner, + &i.Type, + &i.ProjectID, + &i.Input, + &i.WorkerID, + ) + return i, err +} + const insertUser = `-- name: InsertUser :one INSERT INTO users ( @@ -1349,6 +1796,58 @@ func (q *sqlQuerier) UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDP return err } +const updateProvisionerDaemonByID = `-- name: UpdateProvisionerDaemonByID :exec +UPDATE + provisioner_daemon +SET + updated_at = $2, + provisioners = $3 +WHERE + id = $1 +` + +type UpdateProvisionerDaemonByIDParams struct { + ID uuid.UUID `db:"id" json:"id"` + UpdatedAt sql.NullTime `db:"updated_at" json:"updated_at"` + Provisioners []ProvisionerType `db:"provisioners" json:"provisioners"` +} + +func (q *sqlQuerier) UpdateProvisionerDaemonByID(ctx context.Context, arg UpdateProvisionerDaemonByIDParams) error { + _, err := q.db.ExecContext(ctx, updateProvisionerDaemonByID, arg.ID, arg.UpdatedAt, pq.Array(arg.Provisioners)) + return err +} + +const updateProvisionerJobByID = `-- name: UpdateProvisionerJobByID :exec +UPDATE + provisioner_job +SET + updated_at = $2, + cancelled_at = $3, + completed_at = $4, + error = $5 +WHERE + id = $1 +` + +type UpdateProvisionerJobByIDParams struct { + ID uuid.UUID `db:"id" json:"id"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + CancelledAt sql.NullTime `db:"cancelled_at" json:"cancelled_at"` + CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"` + Error sql.NullString `db:"error" json:"error"` +} + +func (q *sqlQuerier) UpdateProvisionerJobByID(ctx context.Context, arg UpdateProvisionerJobByIDParams) error { + _, err := q.db.ExecContext(ctx, updateProvisionerJobByID, + arg.ID, + arg.UpdatedAt, + arg.CancelledAt, + arg.CompletedAt, + arg.Error, + ) + return err +} + const updateWorkspaceHistoryByID = `-- name: UpdateWorkspaceHistoryByID :exec UPDATE workspace_history From c7c7388a064a5295aed73bcfd074cfb5cd4feef0 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 15:41:46 +0000 Subject: [PATCH 02/17] feat: Compute project build parameters Adds a projectparameter package to compute build-time project values for a provided scope. This package will be used to return which variables are being used for a build, and can visually indicate the hierarchy to a user. 
--- coderd/projectparameter/projectparameter.go | 234 +++++++ .../projectparameter/projectparameter_test.go | 205 ++++++ provisionersdk/proto/provisioner.pb.go | 640 +++++++++++++----- provisionersdk/proto/provisioner.proto | 51 +- 4 files changed, 949 insertions(+), 181 deletions(-) create mode 100644 coderd/projectparameter/projectparameter.go create mode 100644 coderd/projectparameter/projectparameter_test.go diff --git a/coderd/projectparameter/projectparameter.go b/coderd/projectparameter/projectparameter.go new file mode 100644 index 0000000000000..f1814657f22bb --- /dev/null +++ b/coderd/projectparameter/projectparameter.go @@ -0,0 +1,234 @@ +package projectparameter + +import ( + "context" + "database/sql" + "errors" + "fmt" + + "github.com/google/uuid" + "golang.org/x/xerrors" + + "github.com/coder/coder/database" + "github.com/coder/coder/provisionersdk/proto" +) + +// Scope targets identifiers to pull parameters from. +type Scope struct { + OrganizationID string + ProjectID uuid.UUID + ProjectHistoryID uuid.UUID + UserID string + WorkspaceID uuid.UUID + WorkspaceHistoryID uuid.UUID +} + +// Value represents a computed parameter. +type Value struct { + Proto *proto.ParameterValue + // DefaultValue is whether a default value for the scope + // was consumed. This can only be true for projects. + DefaultValue bool + Scope database.ParameterScope + ScopeID string +} + +// Compute accepts a scope in which parameter values are sourced. +// These sources are iterated in a hierarchial fashion to determine +// the runtime parameter vaues for a project. +func Compute(ctx context.Context, db database.Store, scope Scope) ([]Value, error) { + compute := &compute{ + parameterByName: map[string]Value{}, + projectParameterByName: map[string]database.ProjectParameter{}, + } + + // All parameters for the project version! + projectHistoryParameters, err := db.GetProjectParametersByHistoryID(ctx, scope.ProjectHistoryID) + if errors.Is(err, sql.ErrNoRows) { + // It's valid to have no parameters! + return []Value{}, nil + } + if err != nil { + return nil, xerrors.Errorf("get project parameters: %w", err) + } + for _, projectParameter := range projectHistoryParameters { + compute.projectParameterByName[projectParameter.Name] = projectParameter + } + + // Organization parameters come first! + organizationParameters, err := db.GetParameterValuesByScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeOrganization, + ScopeID: scope.OrganizationID, + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return nil, xerrors.Errorf("get organization parameters: %w", err) + } + err = compute.inject(organizationParameters) + if err != nil { + return nil, xerrors.Errorf("inject organization parameters: %w", err) + } + + // Default project parameter values come second! 
+ for _, projectParameter := range projectHistoryParameters { + if !projectParameter.DefaultSourceValue.Valid { + continue + } + if !projectParameter.DefaultDestinationValue.Valid { + continue + } + + destinationScheme, err := convertDestinationScheme(projectParameter.DefaultDestinationScheme) + if err != nil { + return nil, xerrors.Errorf("convert default destination scheme for project parameter %q: %w", projectParameter.Name, err) + } + + switch projectParameter.DefaultSourceScheme { + case database.ParameterSourceSchemeData: + compute.parameterByName[projectParameter.Name] = Value{ + Proto: &proto.ParameterValue{ + DestinationScheme: destinationScheme, + Name: projectParameter.DefaultDestinationValue.String, + Value: projectParameter.DefaultSourceValue.String, + }, + DefaultValue: true, + Scope: database.ParameterScopeProject, + ScopeID: scope.ProjectID.String(), + } + default: + return nil, xerrors.Errorf("unsupported source scheme for project parameter %q: %q", projectParameter.Name, string(projectParameter.DefaultSourceScheme)) + } + } + + // Project parameters come third! + projectParameters, err := db.GetParameterValuesByScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeProject, + ScopeID: scope.ProjectID.String(), + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return nil, xerrors.Errorf("get project parameters: %w", err) + } + err = compute.inject(projectParameters) + if err != nil { + return nil, xerrors.Errorf("inject project parameters: %w", err) + } + + // User parameters come fourth! + userParameters, err := db.GetParameterValuesByScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeUser, + ScopeID: scope.UserID, + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return nil, xerrors.Errorf("get user parameters: %w", err) + } + err = compute.inject(userParameters) + if err != nil { + return nil, xerrors.Errorf("inject user parameters: %w", err) + } + + // Workspace parameters come last! + workspaceParameters, err := db.GetParameterValuesByScope(ctx, database.GetParameterValuesByScopeParams{ + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.String(), + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + return nil, xerrors.Errorf("get workspace parameters: %w", err) + } + err = compute.inject(workspaceParameters) + if err != nil { + return nil, xerrors.Errorf("inject workspace parameters: %w", err) + } + + for _, projectParameter := range compute.projectParameterByName { + if _, ok := compute.parameterByName[projectParameter.Name]; ok { + continue + } + return nil, NoValueError{ + ParameterID: projectParameter.ID, + ParameterName: projectParameter.Name, + } + } + + values := make([]Value, 0, len(compute.parameterByName)) + for _, value := range compute.parameterByName { + values = append(values, value) + } + return values, nil +} + +type compute struct { + parameterByName map[string]Value + projectParameterByName map[string]database.ProjectParameter +} + +// Validates and computes the value for parameters; setting the value on "parameterByName". +func (c *compute) inject(scopedParameters []database.ParameterValue) error { + for _, scopedParameter := range scopedParameters { + projectParameter, hasProjectParameter := c.projectParameterByName[scopedParameter.Name] + if !hasProjectParameter { + // Don't inject parameters that aren't defined by the project. 
+ continue + } + + _, hasExistingParameter := c.parameterByName[scopedParameter.Name] + if hasExistingParameter { + // If a parameter already exists, check if this variable can override it. + // Injection hierarchy is the responsibility of the caller. This check ensures + // project parameters cannot be overridden if already set. + if !projectParameter.AllowOverrideSource && scopedParameter.Scope != database.ParameterScopeProject { + continue + } + } + + destinationScheme, err := convertDestinationScheme(scopedParameter.DestinationScheme) + if err != nil { + return xerrors.Errorf("convert destination scheme: %w", err) + } + + switch scopedParameter.SourceScheme { + case database.ParameterSourceSchemeData: + c.parameterByName[projectParameter.Name] = Value{ + Proto: &proto.ParameterValue{ + DestinationScheme: destinationScheme, + Name: scopedParameter.SourceValue, + Value: scopedParameter.DestinationValue, + }, + } + default: + return xerrors.Errorf("unsupported source scheme: %q", string(projectParameter.DefaultSourceScheme)) + } + } + return nil +} + +// Converts the database destination scheme to the protobuf version. +func convertDestinationScheme(scheme database.ParameterDestinationScheme) (proto.ParameterDestination_Scheme, error) { + switch scheme { + case database.ParameterDestinationSchemeEnvironmentVariable: + return proto.ParameterDestination_ENVIRONMENT_VARIABLE, nil + case database.ParameterDestinationSchemeProvisionerVariable: + return proto.ParameterDestination_PROVISIONER_VARIABLE, nil + default: + return 0, xerrors.Errorf("unsupported destination scheme: %q", scheme) + } +} + +type NoValueError struct { + ParameterID uuid.UUID + ParameterName string +} + +func (e NoValueError) Error() string { + return fmt.Sprintf("no value for parameter %q found", e.ParameterName) +} diff --git a/coderd/projectparameter/projectparameter_test.go b/coderd/projectparameter/projectparameter_test.go new file mode 100644 index 0000000000000..5b562175497e6 --- /dev/null +++ b/coderd/projectparameter/projectparameter_test.go @@ -0,0 +1,205 @@ +package projectparameter_test + +import ( + "context" + "database/sql" + "testing" + + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/coderd/projectparameter" + "github.com/coder/coder/cryptorand" + "github.com/coder/coder/database" + "github.com/coder/coder/database/databasefake" + "github.com/coder/coder/provisionersdk/proto" +) + +func TestCompute(t *testing.T) { + t.Parallel() + generateScope := func() projectparameter.Scope { + return projectparameter.Scope{ + OrganizationID: uuid.New().String(), + ProjectID: uuid.New(), + ProjectHistoryID: uuid.New(), + UserID: uuid.NewString(), + } + } + type projectParameterOptions struct { + AllowOverrideSource bool + AllowOverrideDestination bool + DefaultDestinationScheme database.ParameterDestinationScheme + ProjectHistoryID uuid.UUID + } + generateProjectParameter := func(t *testing.T, db database.Store, opts projectParameterOptions) database.ProjectParameter { + if opts.DefaultDestinationScheme == "" { + opts.DefaultDestinationScheme = database.ParameterDestinationSchemeEnvironmentVariable + } + name, err := cryptorand.String(8) + require.NoError(t, err) + sourceValue, err := cryptorand.String(8) + require.NoError(t, err) + destinationValue, err := cryptorand.String(8) + require.NoError(t, err) + param, err := db.InsertProjectParameter(context.Background(), database.InsertProjectParameterParams{ + ID: uuid.New(), + Name: name, + ProjectHistoryID: 
opts.ProjectHistoryID, + DefaultSourceScheme: database.ParameterSourceSchemeData, + DefaultSourceValue: sql.NullString{ + String: sourceValue, + Valid: true, + }, + DefaultDestinationValue: sql.NullString{ + String: destinationValue, + Valid: true, + }, + AllowOverrideSource: opts.AllowOverrideSource, + AllowOverrideDestination: opts.AllowOverrideDestination, + DefaultDestinationScheme: opts.DefaultDestinationScheme, + }) + require.NoError(t, err) + return param + } + + t.Run("NoValue", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter, err := db.InsertProjectParameter(context.Background(), database.InsertProjectParameterParams{ + ID: uuid.New(), + ProjectHistoryID: scope.ProjectHistoryID, + Name: "hey", + }) + require.NoError(t, err) + + _, err = projectparameter.Compute(context.Background(), db, scope) + var noValueErr projectparameter.NoValueError + require.ErrorAs(t, err, &noValueErr) + require.Equal(t, parameter.ID.String(), noValueErr.ParameterID.String()) + require.Equal(t, parameter.Name, noValueErr.ParameterName) + }) + + t.Run("UseDefaultProjectValue", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter := generateProjectParameter(t, db, projectParameterOptions{ + ProjectHistoryID: scope.ProjectHistoryID, + DefaultDestinationScheme: database.ParameterDestinationSchemeProvisionerVariable, + }) + values, err := projectparameter.Compute(context.Background(), db, scope) + require.NoError(t, err) + require.Len(t, values, 1) + value := values[0] + require.True(t, value.DefaultValue) + require.Equal(t, database.ParameterScopeProject, value.Scope) + require.Equal(t, scope.ProjectID.String(), value.ScopeID) + require.Equal(t, value.Proto.Name, parameter.DefaultDestinationValue.String) + require.Equal(t, value.Proto.DestinationScheme, proto.ParameterDestination_PROVISIONER_VARIABLE) + require.Equal(t, value.Proto.Value, parameter.DefaultSourceValue.String) + }) + + t.Run("OverrideOrganizationWithProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter := generateProjectParameter(t, db, projectParameterOptions{ + ProjectHistoryID: scope.ProjectHistoryID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameter.Name, + Scope: database.ParameterScopeOrganization, + ScopeID: scope.OrganizationID, + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + DestinationValue: "organizationvalue", + }) + require.NoError(t, err) + + values, err := projectparameter.Compute(context.Background(), db, scope) + require.NoError(t, err) + require.Len(t, values, 1) + require.Equal(t, true, values[0].DefaultValue) + require.Equal(t, parameter.DefaultSourceValue.String, values[0].Proto.Value) + }) + + t.Run("ProjectOverridesProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter := generateProjectParameter(t, db, projectParameterOptions{ + ProjectHistoryID: scope.ProjectHistoryID, + }) + value, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameter.Name, + Scope: database.ParameterScopeProject, + ScopeID: scope.ProjectID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: 
database.ParameterDestinationSchemeEnvironmentVariable, + DestinationValue: "projectvalue", + }) + require.NoError(t, err) + + values, err := projectparameter.Compute(context.Background(), db, scope) + require.NoError(t, err) + require.Len(t, values, 1) + require.Equal(t, false, values[0].DefaultValue) + require.Equal(t, value.DestinationValue, values[0].Proto.Value) + }) + + t.Run("WorkspaceCannotOverwriteProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter := generateProjectParameter(t, db, projectParameterOptions{ + ProjectHistoryID: scope.ProjectHistoryID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameter.Name, + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + DestinationValue: "projectvalue", + }) + require.NoError(t, err) + + values, err := projectparameter.Compute(context.Background(), db, scope) + require.NoError(t, err) + require.Len(t, values, 1) + require.Equal(t, true, values[0].DefaultValue) + }) + + t.Run("WorkspaceOverwriteProjectDefault", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + scope := generateScope() + parameter := generateProjectParameter(t, db, projectParameterOptions{ + AllowOverrideSource: true, + ProjectHistoryID: scope.ProjectHistoryID, + }) + _, err := db.InsertParameterValue(context.Background(), database.InsertParameterValueParams{ + ID: uuid.New(), + Name: parameter.Name, + Scope: database.ParameterScopeWorkspace, + ScopeID: scope.WorkspaceID.String(), + SourceScheme: database.ParameterSourceSchemeData, + SourceValue: "nop", + DestinationScheme: database.ParameterDestinationSchemeEnvironmentVariable, + DestinationValue: "projectvalue", + }) + require.NoError(t, err) + + values, err := projectparameter.Compute(context.Background(), db, scope) + require.NoError(t, err) + require.Len(t, values, 1) + require.Equal(t, false, values[0].DefaultValue) + }) +} diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 37bf884066dd4..7537f69f27c51 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -20,6 +20,95 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +type ParameterSource_Scheme int32 + +const ( + ParameterSource_DATA ParameterSource_Scheme = 0 +) + +// Enum value maps for ParameterSource_Scheme. +var ( + ParameterSource_Scheme_name = map[int32]string{ + 0: "DATA", + } + ParameterSource_Scheme_value = map[string]int32{ + "DATA": 0, + } +) + +func (x ParameterSource_Scheme) Enum() *ParameterSource_Scheme { + p := new(ParameterSource_Scheme) + *p = x + return p +} + +func (x ParameterSource_Scheme) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ParameterSource_Scheme) Descriptor() protoreflect.EnumDescriptor { + return file_provisioner_proto_enumTypes[0].Descriptor() +} + +func (ParameterSource_Scheme) Type() protoreflect.EnumType { + return &file_provisioner_proto_enumTypes[0] +} + +func (x ParameterSource_Scheme) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ParameterSource_Scheme.Descriptor instead. 
+func (ParameterSource_Scheme) EnumDescriptor() ([]byte, []int) { + return file_provisioner_proto_rawDescGZIP(), []int{0, 0} +} + +type ParameterDestination_Scheme int32 + +const ( + ParameterDestination_ENVIRONMENT_VARIABLE ParameterDestination_Scheme = 0 + ParameterDestination_PROVISIONER_VARIABLE ParameterDestination_Scheme = 1 +) + +// Enum value maps for ParameterDestination_Scheme. +var ( + ParameterDestination_Scheme_name = map[int32]string{ + 0: "ENVIRONMENT_VARIABLE", + 1: "PROVISIONER_VARIABLE", + } + ParameterDestination_Scheme_value = map[string]int32{ + "ENVIRONMENT_VARIABLE": 0, + "PROVISIONER_VARIABLE": 1, + } +) + +func (x ParameterDestination_Scheme) Enum() *ParameterDestination_Scheme { + p := new(ParameterDestination_Scheme) + *p = x + return p +} + +func (x ParameterDestination_Scheme) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (ParameterDestination_Scheme) Descriptor() protoreflect.EnumDescriptor { + return file_provisioner_proto_enumTypes[1].Descriptor() +} + +func (ParameterDestination_Scheme) Type() protoreflect.EnumType { + return &file_provisioner_proto_enumTypes[1] +} + +func (x ParameterDestination_Scheme) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use ParameterDestination_Scheme.Descriptor instead. +func (ParameterDestination_Scheme) EnumDescriptor() ([]byte, []int) { + return file_provisioner_proto_rawDescGZIP(), []int{1, 0} +} + type ParameterSchema_TypeSystem int32 const ( @@ -47,11 +136,11 @@ func (x ParameterSchema_TypeSystem) String() string { } func (ParameterSchema_TypeSystem) Descriptor() protoreflect.EnumDescriptor { - return file_provisioner_proto_enumTypes[0].Descriptor() + return file_provisioner_proto_enumTypes[2].Descriptor() } func (ParameterSchema_TypeSystem) Type() protoreflect.EnumType { - return &file_provisioner_proto_enumTypes[0] + return &file_provisioner_proto_enumTypes[2] } func (x ParameterSchema_TypeSystem) Number() protoreflect.EnumNumber { @@ -60,27 +149,21 @@ func (x ParameterSchema_TypeSystem) Number() protoreflect.EnumNumber { // Deprecated: Use ParameterSchema_TypeSystem.Descriptor instead. func (ParameterSchema_TypeSystem) EnumDescriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{0, 0} + return file_provisioner_proto_rawDescGZIP(), []int{3, 0} } -// ParameterSchema represents validation and type information for a parsed parameter. -type ParameterSchema struct { +// ParameterSource represents the source location for a parameter to get it's value from. 
+type ParameterSource struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` - DefaultValue string `protobuf:"bytes,3,opt,name=default_value,json=defaultValue,proto3" json:"default_value,omitempty"` - Sensitive bool `protobuf:"varint,4,opt,name=sensitive,proto3" json:"sensitive,omitempty"` - ValidationTypeSystem ParameterSchema_TypeSystem `protobuf:"varint,5,opt,name=validation_type_system,json=validationTypeSystem,proto3,enum=provisioner.ParameterSchema_TypeSystem" json:"validation_type_system,omitempty"` - ValidationValueType string `protobuf:"bytes,6,opt,name=validation_value_type,json=validationValueType,proto3" json:"validation_value_type,omitempty"` - ValidationError string `protobuf:"bytes,7,opt,name=validation_error,json=validationError,proto3" json:"validation_error,omitempty"` - ValidationCondition string `protobuf:"bytes,8,opt,name=validation_condition,json=validationCondition,proto3" json:"validation_condition,omitempty"` + Scheme ParameterSource_Scheme `protobuf:"varint,1,opt,name=scheme,proto3,enum=provisioner.ParameterSource_Scheme" json:"scheme,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } -func (x *ParameterSchema) Reset() { - *x = ParameterSchema{} +func (x *ParameterSource) Reset() { + *x = ParameterSource{} if protoimpl.UnsafeEnabled { mi := &file_provisioner_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -88,13 +171,13 @@ func (x *ParameterSchema) Reset() { } } -func (x *ParameterSchema) String() string { +func (x *ParameterSource) String() string { return protoimpl.X.MessageStringOf(x) } -func (*ParameterSchema) ProtoMessage() {} +func (*ParameterSource) ProtoMessage() {} -func (x *ParameterSchema) ProtoReflect() protoreflect.Message { +func (x *ParameterSource) ProtoReflect() protoreflect.Message { mi := &file_provisioner_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) @@ -106,81 +189,96 @@ func (x *ParameterSchema) ProtoReflect() protoreflect.Message { return mi.MessageOf(x) } -// Deprecated: Use ParameterSchema.ProtoReflect.Descriptor instead. -func (*ParameterSchema) Descriptor() ([]byte, []int) { +// Deprecated: Use ParameterSource.ProtoReflect.Descriptor instead. +func (*ParameterSource) Descriptor() ([]byte, []int) { return file_provisioner_proto_rawDescGZIP(), []int{0} } -func (x *ParameterSchema) GetName() string { +func (x *ParameterSource) GetScheme() ParameterSource_Scheme { if x != nil { - return x.Name + return x.Scheme } - return "" + return ParameterSource_DATA } -func (x *ParameterSchema) GetDescription() string { +func (x *ParameterSource) GetValue() string { if x != nil { - return x.Description + return x.Value } return "" } -func (x *ParameterSchema) GetDefaultValue() string { - if x != nil { - return x.DefaultValue - } - return "" +// ParameterDestination represents the target location for a provisioner to set the value. 
+type ParameterDestination struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Scheme ParameterDestination_Scheme `protobuf:"varint,1,opt,name=scheme,proto3,enum=provisioner.ParameterDestination_Scheme" json:"scheme,omitempty"` + Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` } -func (x *ParameterSchema) GetSensitive() bool { - if x != nil { - return x.Sensitive +func (x *ParameterDestination) Reset() { + *x = ParameterDestination{} + if protoimpl.UnsafeEnabled { + mi := &file_provisioner_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) } - return false } -func (x *ParameterSchema) GetValidationTypeSystem() ParameterSchema_TypeSystem { - if x != nil { - return x.ValidationTypeSystem - } - return ParameterSchema_HCL +func (x *ParameterDestination) String() string { + return protoimpl.X.MessageStringOf(x) } -func (x *ParameterSchema) GetValidationValueType() string { - if x != nil { - return x.ValidationValueType +func (*ParameterDestination) ProtoMessage() {} + +func (x *ParameterDestination) ProtoReflect() protoreflect.Message { + mi := &file_provisioner_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms } - return "" + return mi.MessageOf(x) } -func (x *ParameterSchema) GetValidationError() string { +// Deprecated: Use ParameterDestination.ProtoReflect.Descriptor instead. +func (*ParameterDestination) Descriptor() ([]byte, []int) { + return file_provisioner_proto_rawDescGZIP(), []int{1} +} + +func (x *ParameterDestination) GetScheme() ParameterDestination_Scheme { if x != nil { - return x.ValidationError + return x.Scheme } - return "" + return ParameterDestination_ENVIRONMENT_VARIABLE } -func (x *ParameterSchema) GetValidationCondition() string { +func (x *ParameterDestination) GetValue() string { if x != nil { - return x.ValidationCondition + return x.Value } return "" } -// ParameterValue holds the value of a parameter. +// ParameterValue represents the resolved source and destination of a parameter. 
type ParameterValue struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` + DestinationScheme ParameterDestination_Scheme `protobuf:"varint,1,opt,name=destination_scheme,json=destinationScheme,proto3,enum=provisioner.ParameterDestination_Scheme" json:"destination_scheme,omitempty"` + Name string `protobuf:"bytes,2,opt,name=name,proto3" json:"name,omitempty"` + Value string `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"` } func (x *ParameterValue) Reset() { *x = ParameterValue{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[1] + mi := &file_provisioner_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -193,7 +291,7 @@ func (x *ParameterValue) String() string { func (*ParameterValue) ProtoMessage() {} func (x *ParameterValue) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[1] + mi := &file_provisioner_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -206,7 +304,14 @@ func (x *ParameterValue) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterValue.ProtoReflect.Descriptor instead. func (*ParameterValue) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{1} + return file_provisioner_proto_rawDescGZIP(), []int{2} +} + +func (x *ParameterValue) GetDestinationScheme() ParameterDestination_Scheme { + if x != nil { + return x.DestinationScheme + } + return ParameterDestination_ENVIRONMENT_VARIABLE } func (x *ParameterValue) GetName() string { @@ -223,6 +328,134 @@ func (x *ParameterValue) GetValue() string { return "" } +// ParameterSchema represents validation and type information for a parsed parameter. 
+type ParameterSchema struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + DefaultSource *ParameterSource `protobuf:"bytes,3,opt,name=default_source,json=defaultSource,proto3" json:"default_source,omitempty"` + AllowOverrideSource bool `protobuf:"varint,4,opt,name=allow_override_source,json=allowOverrideSource,proto3" json:"allow_override_source,omitempty"` + DefaultDestination *ParameterDestination `protobuf:"bytes,5,opt,name=default_destination,json=defaultDestination,proto3" json:"default_destination,omitempty"` + AllowOverrideDestination bool `protobuf:"varint,6,opt,name=allow_override_destination,json=allowOverrideDestination,proto3" json:"allow_override_destination,omitempty"` + RedisplayValue bool `protobuf:"varint,7,opt,name=redisplay_value,json=redisplayValue,proto3" json:"redisplay_value,omitempty"` + ValidationTypeSystem ParameterSchema_TypeSystem `protobuf:"varint,8,opt,name=validation_type_system,json=validationTypeSystem,proto3,enum=provisioner.ParameterSchema_TypeSystem" json:"validation_type_system,omitempty"` + ValidationValueType string `protobuf:"bytes,9,opt,name=validation_value_type,json=validationValueType,proto3" json:"validation_value_type,omitempty"` + ValidationError string `protobuf:"bytes,10,opt,name=validation_error,json=validationError,proto3" json:"validation_error,omitempty"` + ValidationCondition string `protobuf:"bytes,11,opt,name=validation_condition,json=validationCondition,proto3" json:"validation_condition,omitempty"` +} + +func (x *ParameterSchema) Reset() { + *x = ParameterSchema{} + if protoimpl.UnsafeEnabled { + mi := &file_provisioner_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *ParameterSchema) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ParameterSchema) ProtoMessage() {} + +func (x *ParameterSchema) ProtoReflect() protoreflect.Message { + mi := &file_provisioner_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ParameterSchema.ProtoReflect.Descriptor instead. 
+func (*ParameterSchema) Descriptor() ([]byte, []int) { + return file_provisioner_proto_rawDescGZIP(), []int{3} +} + +func (x *ParameterSchema) GetName() string { + if x != nil { + return x.Name + } + return "" +} + +func (x *ParameterSchema) GetDescription() string { + if x != nil { + return x.Description + } + return "" +} + +func (x *ParameterSchema) GetDefaultSource() *ParameterSource { + if x != nil { + return x.DefaultSource + } + return nil +} + +func (x *ParameterSchema) GetAllowOverrideSource() bool { + if x != nil { + return x.AllowOverrideSource + } + return false +} + +func (x *ParameterSchema) GetDefaultDestination() *ParameterDestination { + if x != nil { + return x.DefaultDestination + } + return nil +} + +func (x *ParameterSchema) GetAllowOverrideDestination() bool { + if x != nil { + return x.AllowOverrideDestination + } + return false +} + +func (x *ParameterSchema) GetRedisplayValue() bool { + if x != nil { + return x.RedisplayValue + } + return false +} + +func (x *ParameterSchema) GetValidationTypeSystem() ParameterSchema_TypeSystem { + if x != nil { + return x.ValidationTypeSystem + } + return ParameterSchema_HCL +} + +func (x *ParameterSchema) GetValidationValueType() string { + if x != nil { + return x.ValidationValueType + } + return "" +} + +func (x *ParameterSchema) GetValidationError() string { + if x != nil { + return x.ValidationError + } + return "" +} + +func (x *ParameterSchema) GetValidationCondition() string { + if x != nil { + return x.ValidationCondition + } + return "" +} + // Parse consumes source-code from a directory to produce inputs. type Parse struct { state protoimpl.MessageState @@ -233,7 +466,7 @@ type Parse struct { func (x *Parse) Reset() { *x = Parse{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[2] + mi := &file_provisioner_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -246,7 +479,7 @@ func (x *Parse) String() string { func (*Parse) ProtoMessage() {} func (x *Parse) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[2] + mi := &file_provisioner_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -259,7 +492,7 @@ func (x *Parse) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse.ProtoReflect.Descriptor instead. func (*Parse) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{2} + return file_provisioner_proto_rawDescGZIP(), []int{4} } // Resource is a provisioned unit. @@ -275,7 +508,7 @@ type Resource struct { func (x *Resource) Reset() { *x = Resource{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[3] + mi := &file_provisioner_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -288,7 +521,7 @@ func (x *Resource) String() string { func (*Resource) ProtoMessage() {} func (x *Resource) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[3] + mi := &file_provisioner_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -301,7 +534,7 @@ func (x *Resource) ProtoReflect() protoreflect.Message { // Deprecated: Use Resource.ProtoReflect.Descriptor instead. 
func (*Resource) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{3} + return file_provisioner_proto_rawDescGZIP(), []int{5} } func (x *Resource) GetName() string { @@ -328,7 +561,7 @@ type Provision struct { func (x *Provision) Reset() { *x = Provision{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[4] + mi := &file_provisioner_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -341,7 +574,7 @@ func (x *Provision) String() string { func (*Provision) ProtoMessage() {} func (x *Provision) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[4] + mi := &file_provisioner_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -354,7 +587,7 @@ func (x *Provision) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision.ProtoReflect.Descriptor instead. func (*Provision) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4} + return file_provisioner_proto_rawDescGZIP(), []int{6} } type Parse_Request struct { @@ -368,7 +601,7 @@ type Parse_Request struct { func (x *Parse_Request) Reset() { *x = Parse_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[5] + mi := &file_provisioner_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -381,7 +614,7 @@ func (x *Parse_Request) String() string { func (*Parse_Request) ProtoMessage() {} func (x *Parse_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[5] + mi := &file_provisioner_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -394,7 +627,7 @@ func (x *Parse_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Request.ProtoReflect.Descriptor instead. func (*Parse_Request) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{2, 0} + return file_provisioner_proto_rawDescGZIP(), []int{4, 0} } func (x *Parse_Request) GetDirectory() string { @@ -415,7 +648,7 @@ type Parse_Response struct { func (x *Parse_Response) Reset() { *x = Parse_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[6] + mi := &file_provisioner_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -428,7 +661,7 @@ func (x *Parse_Response) String() string { func (*Parse_Response) ProtoMessage() {} func (x *Parse_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[6] + mi := &file_provisioner_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -441,7 +674,7 @@ func (x *Parse_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Response.ProtoReflect.Descriptor instead. 
func (*Parse_Response) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{2, 1} + return file_provisioner_proto_rawDescGZIP(), []int{4, 1} } func (x *Parse_Response) GetParameterSchemas() []*ParameterSchema { @@ -464,7 +697,7 @@ type Provision_Request struct { func (x *Provision_Request) Reset() { *x = Provision_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[7] + mi := &file_provisioner_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -477,7 +710,7 @@ func (x *Provision_Request) String() string { func (*Provision_Request) ProtoMessage() {} func (x *Provision_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[7] + mi := &file_provisioner_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -490,7 +723,7 @@ func (x *Provision_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Request.ProtoReflect.Descriptor instead. func (*Provision_Request) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4, 0} + return file_provisioner_proto_rawDescGZIP(), []int{6, 0} } func (x *Provision_Request) GetDirectory() string { @@ -526,7 +759,7 @@ type Provision_Response struct { func (x *Provision_Response) Reset() { *x = Provision_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[8] + mi := &file_provisioner_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -539,7 +772,7 @@ func (x *Provision_Response) String() string { func (*Provision_Response) ProtoMessage() {} func (x *Provision_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[8] + mi := &file_provisioner_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -552,7 +785,7 @@ func (x *Provision_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Response.ProtoReflect.Descriptor instead. 
func (*Provision_Response) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4, 1} + return file_provisioner_proto_rawDescGZIP(), []int{6, 1} } func (x *Provision_Response) GetState() []byte { @@ -574,75 +807,115 @@ var File_provisioner_proto protoreflect.FileDescriptor var file_provisioner_proto_rawDesc = []byte{ 0x0a, 0x11, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x22, 0x92, 0x03, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, - 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x23, 0x0a, 0x0d, 0x64, 0x65, - 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x0c, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, - 0x1c, 0x0a, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x08, 0x52, 0x09, 0x73, 0x65, 0x6e, 0x73, 0x69, 0x74, 0x69, 0x76, 0x65, 0x12, 0x5d, 0x0a, + 0x22, 0x78, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, + 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x12, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, + 0x12, 0x08, 0x0a, 0x04, 0x44, 0x41, 0x54, 0x41, 0x10, 0x00, 0x22, 0xac, 0x01, 0x0a, 0x14, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, + 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x06, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3c, 0x0a, 0x06, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x45, 0x4e, 0x56, 0x49, 0x52, 0x4f, 0x4e, + 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x00, 0x12, + 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x56, + 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x22, 0x93, 0x01, 0x0a, 0x0e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x57, 0x0a, 0x12, + 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 
0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x65, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, + 0x83, 0x05, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, + 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, + 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x65, 0x66, + 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, + 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x32, + 0x0a, 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, + 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x61, + 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x53, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x12, 0x52, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x64, 0x65, + 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x44, 0x65, 0x73, 0x74, 0x69, + 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x1a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, + 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x18, 0x61, 0x6c, 0x6c, 0x6f, + 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, + 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x72, + 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x5d, 0x0a, 0x16, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, - 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, + 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 
0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x32, 0x0a, 0x15, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, + 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, 0x61, 0x6c, 0x69, + 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x31, 0x0a, 0x14, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, + 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x15, 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x07, 0x0a, 0x03, - 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x3a, 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, - 0x65, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, 0x0a, 0x07, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, - 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x63, - 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, - 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x22, 0x32, 0x0a, 0x08, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, - 0xea, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, - 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, - 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, - 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 
0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, - 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, - 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, - 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x32, 0x9d, 0x01, 0x0a, - 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x40, 0x0a, 0x05, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, - 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, - 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2a, 0x5a, 0x28, - 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, - 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, + 0x27, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, + 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, + 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, + 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x22, + 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, + 0x79, 0x70, 0x65, 0x22, 0xea, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, + 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, 0x10, 0x70, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, + 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, + 0x32, 0x9d, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x12, 0x40, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, + 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, + 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, + 0x42, 0x2d, 0x5a, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -657,34 +930,43 @@ func file_provisioner_proto_rawDescGZIP() []byte { return file_provisioner_proto_rawDescData } -var file_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 9) +var file_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 3) +var file_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 11) var file_provisioner_proto_goTypes = []interface{}{ - (ParameterSchema_TypeSystem)(0), // 0: provisioner.ParameterSchema.TypeSystem - (*ParameterSchema)(nil), // 1: provisioner.ParameterSchema - (*ParameterValue)(nil), // 2: provisioner.ParameterValue - (*Parse)(nil), // 3: provisioner.Parse - (*Resource)(nil), // 4: provisioner.Resource - (*Provision)(nil), // 5: provisioner.Provision - (*Parse_Request)(nil), // 6: provisioner.Parse.Request - (*Parse_Response)(nil), // 7: provisioner.Parse.Response - (*Provision_Request)(nil), // 8: provisioner.Provision.Request - (*Provision_Response)(nil), // 9: 
provisioner.Provision.Response + (ParameterSource_Scheme)(0), // 0: provisioner.ParameterSource.Scheme + (ParameterDestination_Scheme)(0), // 1: provisioner.ParameterDestination.Scheme + (ParameterSchema_TypeSystem)(0), // 2: provisioner.ParameterSchema.TypeSystem + (*ParameterSource)(nil), // 3: provisioner.ParameterSource + (*ParameterDestination)(nil), // 4: provisioner.ParameterDestination + (*ParameterValue)(nil), // 5: provisioner.ParameterValue + (*ParameterSchema)(nil), // 6: provisioner.ParameterSchema + (*Parse)(nil), // 7: provisioner.Parse + (*Resource)(nil), // 8: provisioner.Resource + (*Provision)(nil), // 9: provisioner.Provision + (*Parse_Request)(nil), // 10: provisioner.Parse.Request + (*Parse_Response)(nil), // 11: provisioner.Parse.Response + (*Provision_Request)(nil), // 12: provisioner.Provision.Request + (*Provision_Response)(nil), // 13: provisioner.Provision.Response } var file_provisioner_proto_depIdxs = []int32{ - 0, // 0: provisioner.ParameterSchema.validation_type_system:type_name -> provisioner.ParameterSchema.TypeSystem - 1, // 1: provisioner.Parse.Response.parameter_schemas:type_name -> provisioner.ParameterSchema - 2, // 2: provisioner.Provision.Request.parameter_values:type_name -> provisioner.ParameterValue - 4, // 3: provisioner.Provision.Response.resources:type_name -> provisioner.Resource - 6, // 4: provisioner.Provisioner.Parse:input_type -> provisioner.Parse.Request - 8, // 5: provisioner.Provisioner.Provision:input_type -> provisioner.Provision.Request - 7, // 6: provisioner.Provisioner.Parse:output_type -> provisioner.Parse.Response - 9, // 7: provisioner.Provisioner.Provision:output_type -> provisioner.Provision.Response - 6, // [6:8] is the sub-list for method output_type - 4, // [4:6] is the sub-list for method input_type - 4, // [4:4] is the sub-list for extension type_name - 4, // [4:4] is the sub-list for extension extendee - 0, // [0:4] is the sub-list for field type_name + 0, // 0: provisioner.ParameterSource.scheme:type_name -> provisioner.ParameterSource.Scheme + 1, // 1: provisioner.ParameterDestination.scheme:type_name -> provisioner.ParameterDestination.Scheme + 1, // 2: provisioner.ParameterValue.destination_scheme:type_name -> provisioner.ParameterDestination.Scheme + 3, // 3: provisioner.ParameterSchema.default_source:type_name -> provisioner.ParameterSource + 4, // 4: provisioner.ParameterSchema.default_destination:type_name -> provisioner.ParameterDestination + 2, // 5: provisioner.ParameterSchema.validation_type_system:type_name -> provisioner.ParameterSchema.TypeSystem + 6, // 6: provisioner.Parse.Response.parameter_schemas:type_name -> provisioner.ParameterSchema + 5, // 7: provisioner.Provision.Request.parameter_values:type_name -> provisioner.ParameterValue + 8, // 8: provisioner.Provision.Response.resources:type_name -> provisioner.Resource + 10, // 9: provisioner.Provisioner.Parse:input_type -> provisioner.Parse.Request + 12, // 10: provisioner.Provisioner.Provision:input_type -> provisioner.Provision.Request + 11, // 11: provisioner.Provisioner.Parse:output_type -> provisioner.Parse.Response + 13, // 12: provisioner.Provisioner.Provision:output_type -> provisioner.Provision.Response + 11, // [11:13] is the sub-list for method output_type + 9, // [9:11] is the sub-list for method input_type + 9, // [9:9] is the sub-list for extension type_name + 9, // [9:9] is the sub-list for extension extendee + 0, // [0:9] is the sub-list for field type_name } func init() { file_provisioner_proto_init() } @@ -694,7 +976,7 @@ func 
file_provisioner_proto_init() { } if !protoimpl.UnsafeEnabled { file_provisioner_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ParameterSchema); i { + switch v := v.(*ParameterSource); i { case 0: return &v.state case 1: @@ -706,7 +988,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ParameterValue); i { + switch v := v.(*ParameterDestination); i { case 0: return &v.state case 1: @@ -718,7 +1000,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse); i { + switch v := v.(*ParameterValue); i { case 0: return &v.state case 1: @@ -730,7 +1012,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Resource); i { + switch v := v.(*ParameterSchema); i { case 0: return &v.state case 1: @@ -742,7 +1024,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Provision); i { + switch v := v.(*Parse); i { case 0: return &v.state case 1: @@ -754,7 +1036,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse_Request); i { + switch v := v.(*Resource); i { case 0: return &v.state case 1: @@ -766,7 +1048,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse_Response); i { + switch v := v.(*Provision); i { case 0: return &v.state case 1: @@ -778,7 +1060,7 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Provision_Request); i { + switch v := v.(*Parse_Request); i { case 0: return &v.state case 1: @@ -790,6 +1072,30 @@ func file_provisioner_proto_init() { } } file_provisioner_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Parse_Response); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Provision_Request); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Provision_Response); i { case 0: return &v.state @@ -807,8 +1113,8 @@ func file_provisioner_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisioner_proto_rawDesc, - NumEnums: 1, - NumMessages: 9, + NumEnums: 3, + NumMessages: 11, NumExtensions: 0, NumServices: 1, }, diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index 362604c27ec89..c865dced18fe9 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -1,29 +1,52 @@ syntax = "proto3"; -option go_package = "github.com/coder/coder/provisioner/proto"; +option go_package = "github.com/coder/coder/provisionersdk/proto"; package provisioner; +// ParameterSource represents the source location for a parameter to get it's 
value from. +message ParameterSource { + enum Scheme { + DATA = 0; + } + Scheme scheme = 1; + string value = 2; +} + +// ParameterDestination represents the target location for a provisioner to set the value. +message ParameterDestination { + enum Scheme { + ENVIRONMENT_VARIABLE = 0; + PROVISIONER_VARIABLE = 1; + } + Scheme scheme = 1; + string value = 2; +} + +// ParameterValue represents the resolved source and destination of a parameter. +message ParameterValue { + ParameterDestination.Scheme destination_scheme = 1; + string name = 2; + string value = 3; +} + // ParameterSchema represents validation and type information for a parsed parameter. message ParameterSchema { string name = 1; string description = 2; - string default_value = 3; - bool sensitive = 4; + ParameterSource default_source = 3; + bool allow_override_source = 4; + ParameterDestination default_destination = 5; + bool allow_override_destination = 6; + bool redisplay_value = 7; enum TypeSystem { HCL = 0; } - TypeSystem validation_type_system = 5; - string validation_value_type = 6; - string validation_error = 7; - string validation_condition = 8; -} - -// ParameterValue holds the value of a parameter. -message ParameterValue { - string name = 1; - string value = 2; + TypeSystem validation_type_system = 8; + string validation_value_type = 9; + string validation_error = 10; + string validation_condition = 11; } // Parse consumes source-code from a directory to produce inputs. @@ -58,4 +81,4 @@ message Provision { service Provisioner { rpc Parse(Parse.Request) returns (Parse.Response); rpc Provision(Provision.Request) returns (Provision.Response); -} +} \ No newline at end of file From ace6248a07da898c2e5aa5481a2d75ab97fb64ab Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 15:44:57 +0000 Subject: [PATCH 03/17] Fix terraform provisioner --- provisioner/terraform/parse.go | 11 +++++++++-- provisioner/terraform/parse_test.go | 11 +++++++++-- provisioner/terraform/provision.go | 17 +++++++++++++++-- provisioner/terraform/provision_test.go | 5 +++-- 4 files changed, 36 insertions(+), 8 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 639d1039f451c..cc92bc8f8008c 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -37,7 +37,7 @@ func convertVariableToParameter(variable *tfconfig.Variable) (*proto.ParameterSc schema := &proto.ParameterSchema{ Name: variable.Name, Description: variable.Description, - Sensitive: variable.Sensitive, + RedisplayValue: variable.Sensitive, ValidationValueType: variable.Type, } @@ -46,7 +46,14 @@ func convertVariableToParameter(variable *tfconfig.Variable) (*proto.ParameterSc if err != nil { return nil, xerrors.Errorf("parse variable %q default: %w", variable.Name, err) } - schema.DefaultValue = string(defaultData) + schema.DefaultSource = &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: string(defaultData), + } + schema.DefaultDestination = &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + Value: variable.Name, + } } if len(variable.Validations) > 0 && variable.Validations[0].Condition != nil { diff --git a/provisioner/terraform/parse_test.go b/provisioner/terraform/parse_test.go index 94af39103a308..bbfe166827851 100644 --- a/provisioner/terraform/parse_test.go +++ b/provisioner/terraform/parse_test.go @@ -63,8 +63,15 @@ func TestParse(t *testing.T) { }, Response: &proto.Parse_Response{ ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", - 
DefaultValue: "\"wow\"", + Name: "A", + DefaultSource: &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: "\"wow\"", + }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + Value: "A", + }, }}, }, }, { diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 34f57ff649192..4cf94ec0d9378 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -37,10 +37,23 @@ func (t *terraform) Provision(ctx context.Context, request *proto.Provision_Requ return nil, xerrors.Errorf("initialize terraform: %w", err) } + env := map[string]string{} options := make([]tfexec.ApplyOption, 0) - for _, params := range request.ParameterValues { - options = append(options, tfexec.Var(fmt.Sprintf("%s=%s", params.Name, params.Value))) + for _, param := range request.ParameterValues { + switch param.DestinationScheme { + case proto.ParameterDestination_ENVIRONMENT_VARIABLE: + env[param.Name] = param.Value + case proto.ParameterDestination_PROVISIONER_VARIABLE: + options = append(options, tfexec.Var(fmt.Sprintf("%s=%s", param.Name, param.Value))) + default: + return nil, xerrors.Errorf("unsupported parameter type %q for %q", param.DestinationScheme, param.Name) + } + } + err = terraform.SetEnv(env) + if err != nil { + return nil, xerrors.Errorf("apply environment variables: %w", err) } + err = terraform.Apply(ctx, options...) if err != nil { return nil, xerrors.Errorf("apply terraform: %w", err) diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index 7d193033f1e65..b596c85d0bf15 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -63,8 +63,9 @@ func TestProvision(t *testing.T) { }, Request: &proto.Provision_Request{ ParameterValues: []*proto.ParameterValue{{ - Name: "A", - Value: "example", + DestinationScheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + Name: "A", + Value: "example", }}, }, Response: &proto.Provision_Response{}, From 2bd0c42786cc5e42ceda261f9322083c4412b918 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 15:51:04 +0000 Subject: [PATCH 04/17] feat: Add provisionerd protobuf definitions Provisionerd communicates with coderd over a multiplexed WebSocket serving dRPC. This adds a roughly accurate protocol definition. It shares definitions with "provisioner.proto" for simple interop with provisioners! 
--- .gitattributes | 1 + Makefile | 28 +- codecov.yml | 1 + peerbroker/proto/peerbroker.pb.go | 207 +-- peerbroker/proto/peerbroker_drpc.pb.go | 30 +- provisionerd/proto/provisionerd.pb.go | 1346 +++++++++++++++++++ provisionerd/proto/provisionerd.proto | 118 ++ provisionerd/proto/provisionerd_drpc.pb.go | 275 ++++ provisionersdk/proto/provisioner.pb.go | 371 ++--- provisionersdk/proto/provisioner_drpc.pb.go | 26 +- 10 files changed, 2079 insertions(+), 324 deletions(-) create mode 100644 provisionerd/proto/provisionerd.pb.go create mode 100644 provisionerd/proto/provisionerd.proto create mode 100644 provisionerd/proto/provisionerd_drpc.pb.go diff --git a/.gitattributes b/.gitattributes index c72d98e5c6cef..d7fbd84988c2b 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,4 @@ # Generated files peerbroker/proto/*.go linguist-generated=true +provisionerd/proto/*.go linguist-generated=true provisionersdk/proto/*.go linguist-generated=true diff --git a/Makefile b/Makefile index 572afc61095f2..7ae274ddf8086 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,12 @@ bin/coderd: go build -o bin/coderd cmd/coderd/main.go .PHONY: bin/coderd -build: site/out bin/coderd +bin/provisionerd: + mkdir -p bin + go build -o bin/provisionerd cmd/provisionerd/main.go +.PHONY: bin/provisionerd + +build: site/out bin/coderd bin/provisionerd .PHONY: build # Runs migrations to output a dump of the database. @@ -38,27 +43,34 @@ fmt/sql: ./database/query.sql fmt: fmt/prettier fmt/sql .PHONY: fmt -gen: database/generate peerbroker/proto provisionersdk/proto +gen: database/generate peerbroker/proto provisionersdk/proto provisionerd/proto .PHONY: gen -# Generates the protocol files. peerbroker/proto: peerbroker/proto/peerbroker.proto - cd peerbroker/proto && protoc \ + protoc \ --go_out=. \ --go_opt=paths=source_relative \ --go-drpc_out=. \ --go-drpc_opt=paths=source_relative \ - ./peerbroker.proto + ./peerbroker/proto/peerbroker.proto .PHONY: peerbroker/proto -# Generates the protocol files. +provisionerd/proto: provisionerd/proto/provisionerd.proto + protoc \ + --go_out=. \ + --go_opt=paths=source_relative \ + --go-drpc_out=. \ + --go-drpc_opt=paths=source_relative \ + ./provisionerd/proto/provisionerd.proto +.PHONY: provisionerd/proto + provisionersdk/proto: provisionersdk/proto/provisioner.proto - cd provisionersdk/proto && protoc \ + protoc \ --go_out=. \ --go_opt=paths=source_relative \ --go-drpc_out=. \ --go-drpc_opt=paths=source_relative \ - ./provisioner.proto + ./provisionersdk/proto/provisioner.proto .PHONY: provisionersdk/proto site/out: diff --git a/codecov.yml b/codecov.yml index e63dbbfd3c901..1ccc943684949 100644 --- a/codecov.yml +++ b/codecov.yml @@ -29,4 +29,5 @@ ignore: # All coderd tests fail if this doesn't work. 
- database/databasefake - peerbroker/proto + - provisionerd/proto - provisionersdk/proto diff --git a/peerbroker/proto/peerbroker.pb.go b/peerbroker/proto/peerbroker.pb.go index f5a41277bd10f..dc89ea8cb57eb 100644 --- a/peerbroker/proto/peerbroker.pb.go +++ b/peerbroker/proto/peerbroker.pb.go @@ -2,7 +2,7 @@ // versions: // protoc-gen-go v1.26.0 // protoc v3.6.1 -// source: peerbroker.proto +// source: peerbroker/proto/peerbroker.proto package proto @@ -32,7 +32,7 @@ type WebRTCSessionDescription struct { func (x *WebRTCSessionDescription) Reset() { *x = WebRTCSessionDescription{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[0] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -45,7 +45,7 @@ func (x *WebRTCSessionDescription) String() string { func (*WebRTCSessionDescription) ProtoMessage() {} func (x *WebRTCSessionDescription) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[0] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -58,7 +58,7 @@ func (x *WebRTCSessionDescription) ProtoReflect() protoreflect.Message { // Deprecated: Use WebRTCSessionDescription.ProtoReflect.Descriptor instead. func (*WebRTCSessionDescription) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{0} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{0} } func (x *WebRTCSessionDescription) GetSdpType() int32 { @@ -89,7 +89,7 @@ type WebRTCICEServer struct { func (x *WebRTCICEServer) Reset() { *x = WebRTCICEServer{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[1] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -102,7 +102,7 @@ func (x *WebRTCICEServer) String() string { func (*WebRTCICEServer) ProtoMessage() {} func (x *WebRTCICEServer) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[1] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -115,7 +115,7 @@ func (x *WebRTCICEServer) ProtoReflect() protoreflect.Message { // Deprecated: Use WebRTCICEServer.ProtoReflect.Descriptor instead. 
func (*WebRTCICEServer) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{1} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{1} } func (x *WebRTCICEServer) GetUrls() []string { @@ -157,7 +157,7 @@ type WebRTCICEServers struct { func (x *WebRTCICEServers) Reset() { *x = WebRTCICEServers{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[2] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -170,7 +170,7 @@ func (x *WebRTCICEServers) String() string { func (*WebRTCICEServers) ProtoMessage() {} func (x *WebRTCICEServers) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[2] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -183,7 +183,7 @@ func (x *WebRTCICEServers) ProtoReflect() protoreflect.Message { // Deprecated: Use WebRTCICEServers.ProtoReflect.Descriptor instead. func (*WebRTCICEServers) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{2} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{2} } func (x *WebRTCICEServers) GetServers() []*WebRTCICEServer { @@ -202,7 +202,7 @@ type NegotiateConnection struct { func (x *NegotiateConnection) Reset() { *x = NegotiateConnection{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[3] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -215,7 +215,7 @@ func (x *NegotiateConnection) String() string { func (*NegotiateConnection) ProtoMessage() {} func (x *NegotiateConnection) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[3] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -228,7 +228,7 @@ func (x *NegotiateConnection) ProtoReflect() protoreflect.Message { // Deprecated: Use NegotiateConnection.ProtoReflect.Descriptor instead. func (*NegotiateConnection) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{3} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{3} } type NegotiateConnection_ClientToServer struct { @@ -246,7 +246,7 @@ type NegotiateConnection_ClientToServer struct { func (x *NegotiateConnection_ClientToServer) Reset() { *x = NegotiateConnection_ClientToServer{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[4] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -259,7 +259,7 @@ func (x *NegotiateConnection_ClientToServer) String() string { func (*NegotiateConnection_ClientToServer) ProtoMessage() {} func (x *NegotiateConnection_ClientToServer) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[4] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -272,7 +272,7 @@ func (x *NegotiateConnection_ClientToServer) ProtoReflect() protoreflect.Message // Deprecated: Use NegotiateConnection_ClientToServer.ProtoReflect.Descriptor instead. 
func (*NegotiateConnection_ClientToServer) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{3, 0} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{3, 0} } func (m *NegotiateConnection_ClientToServer) GetMessage() isNegotiateConnection_ClientToServer_Message { @@ -340,7 +340,7 @@ type NegotiateConnection_ServerToClient struct { func (x *NegotiateConnection_ServerToClient) Reset() { *x = NegotiateConnection_ServerToClient{} if protoimpl.UnsafeEnabled { - mi := &file_peerbroker_proto_msgTypes[5] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -353,7 +353,7 @@ func (x *NegotiateConnection_ServerToClient) String() string { func (*NegotiateConnection_ServerToClient) ProtoMessage() {} func (x *NegotiateConnection_ServerToClient) ProtoReflect() protoreflect.Message { - mi := &file_peerbroker_proto_msgTypes[5] + mi := &file_peerbroker_proto_peerbroker_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -366,7 +366,7 @@ func (x *NegotiateConnection_ServerToClient) ProtoReflect() protoreflect.Message // Deprecated: Use NegotiateConnection_ServerToClient.ProtoReflect.Descriptor instead. func (*NegotiateConnection_ServerToClient) Descriptor() ([]byte, []int) { - return file_peerbroker_proto_rawDescGZIP(), []int{3, 1} + return file_peerbroker_proto_peerbroker_proto_rawDescGZIP(), []int{3, 1} } func (m *NegotiateConnection_ServerToClient) GetMessage() isNegotiateConnection_ServerToClient_Message { @@ -407,79 +407,80 @@ func (*NegotiateConnection_ServerToClient_Answer) isNegotiateConnection_ServerTo func (*NegotiateConnection_ServerToClient_IceCandidate) isNegotiateConnection_ServerToClient_Message() { } -var File_peerbroker_proto protoreflect.FileDescriptor - -var file_peerbroker_proto_rawDesc = []byte{ - 0x0a, 0x10, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x12, 0x0a, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x22, 0x47, - 0x0a, 0x18, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x44, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x73, 0x64, - 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x73, 0x64, - 0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x64, 0x70, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x03, 0x73, 0x64, 0x70, 0x22, 0x8a, 0x01, 0x0a, 0x0f, 0x57, 0x65, 0x62, 0x52, - 0x54, 0x43, 0x49, 0x43, 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x75, - 0x72, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x75, 0x72, 0x6c, 0x73, 0x12, - 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x0a, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x12, 0x27, 0x0a, 0x0f, 0x63, - 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, - 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, - 0x54, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x10, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x49, 0x43, - 0x45, 0x53, 0x65, 
0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x35, 0x0a, 0x07, 0x73, 0x65, 0x72, 0x76, - 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x65, 0x65, 0x72, - 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x49, 0x43, 0x45, - 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x22, - 0xd7, 0x02, 0x0a, 0x13, 0x4e, 0x65, 0x67, 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, - 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xba, 0x01, 0x0a, 0x0e, 0x43, 0x6c, 0x69, 0x65, - 0x6e, 0x74, 0x54, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x07, 0x73, 0x65, - 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x65, - 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x49, - 0x43, 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x48, 0x00, 0x52, 0x07, 0x73, 0x65, 0x72, - 0x76, 0x65, 0x72, 0x73, 0x12, 0x3c, 0x0a, 0x05, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, - 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x44, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x6f, 0x66, 0x66, - 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0d, 0x69, 0x63, 0x65, 0x5f, 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, - 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x63, 0x65, - 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, 0x73, - 0x73, 0x61, 0x67, 0x65, 0x1a, 0x82, 0x01, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x54, - 0x6f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x12, 0x3e, 0x0a, 0x06, 0x61, 0x6e, 0x73, 0x77, 0x65, - 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, - 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, 0x69, - 0x6f, 0x6e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, - 0x06, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0d, 0x69, 0x63, 0x65, 0x5f, 0x63, - 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, - 0x52, 0x0c, 0x69, 0x63, 0x65, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x42, 0x09, - 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x32, 0x87, 0x01, 0x0a, 0x0a, 0x50, 0x65, - 0x65, 0x72, 0x42, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x4e, 0x65, 0x67, 0x6f, - 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x12, - 0x2e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x4e, 0x65, 0x67, +var File_peerbroker_proto_peerbroker_proto protoreflect.FileDescriptor + +var file_peerbroker_proto_peerbroker_proto_rawDesc = []byte{ + 0x0a, 0x21, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2f, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x22, + 0x47, 0x0a, 0x18, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, + 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x19, 0x0a, 0x08, 0x73, + 0x64, 0x70, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x07, 0x73, + 0x64, 
0x70, 0x54, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x64, 0x70, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x03, 0x73, 0x64, 0x70, 0x22, 0x8a, 0x01, 0x0a, 0x0f, 0x57, 0x65, 0x62, + 0x52, 0x54, 0x43, 0x49, 0x43, 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, + 0x75, 0x72, 0x6c, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x04, 0x75, 0x72, 0x6c, 0x73, + 0x12, 0x1a, 0x0a, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, + 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x0a, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x12, 0x27, 0x0a, 0x0f, + 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, 0x6c, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, + 0x04, 0x20, 0x01, 0x28, 0x05, 0x52, 0x0e, 0x63, 0x72, 0x65, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x61, + 0x6c, 0x54, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x10, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x49, + 0x43, 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x35, 0x0a, 0x07, 0x73, 0x65, 0x72, + 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x65, 0x65, + 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x49, 0x43, + 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x52, 0x07, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, + 0x22, 0xd7, 0x02, 0x0a, 0x13, 0x4e, 0x65, 0x67, 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x1a, 0xba, 0x01, 0x0a, 0x0e, 0x43, 0x6c, 0x69, + 0x65, 0x6e, 0x74, 0x54, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x12, 0x38, 0x0a, 0x07, 0x73, + 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, + 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, + 0x49, 0x43, 0x45, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x73, 0x48, 0x00, 0x52, 0x07, 0x73, 0x65, + 0x72, 0x76, 0x65, 0x72, 0x73, 0x12, 0x3c, 0x0a, 0x05, 0x6f, 0x66, 0x66, 0x65, 0x72, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, + 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x44, + 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x05, 0x6f, 0x66, + 0x66, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0d, 0x69, 0x63, 0x65, 0x5f, 0x63, 0x61, 0x6e, 0x64, 0x69, + 0x64, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0c, 0x69, 0x63, + 0x65, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x6d, 0x65, + 0x73, 0x73, 0x61, 0x67, 0x65, 0x1a, 0x82, 0x01, 0x0a, 0x0e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x54, 0x6f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x12, 0x3e, 0x0a, 0x06, 0x61, 0x6e, 0x73, 0x77, + 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, + 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x57, 0x65, 0x62, 0x52, 0x54, 0x43, 0x53, 0x65, 0x73, 0x73, + 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x48, 0x00, + 0x52, 0x06, 0x61, 0x6e, 0x73, 0x77, 0x65, 0x72, 0x12, 0x25, 0x0a, 0x0d, 0x69, 0x63, 0x65, 0x5f, + 0x63, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, + 0x00, 0x52, 0x0c, 0x69, 0x63, 0x65, 0x43, 0x61, 0x6e, 0x64, 0x69, 0x64, 0x61, 0x74, 0x65, 0x42, + 0x09, 0x0a, 0x07, 0x6d, 0x65, 
0x73, 0x73, 0x61, 0x67, 0x65, 0x32, 0x87, 0x01, 0x0a, 0x0a, 0x50, + 0x65, 0x65, 0x72, 0x42, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x12, 0x79, 0x0a, 0x13, 0x4e, 0x65, 0x67, 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x1a, - 0x2e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x4e, 0x65, 0x67, - 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, - 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x54, 0x6f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x28, - 0x01, 0x30, 0x01, 0x42, 0x29, 0x5a, 0x27, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, - 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x65, - 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x12, 0x2e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x4e, 0x65, + 0x67, 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x2e, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x6f, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, + 0x1a, 0x2e, 0x2e, 0x70, 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2e, 0x4e, 0x65, + 0x67, 0x6f, 0x74, 0x69, 0x61, 0x74, 0x65, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, + 0x6e, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x54, 0x6f, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, + 0x28, 0x01, 0x30, 0x01, 0x42, 0x29, 0x5a, 0x27, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, + 0x65, 0x65, 0x72, 0x62, 0x72, 0x6f, 0x6b, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, + 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( - file_peerbroker_proto_rawDescOnce sync.Once - file_peerbroker_proto_rawDescData = file_peerbroker_proto_rawDesc + file_peerbroker_proto_peerbroker_proto_rawDescOnce sync.Once + file_peerbroker_proto_peerbroker_proto_rawDescData = file_peerbroker_proto_peerbroker_proto_rawDesc ) -func file_peerbroker_proto_rawDescGZIP() []byte { - file_peerbroker_proto_rawDescOnce.Do(func() { - file_peerbroker_proto_rawDescData = protoimpl.X.CompressGZIP(file_peerbroker_proto_rawDescData) +func file_peerbroker_proto_peerbroker_proto_rawDescGZIP() []byte { + file_peerbroker_proto_peerbroker_proto_rawDescOnce.Do(func() { + file_peerbroker_proto_peerbroker_proto_rawDescData = protoimpl.X.CompressGZIP(file_peerbroker_proto_peerbroker_proto_rawDescData) }) - return file_peerbroker_proto_rawDescData + return file_peerbroker_proto_peerbroker_proto_rawDescData } -var file_peerbroker_proto_msgTypes = make([]protoimpl.MessageInfo, 6) -var file_peerbroker_proto_goTypes = []interface{}{ +var file_peerbroker_proto_peerbroker_proto_msgTypes = make([]protoimpl.MessageInfo, 6) +var file_peerbroker_proto_peerbroker_proto_goTypes = []interface{}{ (*WebRTCSessionDescription)(nil), // 0: peerbroker.WebRTCSessionDescription (*WebRTCICEServer)(nil), // 1: peerbroker.WebRTCICEServer (*WebRTCICEServers)(nil), // 2: peerbroker.WebRTCICEServers @@ -487,7 +488,7 @@ var file_peerbroker_proto_goTypes = []interface{}{ (*NegotiateConnection_ClientToServer)(nil), // 4: peerbroker.NegotiateConnection.ClientToServer (*NegotiateConnection_ServerToClient)(nil), // 5: peerbroker.NegotiateConnection.ServerToClient } -var file_peerbroker_proto_depIdxs = []int32{ 
+var file_peerbroker_proto_peerbroker_proto_depIdxs = []int32{ 1, // 0: peerbroker.WebRTCICEServers.servers:type_name -> peerbroker.WebRTCICEServer 2, // 1: peerbroker.NegotiateConnection.ClientToServer.servers:type_name -> peerbroker.WebRTCICEServers 0, // 2: peerbroker.NegotiateConnection.ClientToServer.offer:type_name -> peerbroker.WebRTCSessionDescription @@ -501,13 +502,13 @@ var file_peerbroker_proto_depIdxs = []int32{ 0, // [0:4] is the sub-list for field type_name } -func init() { file_peerbroker_proto_init() } -func file_peerbroker_proto_init() { - if File_peerbroker_proto != nil { +func init() { file_peerbroker_proto_peerbroker_proto_init() } +func file_peerbroker_proto_peerbroker_proto_init() { + if File_peerbroker_proto_peerbroker_proto != nil { return } if !protoimpl.UnsafeEnabled { - file_peerbroker_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WebRTCSessionDescription); i { case 0: return &v.state @@ -519,7 +520,7 @@ func file_peerbroker_proto_init() { return nil } } - file_peerbroker_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WebRTCICEServer); i { case 0: return &v.state @@ -531,7 +532,7 @@ func file_peerbroker_proto_init() { return nil } } - file_peerbroker_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*WebRTCICEServers); i { case 0: return &v.state @@ -543,7 +544,7 @@ func file_peerbroker_proto_init() { return nil } } - file_peerbroker_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NegotiateConnection); i { case 0: return &v.state @@ -555,7 +556,7 @@ func file_peerbroker_proto_init() { return nil } } - file_peerbroker_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NegotiateConnection_ClientToServer); i { case 0: return &v.state @@ -567,7 +568,7 @@ func file_peerbroker_proto_init() { return nil } } - file_peerbroker_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_peerbroker_proto_peerbroker_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*NegotiateConnection_ServerToClient); i { case 0: return &v.state @@ -580,12 +581,12 @@ func file_peerbroker_proto_init() { } } } - file_peerbroker_proto_msgTypes[4].OneofWrappers = []interface{}{ + file_peerbroker_proto_peerbroker_proto_msgTypes[4].OneofWrappers = []interface{}{ (*NegotiateConnection_ClientToServer_Servers)(nil), (*NegotiateConnection_ClientToServer_Offer)(nil), (*NegotiateConnection_ClientToServer_IceCandidate)(nil), } - file_peerbroker_proto_msgTypes[5].OneofWrappers = []interface{}{ + file_peerbroker_proto_peerbroker_proto_msgTypes[5].OneofWrappers = []interface{}{ (*NegotiateConnection_ServerToClient_Answer)(nil), (*NegotiateConnection_ServerToClient_IceCandidate)(nil), } @@ -593,18 +594,18 @@ func file_peerbroker_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - 
RawDescriptor: file_peerbroker_proto_rawDesc, + RawDescriptor: file_peerbroker_proto_peerbroker_proto_rawDesc, NumEnums: 0, NumMessages: 6, NumExtensions: 0, NumServices: 1, }, - GoTypes: file_peerbroker_proto_goTypes, - DependencyIndexes: file_peerbroker_proto_depIdxs, - MessageInfos: file_peerbroker_proto_msgTypes, + GoTypes: file_peerbroker_proto_peerbroker_proto_goTypes, + DependencyIndexes: file_peerbroker_proto_peerbroker_proto_depIdxs, + MessageInfos: file_peerbroker_proto_peerbroker_proto_msgTypes, }.Build() - File_peerbroker_proto = out.File - file_peerbroker_proto_rawDesc = nil - file_peerbroker_proto_goTypes = nil - file_peerbroker_proto_depIdxs = nil + File_peerbroker_proto_peerbroker_proto = out.File + file_peerbroker_proto_peerbroker_proto_rawDesc = nil + file_peerbroker_proto_peerbroker_proto_goTypes = nil + file_peerbroker_proto_peerbroker_proto_depIdxs = nil } diff --git a/peerbroker/proto/peerbroker_drpc.pb.go b/peerbroker/proto/peerbroker_drpc.pb.go index bbf5e1e2c0cbd..977c9149972ed 100644 --- a/peerbroker/proto/peerbroker_drpc.pb.go +++ b/peerbroker/proto/peerbroker_drpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-drpc. DO NOT EDIT. // protoc-gen-go-drpc version: v0.0.26 -// source: peerbroker.proto +// source: peerbroker/proto/peerbroker.proto package proto @@ -13,25 +13,25 @@ import ( drpcerr "storj.io/drpc/drpcerr" ) -type drpcEncoding_File_peerbroker_proto struct{} +type drpcEncoding_File_peerbroker_proto_peerbroker_proto struct{} -func (drpcEncoding_File_peerbroker_proto) Marshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_peerbroker_proto_peerbroker_proto) Marshal(msg drpc.Message) ([]byte, error) { return proto.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_peerbroker_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_peerbroker_proto_peerbroker_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) } -func (drpcEncoding_File_peerbroker_proto) Unmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_peerbroker_proto_peerbroker_proto) Unmarshal(buf []byte, msg drpc.Message) error { return proto.Unmarshal(buf, msg.(proto.Message)) } -func (drpcEncoding_File_peerbroker_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_peerbroker_proto_peerbroker_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { return protojson.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_peerbroker_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_peerbroker_proto_peerbroker_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { return protojson.Unmarshal(buf, msg.(proto.Message)) } @@ -52,7 +52,7 @@ func NewDRPCPeerBrokerClient(cc drpc.Conn) DRPCPeerBrokerClient { func (c *drpcPeerBrokerClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcPeerBrokerClient) NegotiateConnection(ctx context.Context) (DRPCPeerBroker_NegotiateConnectionClient, error) { - stream, err := c.cc.NewStream(ctx, "/peerbroker.PeerBroker/NegotiateConnection", drpcEncoding_File_peerbroker_proto{}) + stream, err := c.cc.NewStream(ctx, "/peerbroker.PeerBroker/NegotiateConnection", drpcEncoding_File_peerbroker_proto_peerbroker_proto{}) if err != nil { return nil, err } @@ -71,19 +71,19 @@ type drpcPeerBroker_NegotiateConnectionClient struct { } func (x *drpcPeerBroker_NegotiateConnectionClient) Send(m *NegotiateConnection_ClientToServer) error { - return 
x.MsgSend(m, drpcEncoding_File_peerbroker_proto{}) + return x.MsgSend(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}) } func (x *drpcPeerBroker_NegotiateConnectionClient) Recv() (*NegotiateConnection_ServerToClient, error) { m := new(NegotiateConnection_ServerToClient) - if err := x.MsgRecv(m, drpcEncoding_File_peerbroker_proto{}); err != nil { + if err := x.MsgRecv(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}); err != nil { return nil, err } return m, nil } func (x *drpcPeerBroker_NegotiateConnectionClient) RecvMsg(m *NegotiateConnection_ServerToClient) error { - return x.MsgRecv(m, drpcEncoding_File_peerbroker_proto{}) + return x.MsgRecv(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}) } type DRPCPeerBrokerServer interface { @@ -103,7 +103,7 @@ func (DRPCPeerBrokerDescription) NumMethods() int { return 1 } func (DRPCPeerBrokerDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/peerbroker.PeerBroker/NegotiateConnection", drpcEncoding_File_peerbroker_proto{}, + return "/peerbroker.PeerBroker/NegotiateConnection", drpcEncoding_File_peerbroker_proto_peerbroker_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return nil, srv.(DRPCPeerBrokerServer). NegotiateConnection( @@ -130,17 +130,17 @@ type drpcPeerBroker_NegotiateConnectionStream struct { } func (x *drpcPeerBroker_NegotiateConnectionStream) Send(m *NegotiateConnection_ServerToClient) error { - return x.MsgSend(m, drpcEncoding_File_peerbroker_proto{}) + return x.MsgSend(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}) } func (x *drpcPeerBroker_NegotiateConnectionStream) Recv() (*NegotiateConnection_ClientToServer, error) { m := new(NegotiateConnection_ClientToServer) - if err := x.MsgRecv(m, drpcEncoding_File_peerbroker_proto{}); err != nil { + if err := x.MsgRecv(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}); err != nil { return nil, err } return m, nil } func (x *drpcPeerBroker_NegotiateConnectionStream) RecvMsg(m *NegotiateConnection_ClientToServer) error { - return x.MsgRecv(m, drpcEncoding_File_peerbroker_proto{}) + return x.MsgRecv(m, drpcEncoding_File_peerbroker_proto_peerbroker_proto{}) } diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go new file mode 100644 index 0000000000000..11dfadb372418 --- /dev/null +++ b/provisionerd/proto/provisionerd.pb.go @@ -0,0 +1,1346 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// versions: +// protoc-gen-go v1.26.0 +// protoc v3.6.1 +// source: provisionerd/proto/provisionerd.proto + +package proto + +import ( + proto "github.com/coder/coder/provisionersdk/proto" + protoreflect "google.golang.org/protobuf/reflect/protoreflect" + protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" +) + +const ( + // Verify that this generated code is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) + // Verify that runtime/protoimpl is sufficiently up-to-date. + _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) +) + +// LogSource represents the sender of the log. +type LogSource int32 + +const ( + LogSource_PROVISIONER LogSource = 0 + LogSource_DAEMON LogSource = 1 +) + +// Enum value maps for LogSource. 
+var ( + LogSource_name = map[int32]string{ + 0: "PROVISIONER", + 1: "DAEMON", + } + LogSource_value = map[string]int32{ + "PROVISIONER": 0, + "DAEMON": 1, + } +) + +func (x LogSource) Enum() *LogSource { + p := new(LogSource) + *p = x + return p +} + +func (x LogSource) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (LogSource) Descriptor() protoreflect.EnumDescriptor { + return file_provisionerd_proto_provisionerd_proto_enumTypes[0].Descriptor() +} + +func (LogSource) Type() protoreflect.EnumType { + return &file_provisionerd_proto_provisionerd_proto_enumTypes[0] +} + +func (x LogSource) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use LogSource.Descriptor instead. +func (LogSource) EnumDescriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{0} +} + +// LogLevel represents severity of the log. +type LogLevel int32 + +const ( + LogLevel_TRACE LogLevel = 0 + LogLevel_DEBUG LogLevel = 1 + LogLevel_INFO LogLevel = 2 + LogLevel_WARN LogLevel = 3 + LogLevel_ERROR LogLevel = 4 + LogLevel_FATAL LogLevel = 5 +) + +// Enum value maps for LogLevel. +var ( + LogLevel_name = map[int32]string{ + 0: "TRACE", + 1: "DEBUG", + 2: "INFO", + 3: "WARN", + 4: "ERROR", + 5: "FATAL", + } + LogLevel_value = map[string]int32{ + "TRACE": 0, + "DEBUG": 1, + "INFO": 2, + "WARN": 3, + "ERROR": 4, + "FATAL": 5, + } +) + +func (x LogLevel) Enum() *LogLevel { + p := new(LogLevel) + *p = x + return p +} + +func (x LogLevel) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (LogLevel) Descriptor() protoreflect.EnumDescriptor { + return file_provisionerd_proto_provisionerd_proto_enumTypes[1].Descriptor() +} + +func (LogLevel) Type() protoreflect.EnumType { + return &file_provisionerd_proto_provisionerd_proto_enumTypes[1] +} + +func (x LogLevel) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use LogLevel.Descriptor instead. +func (LogLevel) EnumDescriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1} +} + +// Empty indicates a successful request/response. +type Empty struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields +} + +func (x *Empty) Reset() { + *x = Empty{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[0] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Empty) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Empty) ProtoMessage() {} + +func (x *Empty) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[0] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Empty.ProtoReflect.Descriptor instead. +func (*Empty) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{0} +} + +// AcquiredJob is returned when a provisioner daemon has a job locked. 
+type AcquiredJob struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + CreatedAt int64 `protobuf:"varint,2,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Provisioner string `protobuf:"bytes,3,opt,name=provisioner,proto3" json:"provisioner,omitempty"` + OrganizationName string `protobuf:"bytes,4,opt,name=organization_name,json=organizationName,proto3" json:"organization_name,omitempty"` + ProjectName string `protobuf:"bytes,5,opt,name=project_name,json=projectName,proto3" json:"project_name,omitempty"` + UserName string `protobuf:"bytes,6,opt,name=user_name,json=userName,proto3" json:"user_name,omitempty"` + ProjectSourceArchive []byte `protobuf:"bytes,7,opt,name=project_source_archive,json=projectSourceArchive,proto3" json:"project_source_archive,omitempty"` + // Types that are assignable to Type: + // *AcquiredJob_WorkspaceProvision_ + // *AcquiredJob_ProjectImport_ + Type isAcquiredJob_Type `protobuf_oneof:"type"` +} + +func (x *AcquiredJob) Reset() { + *x = AcquiredJob{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[1] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AcquiredJob) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AcquiredJob) ProtoMessage() {} + +func (x *AcquiredJob) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[1] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AcquiredJob.ProtoReflect.Descriptor instead. 
+func (*AcquiredJob) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1} +} + +func (x *AcquiredJob) GetJobId() string { + if x != nil { + return x.JobId + } + return "" +} + +func (x *AcquiredJob) GetCreatedAt() int64 { + if x != nil { + return x.CreatedAt + } + return 0 +} + +func (x *AcquiredJob) GetProvisioner() string { + if x != nil { + return x.Provisioner + } + return "" +} + +func (x *AcquiredJob) GetOrganizationName() string { + if x != nil { + return x.OrganizationName + } + return "" +} + +func (x *AcquiredJob) GetProjectName() string { + if x != nil { + return x.ProjectName + } + return "" +} + +func (x *AcquiredJob) GetUserName() string { + if x != nil { + return x.UserName + } + return "" +} + +func (x *AcquiredJob) GetProjectSourceArchive() []byte { + if x != nil { + return x.ProjectSourceArchive + } + return nil +} + +func (m *AcquiredJob) GetType() isAcquiredJob_Type { + if m != nil { + return m.Type + } + return nil +} + +func (x *AcquiredJob) GetWorkspaceProvision() *AcquiredJob_WorkspaceProvision { + if x, ok := x.GetType().(*AcquiredJob_WorkspaceProvision_); ok { + return x.WorkspaceProvision + } + return nil +} + +func (x *AcquiredJob) GetProjectImport() *AcquiredJob_ProjectImport { + if x, ok := x.GetType().(*AcquiredJob_ProjectImport_); ok { + return x.ProjectImport + } + return nil +} + +type isAcquiredJob_Type interface { + isAcquiredJob_Type() +} + +type AcquiredJob_WorkspaceProvision_ struct { + WorkspaceProvision *AcquiredJob_WorkspaceProvision `protobuf:"bytes,8,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` +} + +type AcquiredJob_ProjectImport_ struct { + ProjectImport *AcquiredJob_ProjectImport `protobuf:"bytes,9,opt,name=project_import,json=projectImport,proto3,oneof"` +} + +func (*AcquiredJob_WorkspaceProvision_) isAcquiredJob_Type() {} + +func (*AcquiredJob_ProjectImport_) isAcquiredJob_Type() {} + +type CancelledJob struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + Error string `protobuf:"bytes,2,opt,name=error,proto3" json:"error,omitempty"` +} + +func (x *CancelledJob) Reset() { + *x = CancelledJob{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[2] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CancelledJob) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CancelledJob) ProtoMessage() {} + +func (x *CancelledJob) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[2] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CancelledJob.ProtoReflect.Descriptor instead. +func (*CancelledJob) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{2} +} + +func (x *CancelledJob) GetJobId() string { + if x != nil { + return x.JobId + } + return "" +} + +func (x *CancelledJob) GetError() string { + if x != nil { + return x.Error + } + return "" +} + +// CompletedJob is sent when the provisioner daemon completes a job. 
+type CompletedJob struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + // Types that are assignable to Type: + // *CompletedJob_WorkspaceProvision_ + // *CompletedJob_ProjectImport_ + Type isCompletedJob_Type `protobuf_oneof:"type"` +} + +func (x *CompletedJob) Reset() { + *x = CompletedJob{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CompletedJob) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CompletedJob) ProtoMessage() {} + +func (x *CompletedJob) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[3] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CompletedJob.ProtoReflect.Descriptor instead. +func (*CompletedJob) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3} +} + +func (x *CompletedJob) GetJobId() string { + if x != nil { + return x.JobId + } + return "" +} + +func (m *CompletedJob) GetType() isCompletedJob_Type { + if m != nil { + return m.Type + } + return nil +} + +func (x *CompletedJob) GetWorkspaceProvision() *CompletedJob_WorkspaceProvision { + if x, ok := x.GetType().(*CompletedJob_WorkspaceProvision_); ok { + return x.WorkspaceProvision + } + return nil +} + +func (x *CompletedJob) GetProjectImport() *CompletedJob_ProjectImport { + if x, ok := x.GetType().(*CompletedJob_ProjectImport_); ok { + return x.ProjectImport + } + return nil +} + +type isCompletedJob_Type interface { + isCompletedJob_Type() +} + +type CompletedJob_WorkspaceProvision_ struct { + WorkspaceProvision *CompletedJob_WorkspaceProvision `protobuf:"bytes,2,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` +} + +type CompletedJob_ProjectImport_ struct { + ProjectImport *CompletedJob_ProjectImport `protobuf:"bytes,3,opt,name=project_import,json=projectImport,proto3,oneof"` +} + +func (*CompletedJob_WorkspaceProvision_) isCompletedJob_Type() {} + +func (*CompletedJob_ProjectImport_) isCompletedJob_Type() {} + +// Log represents output from a job. 
+type Log struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Source LogSource `protobuf:"varint,1,opt,name=source,proto3,enum=provisionerd.LogSource" json:"source,omitempty"` + Level LogLevel `protobuf:"varint,2,opt,name=level,proto3,enum=provisionerd.LogLevel" json:"level,omitempty"` + CreatedAt int64 `protobuf:"varint,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + // Types that are assignable to Type: + // *Log_WorkspaceProvision_ + // *Log_ProjectImport_ + Type isLog_Type `protobuf_oneof:"type"` +} + +func (x *Log) Reset() { + *x = Log{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Log) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Log) ProtoMessage() {} + +func (x *Log) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Log.ProtoReflect.Descriptor instead. +func (*Log) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4} +} + +func (x *Log) GetSource() LogSource { + if x != nil { + return x.Source + } + return LogSource_PROVISIONER +} + +func (x *Log) GetLevel() LogLevel { + if x != nil { + return x.Level + } + return LogLevel_TRACE +} + +func (x *Log) GetCreatedAt() int64 { + if x != nil { + return x.CreatedAt + } + return 0 +} + +func (m *Log) GetType() isLog_Type { + if m != nil { + return m.Type + } + return nil +} + +func (x *Log) GetWorkspaceProvision() *Log_WorkspaceProvision { + if x, ok := x.GetType().(*Log_WorkspaceProvision_); ok { + return x.WorkspaceProvision + } + return nil +} + +func (x *Log) GetProjectImport() *Log_ProjectImport { + if x, ok := x.GetType().(*Log_ProjectImport_); ok { + return x.ProjectImport + } + return nil +} + +type isLog_Type interface { + isLog_Type() +} + +type Log_WorkspaceProvision_ struct { + WorkspaceProvision *Log_WorkspaceProvision `protobuf:"bytes,4,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` +} + +type Log_ProjectImport_ struct { + ProjectImport *Log_ProjectImport `protobuf:"bytes,5,opt,name=project_import,json=projectImport,proto3,oneof"` +} + +func (*Log_WorkspaceProvision_) isLog_Type() {} + +func (*Log_ProjectImport_) isLog_Type() {} + +// JobUpdate represents an update to a job. +// There may be no log output, but this message +// should still be sent periodically as a heartbeat. 
+type JobUpdate struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + Logs []*Log `protobuf:"bytes,2,rep,name=logs,proto3" json:"logs,omitempty"` +} + +func (x *JobUpdate) Reset() { + *x = JobUpdate{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *JobUpdate) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*JobUpdate) ProtoMessage() {} + +func (x *JobUpdate) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use JobUpdate.ProtoReflect.Descriptor instead. +func (*JobUpdate) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{5} +} + +func (x *JobUpdate) GetJobId() string { + if x != nil { + return x.JobId + } + return "" +} + +func (x *JobUpdate) GetLogs() []*Log { + if x != nil { + return x.Logs + } + return nil +} + +type AcquiredJob_WorkspaceProvision struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + WorkspaceHistoryId string `protobuf:"bytes,1,opt,name=workspace_history_id,json=workspaceHistoryId,proto3" json:"workspace_history_id,omitempty"` + WorkspaceName string `protobuf:"bytes,2,opt,name=workspace_name,json=workspaceName,proto3" json:"workspace_name,omitempty"` + ParameterValues []*proto.ParameterValue `protobuf:"bytes,3,rep,name=parameter_values,json=parameterValues,proto3" json:"parameter_values,omitempty"` + State []byte `protobuf:"bytes,4,opt,name=state,proto3" json:"state,omitempty"` +} + +func (x *AcquiredJob_WorkspaceProvision) Reset() { + *x = AcquiredJob_WorkspaceProvision{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AcquiredJob_WorkspaceProvision) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AcquiredJob_WorkspaceProvision) ProtoMessage() {} + +func (x *AcquiredJob_WorkspaceProvision) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[6] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AcquiredJob_WorkspaceProvision.ProtoReflect.Descriptor instead. 
+func (*AcquiredJob_WorkspaceProvision) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1, 0} +} + +func (x *AcquiredJob_WorkspaceProvision) GetWorkspaceHistoryId() string { + if x != nil { + return x.WorkspaceHistoryId + } + return "" +} + +func (x *AcquiredJob_WorkspaceProvision) GetWorkspaceName() string { + if x != nil { + return x.WorkspaceName + } + return "" +} + +func (x *AcquiredJob_WorkspaceProvision) GetParameterValues() []*proto.ParameterValue { + if x != nil { + return x.ParameterValues + } + return nil +} + +func (x *AcquiredJob_WorkspaceProvision) GetState() []byte { + if x != nil { + return x.State + } + return nil +} + +type AcquiredJob_ProjectImport struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProjectHistoryId string `protobuf:"bytes,1,opt,name=project_history_id,json=projectHistoryId,proto3" json:"project_history_id,omitempty"` + ProjectHistoryName string `protobuf:"bytes,2,opt,name=project_history_name,json=projectHistoryName,proto3" json:"project_history_name,omitempty"` +} + +func (x *AcquiredJob_ProjectImport) Reset() { + *x = AcquiredJob_ProjectImport{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[7] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *AcquiredJob_ProjectImport) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*AcquiredJob_ProjectImport) ProtoMessage() {} + +func (x *AcquiredJob_ProjectImport) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[7] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use AcquiredJob_ProjectImport.ProtoReflect.Descriptor instead. 
+func (*AcquiredJob_ProjectImport) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1, 1} +} + +func (x *AcquiredJob_ProjectImport) GetProjectHistoryId() string { + if x != nil { + return x.ProjectHistoryId + } + return "" +} + +func (x *AcquiredJob_ProjectImport) GetProjectHistoryName() string { + if x != nil { + return x.ProjectHistoryName + } + return "" +} + +type CompletedJob_WorkspaceProvision struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + State []byte `protobuf:"bytes,1,opt,name=state,proto3" json:"state,omitempty"` + Resources []*proto.Resource `protobuf:"bytes,2,rep,name=resources,proto3" json:"resources,omitempty"` +} + +func (x *CompletedJob_WorkspaceProvision) Reset() { + *x = CompletedJob_WorkspaceProvision{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[8] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CompletedJob_WorkspaceProvision) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CompletedJob_WorkspaceProvision) ProtoMessage() {} + +func (x *CompletedJob_WorkspaceProvision) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[8] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CompletedJob_WorkspaceProvision.ProtoReflect.Descriptor instead. +func (*CompletedJob_WorkspaceProvision) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3, 0} +} + +func (x *CompletedJob_WorkspaceProvision) GetState() []byte { + if x != nil { + return x.State + } + return nil +} + +func (x *CompletedJob_WorkspaceProvision) GetResources() []*proto.Resource { + if x != nil { + return x.Resources + } + return nil +} + +type CompletedJob_ProjectImport struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ParameterSchemas []*proto.ParameterSchema `protobuf:"bytes,1,rep,name=parameter_schemas,json=parameterSchemas,proto3" json:"parameter_schemas,omitempty"` +} + +func (x *CompletedJob_ProjectImport) Reset() { + *x = CompletedJob_ProjectImport{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *CompletedJob_ProjectImport) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*CompletedJob_ProjectImport) ProtoMessage() {} + +func (x *CompletedJob_ProjectImport) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use CompletedJob_ProjectImport.ProtoReflect.Descriptor instead. 
+func (*CompletedJob_ProjectImport) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{3, 1} +} + +func (x *CompletedJob_ProjectImport) GetParameterSchemas() []*proto.ParameterSchema { + if x != nil { + return x.ParameterSchemas + } + return nil +} + +type Log_WorkspaceProvision struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + WorkspaceHistoryId string `protobuf:"bytes,1,opt,name=workspace_history_id,json=workspaceHistoryId,proto3" json:"workspace_history_id,omitempty"` + Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *Log_WorkspaceProvision) Reset() { + *x = Log_WorkspaceProvision{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[10] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Log_WorkspaceProvision) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Log_WorkspaceProvision) ProtoMessage() {} + +func (x *Log_WorkspaceProvision) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[10] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Log_WorkspaceProvision.ProtoReflect.Descriptor instead. +func (*Log_WorkspaceProvision) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 0} +} + +func (x *Log_WorkspaceProvision) GetWorkspaceHistoryId() string { + if x != nil { + return x.WorkspaceHistoryId + } + return "" +} + +func (x *Log_WorkspaceProvision) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +type Log_ProjectImport struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ProjectHistoryId string `protobuf:"bytes,1,opt,name=project_history_id,json=projectHistoryId,proto3" json:"project_history_id,omitempty"` + Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *Log_ProjectImport) Reset() { + *x = Log_ProjectImport{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[11] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Log_ProjectImport) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Log_ProjectImport) ProtoMessage() {} + +func (x *Log_ProjectImport) ProtoReflect() protoreflect.Message { + mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[11] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Log_ProjectImport.ProtoReflect.Descriptor instead. 
+func (*Log_ProjectImport) Descriptor() ([]byte, []int) { + return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 1} +} + +func (x *Log_ProjectImport) GetProjectHistoryId() string { + if x != nil { + return x.ProjectHistoryId + } + return "" +} + +func (x *Log_ProjectImport) GetText() string { + if x != nil { + return x.Text + } + return "" +} + +var File_provisionerd_proto_provisionerd_proto protoreflect.FileDescriptor + +var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ + 0x0a, 0x25, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x1a, 0x26, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x07, 0x0a, + 0x05, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x22, 0x82, 0x06, 0x0a, 0x0b, 0x41, 0x63, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x1d, 0x0a, + 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x20, 0x0a, 0x0b, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x2b, + 0x0a, 0x11, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x6f, 0x72, 0x67, 0x61, 0x6e, + 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x70, + 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, + 0x0a, 0x09, 0x75, 0x73, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x08, 0x75, 0x73, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x34, 0x0a, 0x16, 0x70, + 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x61, 0x72, + 0x63, 0x68, 0x69, 0x76, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x14, 0x70, 0x72, 0x6f, + 0x6a, 0x65, 0x63, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x41, 0x72, 0x63, 0x68, 0x69, 0x76, + 0x65, 0x12, 0x5f, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, + 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x50, 0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x70, 0x72, 0x6f, + 
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, + 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x1a, 0xcb, 0x01, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, 0x77, + 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, + 0x70, 0x61, 0x63, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x12, 0x25, 0x0a, + 0x0e, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, + 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0d, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, + 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, + 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x1a, 0x6f, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, + 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x10, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, + 0x64, 0x12, 0x30, 0x0a, 0x14, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, 0x69, 0x73, + 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x4e, + 0x61, 0x6d, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x3b, 0x0a, 0x0c, 0x43, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, + 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, + 0x49, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x9f, 0x03, 0x0a, 0x0c, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, + 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, + 0x12, 0x60, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2d, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x12, 0x51, 
0x0a, 0x0e, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, + 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, + 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x5f, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x14, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x5a, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, + 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, + 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, + 0x61, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xdd, 0x03, 0x0a, 0x03, 0x4c, + 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, + 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, + 0x12, 0x57, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, + 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0e, 0x70, 0x72, 0x6f, + 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, + 0x2e, 0x4c, 0x6f, 0x67, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, + 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, + 0x6f, 0x72, 0x74, 0x1a, 0x5a, 0x0a, 0x12, 0x57, 
0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, + 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, + 0x63, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, + 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x1a, + 0x51, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, + 0x12, 0x2c, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, 0x69, 0x73, 0x74, + 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x72, + 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x12, 0x12, + 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, + 0x78, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x09, 0x4a, 0x6f, + 0x62, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, + 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, + 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, + 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x2a, 0x28, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, + 0x52, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x01, 0x2a, + 0x4a, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, + 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, + 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, + 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, + 0x12, 0x09, 0x0a, 0x05, 0x46, 0x41, 0x54, 0x41, 0x4c, 0x10, 0x05, 0x32, 0x8c, 0x02, 0x0a, 0x11, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, + 0x6e, 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, + 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, + 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, + 0x3b, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4a, 0x6f, 0x62, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, 0x12, 0x3c, 0x0a, 0x09, + 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, + 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 
0x0a, 0x0b, 0x43, 0x6f, + 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, + 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, + 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, +} + +var ( + file_provisionerd_proto_provisionerd_proto_rawDescOnce sync.Once + file_provisionerd_proto_provisionerd_proto_rawDescData = file_provisionerd_proto_provisionerd_proto_rawDesc +) + +func file_provisionerd_proto_provisionerd_proto_rawDescGZIP() []byte { + file_provisionerd_proto_provisionerd_proto_rawDescOnce.Do(func() { + file_provisionerd_proto_provisionerd_proto_rawDescData = protoimpl.X.CompressGZIP(file_provisionerd_proto_provisionerd_proto_rawDescData) + }) + return file_provisionerd_proto_provisionerd_proto_rawDescData +} + +var file_provisionerd_proto_provisionerd_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 12) +var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ + (LogSource)(0), // 0: provisionerd.LogSource + (LogLevel)(0), // 1: provisionerd.LogLevel + (*Empty)(nil), // 2: provisionerd.Empty + (*AcquiredJob)(nil), // 3: provisionerd.AcquiredJob + (*CancelledJob)(nil), // 4: provisionerd.CancelledJob + (*CompletedJob)(nil), // 5: provisionerd.CompletedJob + (*Log)(nil), // 6: provisionerd.Log + (*JobUpdate)(nil), // 7: provisionerd.JobUpdate + (*AcquiredJob_WorkspaceProvision)(nil), // 8: provisionerd.AcquiredJob.WorkspaceProvision + (*AcquiredJob_ProjectImport)(nil), // 9: provisionerd.AcquiredJob.ProjectImport + (*CompletedJob_WorkspaceProvision)(nil), // 10: provisionerd.CompletedJob.WorkspaceProvision + (*CompletedJob_ProjectImport)(nil), // 11: provisionerd.CompletedJob.ProjectImport + (*Log_WorkspaceProvision)(nil), // 12: provisionerd.Log.WorkspaceProvision + (*Log_ProjectImport)(nil), // 13: provisionerd.Log.ProjectImport + (*proto.ParameterValue)(nil), // 14: provisioner.ParameterValue + (*proto.Resource)(nil), // 15: provisioner.Resource + (*proto.ParameterSchema)(nil), // 16: provisioner.ParameterSchema +} +var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ + 8, // 0: provisionerd.AcquiredJob.workspace_provision:type_name -> provisionerd.AcquiredJob.WorkspaceProvision + 9, // 1: provisionerd.AcquiredJob.project_import:type_name -> provisionerd.AcquiredJob.ProjectImport + 10, // 2: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision + 11, // 3: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport + 0, // 4: provisionerd.Log.source:type_name -> provisionerd.LogSource + 1, // 5: provisionerd.Log.level:type_name -> provisionerd.LogLevel + 12, // 6: provisionerd.Log.workspace_provision:type_name -> provisionerd.Log.WorkspaceProvision + 13, // 7: provisionerd.Log.project_import:type_name -> provisionerd.Log.ProjectImport + 6, // 8: provisionerd.JobUpdate.logs:type_name -> provisionerd.Log + 14, // 9: 
provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue + 15, // 10: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource + 16, // 11: provisionerd.CompletedJob.ProjectImport.parameter_schemas:type_name -> provisioner.ParameterSchema + 2, // 12: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty + 7, // 13: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate + 4, // 14: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob + 5, // 15: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob + 3, // 16: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob + 2, // 17: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty + 2, // 18: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty + 2, // 19: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty + 16, // [16:20] is the sub-list for method output_type + 12, // [12:16] is the sub-list for method input_type + 12, // [12:12] is the sub-list for extension type_name + 12, // [12:12] is the sub-list for extension extendee + 0, // [0:12] is the sub-list for field type_name +} + +func init() { file_provisionerd_proto_provisionerd_proto_init() } +func file_provisionerd_proto_provisionerd_proto_init() { + if File_provisionerd_proto_provisionerd_proto != nil { + return + } + if !protoimpl.UnsafeEnabled { + file_provisionerd_proto_provisionerd_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Empty); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AcquiredJob); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CancelledJob); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CompletedJob); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Log); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*JobUpdate); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*AcquiredJob_WorkspaceProvision); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[7].Exporter = func(v interface{}, i int) 
interface{} { + switch v := v.(*AcquiredJob_ProjectImport); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CompletedJob_WorkspaceProvision); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*CompletedJob_ProjectImport); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Log_WorkspaceProvision); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Log_ProjectImport); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + } + file_provisionerd_proto_provisionerd_proto_msgTypes[1].OneofWrappers = []interface{}{ + (*AcquiredJob_WorkspaceProvision_)(nil), + (*AcquiredJob_ProjectImport_)(nil), + } + file_provisionerd_proto_provisionerd_proto_msgTypes[3].OneofWrappers = []interface{}{ + (*CompletedJob_WorkspaceProvision_)(nil), + (*CompletedJob_ProjectImport_)(nil), + } + file_provisionerd_proto_provisionerd_proto_msgTypes[4].OneofWrappers = []interface{}{ + (*Log_WorkspaceProvision_)(nil), + (*Log_ProjectImport_)(nil), + } + type x struct{} + out := protoimpl.TypeBuilder{ + File: protoimpl.DescBuilder{ + GoPackagePath: reflect.TypeOf(x{}).PkgPath(), + RawDescriptor: file_provisionerd_proto_provisionerd_proto_rawDesc, + NumEnums: 2, + NumMessages: 12, + NumExtensions: 0, + NumServices: 1, + }, + GoTypes: file_provisionerd_proto_provisionerd_proto_goTypes, + DependencyIndexes: file_provisionerd_proto_provisionerd_proto_depIdxs, + EnumInfos: file_provisionerd_proto_provisionerd_proto_enumTypes, + MessageInfos: file_provisionerd_proto_provisionerd_proto_msgTypes, + }.Build() + File_provisionerd_proto_provisionerd_proto = out.File + file_provisionerd_proto_provisionerd_proto_rawDesc = nil + file_provisionerd_proto_provisionerd_proto_goTypes = nil + file_provisionerd_proto_provisionerd_proto_depIdxs = nil +} diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto new file mode 100644 index 0000000000000..aa55358ae0920 --- /dev/null +++ b/provisionerd/proto/provisionerd.proto @@ -0,0 +1,118 @@ + +syntax = "proto3"; +option go_package = "github.com/coder/coder/provisionerd/proto"; + +package provisionerd; + +import "provisionersdk/proto/provisioner.proto"; + +// Empty indicates a successful request/response. +message Empty {} + +// AcquiredJob is returned when a provisioner daemon has a job locked. 
+message AcquiredJob { + message WorkspaceProvision { + string workspace_history_id = 1; + string workspace_name = 2; + repeated provisioner.ParameterValue parameter_values = 3; + bytes state = 4; + } + message ProjectImport { + string project_history_id = 1; + string project_history_name = 2; + } + string job_id = 1; + int64 created_at = 2; + string provisioner = 3; + string organization_name = 4; + string project_name = 5; + string user_name = 6; + bytes project_source_archive = 7; + oneof type { + WorkspaceProvision workspace_provision = 8; + ProjectImport project_import = 9; + } +} + +message CancelledJob { + string job_id = 1; + string error = 2; +} + +// CompletedJob is sent when the provisioner daemon completes a job. +message CompletedJob { + message WorkspaceProvision { + bytes state = 1; + repeated provisioner.Resource resources = 2; + } + message ProjectImport { + repeated provisioner.ParameterSchema parameter_schemas = 1; + } + string job_id = 1; + oneof type { + WorkspaceProvision workspace_provision = 2; + ProjectImport project_import = 3; + } +} + +// LogSource represents the sender of the log. +enum LogSource { + PROVISIONER = 0; + DAEMON = 1; +} + +// LogLevel represents severity of the log. +enum LogLevel { + TRACE = 0; + DEBUG = 1; + INFO = 2; + WARN = 3; + ERROR = 4; + FATAL = 5; +} + +// Log represents output from a job. +message Log { + message WorkspaceProvision { + string workspace_history_id = 1; + string text = 2; + } + message ProjectImport { + string project_history_id = 1; + string text = 2; + } + LogSource source = 1; + LogLevel level = 2; + int64 created_at = 3; + oneof type { + WorkspaceProvision workspace_provision = 4; + ProjectImport project_import = 5; + } +} + +// JobUpdate represents an update to a job. +// There may be no log output, but this message +// should still be sent periodically as a heartbeat. +message JobUpdate { + string job_id = 1; + repeated Log logs = 2; +} + +service ProvisionerDaemon { + // AcquireJob requests a job. Implementations should + // hold a lock on the job until CompleteJob() is + // called with the matching ID. + rpc AcquireJob(Empty) returns (AcquiredJob); + + // UpdateJob streams periodic updates for a job. + // Implementations should buffer logs so this stream + // is non-blocking. + rpc UpdateJob(stream JobUpdate) returns (Empty); + + // CancelJob indicates a job has been cancelled with + // an error message. + rpc CancelJob(CancelledJob) returns (Empty); + + // CompleteJob indicates a job has been completed. + rpc CompleteJob(CompletedJob) returns (Empty); +} \ No newline at end of file diff --git a/provisionerd/proto/provisionerd_drpc.pb.go b/provisionerd/proto/provisionerd_drpc.pb.go new file mode 100644 index 0000000000000..10ade5583bc32 --- /dev/null +++ b/provisionerd/proto/provisionerd_drpc.pb.go @@ -0,0 +1,275 @@ +// Code generated by protoc-gen-go-drpc. DO NOT EDIT. 
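For orientation only (not part of this patch): a minimal daemon-side sketch of driving the ProvisionerDaemon service defined above, using the generated DRPC client that follows. It assumes a drpc.Conn has already been dialed elsewhere; the runJob name, the 5-second heartbeat interval, and the elided provisioning work are illustrative assumptions, not part of the generated API.

package main

import (
	"context"
	"log"
	"time"

	"storj.io/drpc"

	"github.com/coder/coder/provisionerd/proto"
)

// runJob acquires a single job, heartbeats it, and reports completion.
// Error handling and the actual provisioning work are intentionally elided.
func runJob(ctx context.Context, conn drpc.Conn) error {
	client := proto.NewDRPCProvisionerDaemonClient(conn)

	// AcquireJob locks the job for this daemon until CompleteJob is called.
	job, err := client.AcquireJob(ctx, &proto.Empty{})
	if err != nil {
		return err
	}

	// UpdateJob is a client-side stream; send updates periodically as a
	// heartbeat even when there is no log output to report.
	updates, err := client.UpdateJob(ctx)
	if err != nil {
		return err
	}
	stop := make(chan struct{})
	go func() {
		ticker := time.NewTicker(5 * time.Second) // interval is an assumption
		defer ticker.Stop()
		for {
			select {
			case <-stop:
				return
			case <-ticker.C:
				if err := updates.Send(&proto.JobUpdate{JobId: job.GetJobId()}); err != nil {
					log.Printf("job update failed: %v", err)
					return
				}
			}
		}
	}()

	// ... run the provisioner here, streaming logs through updates.Send ...

	close(stop)
	if _, err := updates.CloseAndRecv(); err != nil {
		return err
	}
	_, err = client.CompleteJob(ctx, &proto.CompletedJob{JobId: job.GetJobId()})
	return err
}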
+// protoc-gen-go-drpc version: v0.0.26 +// source: provisionerd/proto/provisionerd.proto + +package proto + +import ( + context "context" + errors "errors" + protojson "google.golang.org/protobuf/encoding/protojson" + proto "google.golang.org/protobuf/proto" + drpc "storj.io/drpc" + drpcerr "storj.io/drpc/drpcerr" +) + +type drpcEncoding_File_provisionerd_proto_provisionerd_proto struct{} + +func (drpcEncoding_File_provisionerd_proto_provisionerd_proto) Marshal(msg drpc.Message) ([]byte, error) { + return proto.Marshal(msg.(proto.Message)) +} + +func (drpcEncoding_File_provisionerd_proto_provisionerd_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { + return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) +} + +func (drpcEncoding_File_provisionerd_proto_provisionerd_proto) Unmarshal(buf []byte, msg drpc.Message) error { + return proto.Unmarshal(buf, msg.(proto.Message)) +} + +func (drpcEncoding_File_provisionerd_proto_provisionerd_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { + return protojson.Marshal(msg.(proto.Message)) +} + +func (drpcEncoding_File_provisionerd_proto_provisionerd_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { + return protojson.Unmarshal(buf, msg.(proto.Message)) +} + +type DRPCProvisionerDaemonClient interface { + DRPCConn() drpc.Conn + + AcquireJob(ctx context.Context, in *Empty) (*AcquiredJob, error) + UpdateJob(ctx context.Context) (DRPCProvisionerDaemon_UpdateJobClient, error) + CancelJob(ctx context.Context, in *CancelledJob) (*Empty, error) + CompleteJob(ctx context.Context, in *CompletedJob) (*Empty, error) +} + +type drpcProvisionerDaemonClient struct { + cc drpc.Conn +} + +func NewDRPCProvisionerDaemonClient(cc drpc.Conn) DRPCProvisionerDaemonClient { + return &drpcProvisionerDaemonClient{cc} +} + +func (c *drpcProvisionerDaemonClient) DRPCConn() drpc.Conn { return c.cc } + +func (c *drpcProvisionerDaemonClient) AcquireJob(ctx context.Context, in *Empty) (*AcquiredJob, error) { + out := new(AcquiredJob) + err := c.cc.Invoke(ctx, "/provisionerd.ProvisionerDaemon/AcquireJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcProvisionerDaemonClient) UpdateJob(ctx context.Context) (DRPCProvisionerDaemon_UpdateJobClient, error) { + stream, err := c.cc.NewStream(ctx, "/provisionerd.ProvisionerDaemon/UpdateJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) + if err != nil { + return nil, err + } + x := &drpcProvisionerDaemon_UpdateJobClient{stream} + return x, nil +} + +type DRPCProvisionerDaemon_UpdateJobClient interface { + drpc.Stream + Send(*JobUpdate) error + CloseAndRecv() (*Empty, error) +} + +type drpcProvisionerDaemon_UpdateJobClient struct { + drpc.Stream +} + +func (x *drpcProvisionerDaemon_UpdateJobClient) Send(m *JobUpdate) error { + return x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) +} + +func (x *drpcProvisionerDaemon_UpdateJobClient) CloseAndRecv() (*Empty, error) { + if err := x.CloseSend(); err != nil { + return nil, err + } + m := new(Empty) + if err := x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return nil, err + } + return m, nil +} + +func (x *drpcProvisionerDaemon_UpdateJobClient) CloseAndRecvMsg(m *Empty) error { + if err := x.CloseSend(); err != nil { + return err + } + return x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) +} + +func (c *drpcProvisionerDaemonClient) CancelJob(ctx 
context.Context, in *CancelledJob) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/provisionerd.ProvisionerDaemon/CancelJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *drpcProvisionerDaemonClient) CompleteJob(ctx context.Context, in *CompletedJob) (*Empty, error) { + out := new(Empty) + err := c.cc.Invoke(ctx, "/provisionerd.ProvisionerDaemon/CompleteJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, in, out) + if err != nil { + return nil, err + } + return out, nil +} + +type DRPCProvisionerDaemonServer interface { + AcquireJob(context.Context, *Empty) (*AcquiredJob, error) + UpdateJob(DRPCProvisionerDaemon_UpdateJobStream) error + CancelJob(context.Context, *CancelledJob) (*Empty, error) + CompleteJob(context.Context, *CompletedJob) (*Empty, error) +} + +type DRPCProvisionerDaemonUnimplementedServer struct{} + +func (s *DRPCProvisionerDaemonUnimplementedServer) AcquireJob(context.Context, *Empty) (*AcquiredJob, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCProvisionerDaemonUnimplementedServer) UpdateJob(DRPCProvisionerDaemon_UpdateJobStream) error { + return drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCProvisionerDaemonUnimplementedServer) CancelJob(context.Context, *CancelledJob) (*Empty, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +func (s *DRPCProvisionerDaemonUnimplementedServer) CompleteJob(context.Context, *CompletedJob) (*Empty, error) { + return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +} + +type DRPCProvisionerDaemonDescription struct{} + +func (DRPCProvisionerDaemonDescription) NumMethods() int { return 4 } + +func (DRPCProvisionerDaemonDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { + switch n { + case 0: + return "/provisionerd.ProvisionerDaemon/AcquireJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCProvisionerDaemonServer). + AcquireJob( + ctx, + in1.(*Empty), + ) + }, DRPCProvisionerDaemonServer.AcquireJob, true + case 1: + return "/provisionerd.ProvisionerDaemon/UpdateJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return nil, srv.(DRPCProvisionerDaemonServer). + UpdateJob( + &drpcProvisionerDaemon_UpdateJobStream{in1.(drpc.Stream)}, + ) + }, DRPCProvisionerDaemonServer.UpdateJob, true + case 2: + return "/provisionerd.ProvisionerDaemon/CancelJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCProvisionerDaemonServer). + CancelJob( + ctx, + in1.(*CancelledJob), + ) + }, DRPCProvisionerDaemonServer.CancelJob, true + case 3: + return "/provisionerd.ProvisionerDaemon/CompleteJob", drpcEncoding_File_provisionerd_proto_provisionerd_proto{}, + func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { + return srv.(DRPCProvisionerDaemonServer). 
+ CompleteJob( + ctx, + in1.(*CompletedJob), + ) + }, DRPCProvisionerDaemonServer.CompleteJob, true + default: + return "", nil, nil, nil, false + } +} + +func DRPCRegisterProvisionerDaemon(mux drpc.Mux, impl DRPCProvisionerDaemonServer) error { + return mux.Register(impl, DRPCProvisionerDaemonDescription{}) +} + +type DRPCProvisionerDaemon_AcquireJobStream interface { + drpc.Stream + SendAndClose(*AcquiredJob) error +} + +type drpcProvisionerDaemon_AcquireJobStream struct { + drpc.Stream +} + +func (x *drpcProvisionerDaemon_AcquireJobStream) SendAndClose(m *AcquiredJob) error { + if err := x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCProvisionerDaemon_UpdateJobStream interface { + drpc.Stream + SendAndClose(*Empty) error + Recv() (*JobUpdate, error) +} + +type drpcProvisionerDaemon_UpdateJobStream struct { + drpc.Stream +} + +func (x *drpcProvisionerDaemon_UpdateJobStream) SendAndClose(m *Empty) error { + if err := x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +func (x *drpcProvisionerDaemon_UpdateJobStream) Recv() (*JobUpdate, error) { + m := new(JobUpdate) + if err := x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return nil, err + } + return m, nil +} + +func (x *drpcProvisionerDaemon_UpdateJobStream) RecvMsg(m *JobUpdate) error { + return x.MsgRecv(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}) +} + +type DRPCProvisionerDaemon_CancelJobStream interface { + drpc.Stream + SendAndClose(*Empty) error +} + +type drpcProvisionerDaemon_CancelJobStream struct { + drpc.Stream +} + +func (x *drpcProvisionerDaemon_CancelJobStream) SendAndClose(m *Empty) error { + if err := x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return err + } + return x.CloseSend() +} + +type DRPCProvisionerDaemon_CompleteJobStream interface { + drpc.Stream + SendAndClose(*Empty) error +} + +type drpcProvisionerDaemon_CompleteJobStream struct { + drpc.Stream +} + +func (x *drpcProvisionerDaemon_CompleteJobStream) SendAndClose(m *Empty) error { + if err := x.MsgSend(m, drpcEncoding_File_provisionerd_proto_provisionerd_proto{}); err != nil { + return err + } + return x.CloseSend() +} diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 7537f69f27c51..4801450d25aaf 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -2,7 +2,7 @@ // versions: // protoc-gen-go v1.26.0 // protoc v3.6.1 -// source: provisioner.proto +// source: provisionersdk/proto/provisioner.proto package proto @@ -47,11 +47,11 @@ func (x ParameterSource_Scheme) String() string { } func (ParameterSource_Scheme) Descriptor() protoreflect.EnumDescriptor { - return file_provisioner_proto_enumTypes[0].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[0].Descriptor() } func (ParameterSource_Scheme) Type() protoreflect.EnumType { - return &file_provisioner_proto_enumTypes[0] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[0] } func (x ParameterSource_Scheme) Number() protoreflect.EnumNumber { @@ -60,7 +60,7 @@ func (x ParameterSource_Scheme) Number() protoreflect.EnumNumber { // Deprecated: Use ParameterSource_Scheme.Descriptor instead. 
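And, similarly hedged, a server-side sketch: registering an implementation of DRPCProvisionerDaemonServer on a drpc mux and draining the UpdateJob stream. Only the generated types and DRPCRegisterProvisionerDaemon come from this patch; the listener setup, the io.EOF handling, and the omitted persistence logic are assumptions for illustration.

package main

import (
	"context"
	"errors"
	"io"
	"net"

	"storj.io/drpc/drpcmux"
	"storj.io/drpc/drpcserver"

	"github.com/coder/coder/provisionerd/proto"
)

// server embeds the generated unimplemented server so only the method of
// interest needs to be overridden for this sketch.
type server struct {
	proto.DRPCProvisionerDaemonUnimplementedServer
}

// UpdateJob drains the client stream, then acknowledges with an Empty reply.
func (*server) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error {
	for {
		update, err := stream.Recv()
		if errors.Is(err, io.EOF) {
			// The daemon finished sending updates for now.
			return stream.SendAndClose(&proto.Empty{})
		}
		if err != nil {
			return err
		}
		_ = update // persist logs and refresh the job heartbeat here
	}
}

func main() {
	mux := drpcmux.New()
	if err := proto.DRPCRegisterProvisionerDaemon(mux, &server{}); err != nil {
		panic(err)
	}
	listener, err := net.Listen("tcp", "127.0.0.1:0")
	if err != nil {
		panic(err)
	}
	_ = drpcserver.New(mux).Serve(context.Background(), listener)
}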
func (ParameterSource_Scheme) EnumDescriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{0, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{0, 0} } type ParameterDestination_Scheme int32 @@ -93,11 +93,11 @@ func (x ParameterDestination_Scheme) String() string { } func (ParameterDestination_Scheme) Descriptor() protoreflect.EnumDescriptor { - return file_provisioner_proto_enumTypes[1].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[1].Descriptor() } func (ParameterDestination_Scheme) Type() protoreflect.EnumType { - return &file_provisioner_proto_enumTypes[1] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[1] } func (x ParameterDestination_Scheme) Number() protoreflect.EnumNumber { @@ -106,7 +106,7 @@ func (x ParameterDestination_Scheme) Number() protoreflect.EnumNumber { // Deprecated: Use ParameterDestination_Scheme.Descriptor instead. func (ParameterDestination_Scheme) EnumDescriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{1, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{1, 0} } type ParameterSchema_TypeSystem int32 @@ -136,11 +136,11 @@ func (x ParameterSchema_TypeSystem) String() string { } func (ParameterSchema_TypeSystem) Descriptor() protoreflect.EnumDescriptor { - return file_provisioner_proto_enumTypes[2].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[2].Descriptor() } func (ParameterSchema_TypeSystem) Type() protoreflect.EnumType { - return &file_provisioner_proto_enumTypes[2] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[2] } func (x ParameterSchema_TypeSystem) Number() protoreflect.EnumNumber { @@ -149,7 +149,7 @@ func (x ParameterSchema_TypeSystem) Number() protoreflect.EnumNumber { // Deprecated: Use ParameterSchema_TypeSystem.Descriptor instead. func (ParameterSchema_TypeSystem) EnumDescriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{3, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{3, 0} } // ParameterSource represents the source location for a parameter to get it's value from. @@ -165,7 +165,7 @@ type ParameterSource struct { func (x *ParameterSource) Reset() { *x = ParameterSource{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[0] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -178,7 +178,7 @@ func (x *ParameterSource) String() string { func (*ParameterSource) ProtoMessage() {} func (x *ParameterSource) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[0] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[0] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -191,7 +191,7 @@ func (x *ParameterSource) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterSource.ProtoReflect.Descriptor instead. 
func (*ParameterSource) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{0} } func (x *ParameterSource) GetScheme() ParameterSource_Scheme { @@ -221,7 +221,7 @@ type ParameterDestination struct { func (x *ParameterDestination) Reset() { *x = ParameterDestination{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[1] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -234,7 +234,7 @@ func (x *ParameterDestination) String() string { func (*ParameterDestination) ProtoMessage() {} func (x *ParameterDestination) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[1] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -247,7 +247,7 @@ func (x *ParameterDestination) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterDestination.ProtoReflect.Descriptor instead. func (*ParameterDestination) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{1} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{1} } func (x *ParameterDestination) GetScheme() ParameterDestination_Scheme { @@ -278,7 +278,7 @@ type ParameterValue struct { func (x *ParameterValue) Reset() { *x = ParameterValue{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[2] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -291,7 +291,7 @@ func (x *ParameterValue) String() string { func (*ParameterValue) ProtoMessage() {} func (x *ParameterValue) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[2] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -304,7 +304,7 @@ func (x *ParameterValue) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterValue.ProtoReflect.Descriptor instead. func (*ParameterValue) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{2} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{2} } func (x *ParameterValue) GetDestinationScheme() ParameterDestination_Scheme { @@ -350,7 +350,7 @@ type ParameterSchema struct { func (x *ParameterSchema) Reset() { *x = ParameterSchema{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[3] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -363,7 +363,7 @@ func (x *ParameterSchema) String() string { func (*ParameterSchema) ProtoMessage() {} func (x *ParameterSchema) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[3] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -376,7 +376,7 @@ func (x *ParameterSchema) ProtoReflect() protoreflect.Message { // Deprecated: Use ParameterSchema.ProtoReflect.Descriptor instead. 
func (*ParameterSchema) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{3} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{3} } func (x *ParameterSchema) GetName() string { @@ -466,7 +466,7 @@ type Parse struct { func (x *Parse) Reset() { *x = Parse{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[4] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -479,7 +479,7 @@ func (x *Parse) String() string { func (*Parse) ProtoMessage() {} func (x *Parse) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[4] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -492,7 +492,7 @@ func (x *Parse) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse.ProtoReflect.Descriptor instead. func (*Parse) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4} } // Resource is a provisioned unit. @@ -508,7 +508,7 @@ type Resource struct { func (x *Resource) Reset() { *x = Resource{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[5] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -521,7 +521,7 @@ func (x *Resource) String() string { func (*Resource) ProtoMessage() {} func (x *Resource) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[5] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -534,7 +534,7 @@ func (x *Resource) ProtoReflect() protoreflect.Message { // Deprecated: Use Resource.ProtoReflect.Descriptor instead. func (*Resource) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{5} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5} } func (x *Resource) GetName() string { @@ -561,7 +561,7 @@ type Provision struct { func (x *Provision) Reset() { *x = Provision{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[6] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -574,7 +574,7 @@ func (x *Provision) String() string { func (*Provision) ProtoMessage() {} func (x *Provision) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[6] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -587,7 +587,7 @@ func (x *Provision) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision.ProtoReflect.Descriptor instead. 
func (*Provision) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{6} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6} } type Parse_Request struct { @@ -601,7 +601,7 @@ type Parse_Request struct { func (x *Parse_Request) Reset() { *x = Parse_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[7] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -614,7 +614,7 @@ func (x *Parse_Request) String() string { func (*Parse_Request) ProtoMessage() {} func (x *Parse_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[7] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -627,7 +627,7 @@ func (x *Parse_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Request.ProtoReflect.Descriptor instead. func (*Parse_Request) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4, 0} } func (x *Parse_Request) GetDirectory() string { @@ -648,7 +648,7 @@ type Parse_Response struct { func (x *Parse_Response) Reset() { *x = Parse_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[8] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -661,7 +661,7 @@ func (x *Parse_Response) String() string { func (*Parse_Response) ProtoMessage() {} func (x *Parse_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[8] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -674,7 +674,7 @@ func (x *Parse_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Response.ProtoReflect.Descriptor instead. func (*Parse_Response) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{4, 1} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4, 1} } func (x *Parse_Response) GetParameterSchemas() []*ParameterSchema { @@ -697,7 +697,7 @@ type Provision_Request struct { func (x *Provision_Request) Reset() { *x = Provision_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[9] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -710,7 +710,7 @@ func (x *Provision_Request) String() string { func (*Provision_Request) ProtoMessage() {} func (x *Provision_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[9] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -723,7 +723,7 @@ func (x *Provision_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Request.ProtoReflect.Descriptor instead. 
func (*Provision_Request) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{6, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6, 0} } func (x *Provision_Request) GetDirectory() string { @@ -759,7 +759,7 @@ type Provision_Response struct { func (x *Provision_Response) Reset() { *x = Provision_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisioner_proto_msgTypes[10] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -772,7 +772,7 @@ func (x *Provision_Response) String() string { func (*Provision_Response) ProtoMessage() {} func (x *Provision_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisioner_proto_msgTypes[10] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -785,7 +785,7 @@ func (x *Provision_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Response.ProtoReflect.Descriptor instead. func (*Provision_Response) Descriptor() ([]byte, []int) { - return file_provisioner_proto_rawDescGZIP(), []int{6, 1} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6, 1} } func (x *Provision_Response) GetState() []byte { @@ -802,137 +802,138 @@ func (x *Provision_Response) GetResources() []*Resource { return nil } -var File_provisioner_proto protoreflect.FileDescriptor - -var file_provisioner_proto_rawDesc = []byte{ - 0x0a, 0x11, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x22, 0x78, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, - 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x12, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, - 0x12, 0x08, 0x0a, 0x04, 0x44, 0x41, 0x54, 0x41, 0x10, 0x00, 0x22, 0xac, 0x01, 0x0a, 0x14, 0x50, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, - 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, - 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x06, 0x73, +var File_provisionersdk_proto_provisioner_proto protoreflect.FileDescriptor + +var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ + 0x0a, 0x26, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 
0x22, 0x78, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x3b, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, + 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x23, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, + 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x3c, 0x0a, 0x06, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x45, 0x4e, 0x56, 0x49, 0x52, 0x4f, 0x4e, - 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x00, 0x12, - 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x56, - 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x22, 0x93, 0x01, 0x0a, 0x0e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x57, 0x0a, 0x12, - 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x73, 0x63, 0x68, 0x65, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x12, 0x0a, 0x06, 0x53, + 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x41, 0x54, 0x41, 0x10, 0x00, 0x22, + 0xac, 0x01, 0x0a, 0x14, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, + 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x40, 0x0a, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x65, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x53, - 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, - 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, - 0x83, 0x05, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, - 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, - 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, 0x0a, 0x0e, 0x64, 0x65, 0x66, - 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, - 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, - 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x32, - 0x0a, 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, - 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x13, 0x61, - 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x12, 0x52, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 
0x75, 0x6c, 0x74, 0x5f, 0x64, 0x65, - 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, - 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x44, 0x65, 0x73, 0x74, 0x69, - 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x1a, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, - 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, 0x18, 0x61, 0x6c, 0x6c, 0x6f, - 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, - 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, - 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x72, - 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x5d, 0x0a, - 0x16, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x74, 0x79, 0x70, 0x65, - 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x27, 0x2e, + 0x6d, 0x65, 0x52, 0x06, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x22, 0x3c, 0x0a, 0x06, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x14, 0x45, 0x4e, + 0x56, 0x49, 0x52, 0x4f, 0x4e, 0x4d, 0x45, 0x4e, 0x54, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, + 0x4c, 0x45, 0x10, 0x00, 0x12, 0x18, 0x0a, 0x14, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, + 0x4e, 0x45, 0x52, 0x5f, 0x56, 0x41, 0x52, 0x49, 0x41, 0x42, 0x4c, 0x45, 0x10, 0x01, 0x22, 0x93, + 0x01, 0x0a, 0x0e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x12, 0x57, 0x0a, 0x12, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x54, 0x79, 0x70, 0x65, - 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x32, 0x0a, 0x15, - 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, - 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x54, 0x79, 0x70, 0x65, - 0x12, 0x29, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x65, - 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x76, 0x61, 0x6c, 0x69, - 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, 0x12, 0x31, 0x0a, 0x14, 0x76, - 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, 0x6f, 0x6e, 0x64, 0x69, 0x74, - 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x15, - 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x12, 0x07, 
0x0a, 0x03, - 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, - 0x27, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, - 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, - 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, - 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, - 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, - 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x22, - 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, - 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, - 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x22, 0xea, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, - 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, 0x10, 0x70, - 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, - 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, 0x0a, 0x08, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, - 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, - 0x32, 0x9d, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x12, 0x40, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, - 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, - 0x6f, 0x76, 
0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, - 0x42, 0x2d, 0x5a, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, - 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x2e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x52, 0x11, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, + 0x74, 0x69, 0x6f, 0x6e, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, + 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x22, 0x83, 0x05, 0x0a, 0x0f, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, + 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x43, + 0x0a, 0x0e, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x52, 0x0d, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x53, 0x6f, 0x75, + 0x72, 0x63, 0x65, 0x12, 0x32, 0x0a, 0x15, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, + 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x04, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x13, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, + 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x52, 0x0a, 0x13, 0x64, 0x65, 0x66, 0x61, 0x75, + 0x6c, 0x74, 0x5f, 0x64, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x44, 0x65, 0x73, 0x74, + 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x12, 0x64, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, + 0x44, 0x65, 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x1a, 0x61, + 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x6f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x5f, 0x64, 0x65, + 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x08, 0x52, + 0x18, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x4f, 0x76, 0x65, 0x72, 0x72, 0x69, 0x64, 0x65, 0x44, 0x65, + 0x73, 0x74, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x0f, 0x72, 0x65, 0x64, + 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x07, 0x20, 0x01, + 0x28, 0x08, 0x52, 0x0e, 0x72, 0x65, 0x64, 0x69, 0x73, 0x70, 0x6c, 0x61, 0x79, 0x56, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x5d, 0x0a, 0x16, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, + 0x5f, 0x74, 0x79, 0x70, 0x65, 0x5f, 0x73, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x18, 0x08, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x27, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 
0x72, + 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, + 0x2e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, 0x6d, 0x52, 0x14, 0x76, 0x61, 0x6c, + 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, + 0x6d, 0x12, 0x32, 0x0a, 0x15, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x56, 0x61, 0x6c, 0x75, + 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x29, 0x0a, 0x10, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, + 0x69, 0x6f, 0x6e, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x0f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x45, 0x72, 0x72, 0x6f, 0x72, + 0x12, 0x31, 0x0a, 0x14, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x63, + 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, + 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x22, 0x15, 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, + 0x6d, 0x12, 0x07, 0x0a, 0x03, 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x50, + 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, + 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, + 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x73, 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, + 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, + 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xea, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, + 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, + 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, + 0x61, 0x74, 0x65, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, + 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x73, 0x32, 0x9d, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x40, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, + 0x73, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2d, 0x5a, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, + 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( - file_provisioner_proto_rawDescOnce sync.Once - file_provisioner_proto_rawDescData = file_provisioner_proto_rawDesc + file_provisionersdk_proto_provisioner_proto_rawDescOnce sync.Once + file_provisionersdk_proto_provisioner_proto_rawDescData = file_provisionersdk_proto_provisioner_proto_rawDesc ) -func file_provisioner_proto_rawDescGZIP() []byte { - file_provisioner_proto_rawDescOnce.Do(func() { - file_provisioner_proto_rawDescData = protoimpl.X.CompressGZIP(file_provisioner_proto_rawDescData) +func file_provisionersdk_proto_provisioner_proto_rawDescGZIP() []byte { + file_provisionersdk_proto_provisioner_proto_rawDescOnce.Do(func() { + file_provisionersdk_proto_provisioner_proto_rawDescData = protoimpl.X.CompressGZIP(file_provisionersdk_proto_provisioner_proto_rawDescData) }) - return file_provisioner_proto_rawDescData + return file_provisionersdk_proto_provisioner_proto_rawDescData } -var file_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 11) -var file_provisioner_proto_goTypes = []interface{}{ +var file_provisionersdk_proto_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 3) +var file_provisionersdk_proto_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 11) +var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{ (ParameterSource_Scheme)(0), // 0: provisioner.ParameterSource.Scheme (ParameterDestination_Scheme)(0), // 1: provisioner.ParameterDestination.Scheme (ParameterSchema_TypeSystem)(0), // 2: provisioner.ParameterSchema.TypeSystem @@ -948,7 +949,7 @@ var file_provisioner_proto_goTypes = []interface{}{ (*Provision_Request)(nil), // 12: provisioner.Provision.Request (*Provision_Response)(nil), // 13: provisioner.Provision.Response } -var file_provisioner_proto_depIdxs = 
[]int32{ +var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{ 0, // 0: provisioner.ParameterSource.scheme:type_name -> provisioner.ParameterSource.Scheme 1, // 1: provisioner.ParameterDestination.scheme:type_name -> provisioner.ParameterDestination.Scheme 1, // 2: provisioner.ParameterValue.destination_scheme:type_name -> provisioner.ParameterDestination.Scheme @@ -969,13 +970,13 @@ var file_provisioner_proto_depIdxs = []int32{ 0, // [0:9] is the sub-list for field type_name } -func init() { file_provisioner_proto_init() } -func file_provisioner_proto_init() { - if File_provisioner_proto != nil { +func init() { file_provisionersdk_proto_provisioner_proto_init() } +func file_provisionersdk_proto_provisioner_proto_init() { + if File_provisionersdk_proto_provisioner_proto != nil { return } if !protoimpl.UnsafeEnabled { - file_provisioner_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ParameterSource); i { case 0: return &v.state @@ -987,7 +988,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ParameterDestination); i { case 0: return &v.state @@ -999,7 +1000,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ParameterValue); i { case 0: return &v.state @@ -1011,7 +1012,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ParameterSchema); i { case 0: return &v.state @@ -1023,7 +1024,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Parse); i { case 0: return &v.state @@ -1035,7 +1036,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Resource); i { case 0: return &v.state @@ -1047,7 +1048,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Provision); i { case 0: return &v.state @@ -1059,7 +1060,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Parse_Request); i { case 0: return &v.state @@ -1071,7 +1072,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[8].Exporter = func(v 
interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Parse_Response); i { case 0: return &v.state @@ -1083,7 +1084,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Provision_Request); i { case 0: return &v.state @@ -1095,7 +1096,7 @@ func file_provisioner_proto_init() { return nil } } - file_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + file_provisionersdk_proto_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Provision_Response); i { case 0: return &v.state @@ -1112,19 +1113,19 @@ func file_provisioner_proto_init() { out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), - RawDescriptor: file_provisioner_proto_rawDesc, + RawDescriptor: file_provisionersdk_proto_provisioner_proto_rawDesc, NumEnums: 3, NumMessages: 11, NumExtensions: 0, NumServices: 1, }, - GoTypes: file_provisioner_proto_goTypes, - DependencyIndexes: file_provisioner_proto_depIdxs, - EnumInfos: file_provisioner_proto_enumTypes, - MessageInfos: file_provisioner_proto_msgTypes, + GoTypes: file_provisionersdk_proto_provisioner_proto_goTypes, + DependencyIndexes: file_provisionersdk_proto_provisioner_proto_depIdxs, + EnumInfos: file_provisionersdk_proto_provisioner_proto_enumTypes, + MessageInfos: file_provisionersdk_proto_provisioner_proto_msgTypes, }.Build() - File_provisioner_proto = out.File - file_provisioner_proto_rawDesc = nil - file_provisioner_proto_goTypes = nil - file_provisioner_proto_depIdxs = nil + File_provisionersdk_proto_provisioner_proto = out.File + file_provisionersdk_proto_provisioner_proto_rawDesc = nil + file_provisionersdk_proto_provisioner_proto_goTypes = nil + file_provisionersdk_proto_provisioner_proto_depIdxs = nil } diff --git a/provisionersdk/proto/provisioner_drpc.pb.go b/provisionersdk/proto/provisioner_drpc.pb.go index 9bbd00d67f149..7a023b6631b42 100644 --- a/provisionersdk/proto/provisioner_drpc.pb.go +++ b/provisionersdk/proto/provisioner_drpc.pb.go @@ -1,6 +1,6 @@ // Code generated by protoc-gen-go-drpc. DO NOT EDIT. 
// protoc-gen-go-drpc version: v0.0.26 -// source: provisioner.proto +// source: provisionersdk/proto/provisioner.proto package proto @@ -13,25 +13,25 @@ import ( drpcerr "storj.io/drpc/drpcerr" ) -type drpcEncoding_File_provisioner_proto struct{} +type drpcEncoding_File_provisionersdk_proto_provisioner_proto struct{} -func (drpcEncoding_File_provisioner_proto) Marshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) Marshal(msg drpc.Message) ([]byte, error) { return proto.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_provisioner_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) MarshalAppend(buf []byte, msg drpc.Message) ([]byte, error) { return proto.MarshalOptions{}.MarshalAppend(buf, msg.(proto.Message)) } -func (drpcEncoding_File_provisioner_proto) Unmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) Unmarshal(buf []byte, msg drpc.Message) error { return proto.Unmarshal(buf, msg.(proto.Message)) } -func (drpcEncoding_File_provisioner_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { +func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) JSONMarshal(msg drpc.Message) ([]byte, error) { return protojson.Marshal(msg.(proto.Message)) } -func (drpcEncoding_File_provisioner_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { +func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) JSONUnmarshal(buf []byte, msg drpc.Message) error { return protojson.Unmarshal(buf, msg.(proto.Message)) } @@ -54,7 +54,7 @@ func (c *drpcProvisionerClient) DRPCConn() drpc.Conn { return c.cc } func (c *drpcProvisionerClient) Parse(ctx context.Context, in *Parse_Request) (*Parse_Response, error) { out := new(Parse_Response) - err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Parse", drpcEncoding_File_provisioner_proto{}, in, out) + err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Parse", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, in, out) if err != nil { return nil, err } @@ -63,7 +63,7 @@ func (c *drpcProvisionerClient) Parse(ctx context.Context, in *Parse_Request) (* func (c *drpcProvisionerClient) Provision(ctx context.Context, in *Provision_Request) (*Provision_Response, error) { out := new(Provision_Response) - err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Provision", drpcEncoding_File_provisioner_proto{}, in, out) + err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Provision", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, in, out) if err != nil { return nil, err } @@ -92,7 +92,7 @@ func (DRPCProvisionerDescription) NumMethods() int { return 2 } func (DRPCProvisionerDescription) Method(n int) (string, drpc.Encoding, drpc.Receiver, interface{}, bool) { switch n { case 0: - return "/provisioner.Provisioner/Parse", drpcEncoding_File_provisioner_proto{}, + return "/provisioner.Provisioner/Parse", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCProvisionerServer). 
Parse( @@ -101,7 +101,7 @@ func (DRPCProvisionerDescription) Method(n int) (string, drpc.Encoding, drpc.Rec ) }, DRPCProvisionerServer.Parse, true case 1: - return "/provisioner.Provisioner/Provision", drpcEncoding_File_provisioner_proto{}, + return "/provisioner.Provisioner/Provision", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { return srv.(DRPCProvisionerServer). Provision( @@ -128,7 +128,7 @@ type drpcProvisioner_ParseStream struct { } func (x *drpcProvisioner_ParseStream) SendAndClose(m *Parse_Response) error { - if err := x.MsgSend(m, drpcEncoding_File_provisioner_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { return err } return x.CloseSend() @@ -144,7 +144,7 @@ type drpcProvisioner_ProvisionStream struct { } func (x *drpcProvisioner_ProvisionStream) SendAndClose(m *Provision_Response) error { - if err := x.MsgSend(m, drpcEncoding_File_provisioner_proto{}); err != nil { + if err := x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { return err } return x.CloseSend() From d878c13bc89c181c52ea6e35555f74cb651c14db Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 16:03:18 +0000 Subject: [PATCH 05/17] feat: Add provisionerd service Creates the provisionerd service that interfaces with coderd to process provision jobs! --- coderd/coderd.go | 4 + coderd/provisionerd.go | 272 ++++++++++++++++++++++++++++++ coderd/workspaces.go | 37 +++- codersdk/provisionerd.go | 71 ++++++++ go.mod | 4 + go.sum | 30 ++++ provisionerd/provisionerd.go | 225 ++++++++++++++++++++++++ provisionerd/provisionerd_test.go | 84 +++++++++ 8 files changed, 724 insertions(+), 3 deletions(-) create mode 100644 coderd/provisionerd.go create mode 100644 codersdk/provisionerd.go create mode 100644 provisionerd/provisionerd.go create mode 100644 provisionerd/provisionerd_test.go diff --git a/coderd/coderd.go b/coderd/coderd.go index aa624c4cc6b8c..0f1839668de3d 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -23,6 +23,9 @@ func New(options *Options) http.Handler { projects := &projects{ Database: options.Database, } + provisionerd := &provisionerd{ + Database: options.Database, + } users := &users{ Database: options.Database, } @@ -39,6 +42,7 @@ func New(options *Options) http.Handler { }) r.Post("/login", users.loginWithPassword) r.Post("/logout", users.logout) + r.Get("/provisionerd", provisionerd.listen) // Used for setup. 
r.Post("/user", users.createInitialUser) r.Route("/users", func(r chi.Router) { diff --git a/coderd/provisionerd.go b/coderd/provisionerd.go new file mode 100644 index 0000000000000..a5c33f99f77f3 --- /dev/null +++ b/coderd/provisionerd.go @@ -0,0 +1,272 @@ +package coderd + +import ( + "context" + "database/sql" + "encoding/json" + "errors" + "fmt" + "net/http" + + "golang.org/x/xerrors" + "storj.io/drpc/drpcmux" + "storj.io/drpc/drpcserver" + + "github.com/google/uuid" + "github.com/hashicorp/yamux" + "github.com/moby/moby/pkg/namesgenerator" + + "github.com/coder/coder/coderd/projectparameter" + "github.com/coder/coder/database" + "github.com/coder/coder/httpapi" + "github.com/coder/coder/provisionerd/proto" + sdkproto "github.com/coder/coder/provisionersdk/proto" + + "nhooyr.io/websocket" +) + +type provisionerd struct { + Database database.Store +} + +func (p *provisionerd) listen(rw http.ResponseWriter, r *http.Request) { + conn, err := websocket.Accept(rw, r, nil) + if err != nil { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: fmt.Sprintf("accept websocket: %s", err), + }) + return + } + + daemon, err := p.Database.InsertProvisionerDaemon(r.Context(), database.InsertProvisionerDaemonParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + Name: namesgenerator.GetRandomName(1), + Provisioners: []database.ProvisionerType{database.ProvisionerTypeCdrBasic, database.ProvisionerTypeTerraform}, + }) + if err != nil { + _ = conn.Close(websocket.StatusInternalError, fmt.Sprintf("insert provisioner daemon:% s", err)) + return + } + + session, err := yamux.Server(websocket.NetConn(r.Context(), conn, websocket.MessageBinary), nil) + if err != nil { + _ = conn.Close(websocket.StatusInternalError, fmt.Sprintf("multiplex server: %s", err)) + return + } + mux := drpcmux.New() + err = proto.DRPCRegisterProvisionerDaemon(mux, &provisionerdServer{ + ID: daemon.ID, + Database: p.Database, + }) + if err != nil { + _ = conn.Close(websocket.StatusInternalError, fmt.Sprintf("drpc register provisioner daemon: %s", err)) + return + } + server := drpcserver.New(mux) + err = server.Serve(r.Context(), session) + if err != nil { + _ = conn.Close(websocket.StatusInternalError, fmt.Sprintf("serve: %s", err)) + } +} + +// The input for a "workspace_provision" job. +type workspaceProvisionJob struct { + WorkspaceHistoryID uuid.UUID `json:"workspace_id"` +} + +// The input for a "project_import" job. +type projectImportJob struct { + ProjectHistoryID uuid.UUID `json:"project_history_id"` +} + +// An implementation of the provisionerd protobuf server definition. +type provisionerdServer struct { + ID uuid.UUID + Database database.Store +} + +func (s *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + // This locks the job. No other provisioners can acquire this job. + job, err := s.Database.AcquireProvisionerJob(ctx, database.AcquireProvisionerJobParams{ + StartedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + WorkerID: uuid.NullUUID{ + UUID: s.ID, + Valid: true, + }, + Types: []database.ProvisionerType{database.ProvisionerTypeTerraform}, + }) + if errors.Is(err, sql.ErrNoRows) { + // If no jobs are available, an empty struct is sent back. 
+ return &proto.AcquiredJob{}, nil + } + if err != nil { + return nil, xerrors.Errorf("acquire job: %w", err) + } + failJob := func(errorMessage string) error { + err = s.Database.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{ + ID: job.ID, + CompletedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + Error: sql.NullString{ + String: errorMessage, + Valid: true, + }, + }) + if err != nil { + return xerrors.Errorf("update provisioner job: %w", err) + } + return xerrors.Errorf("request job was invalidated: %s", errorMessage) + } + + project, err := s.Database.GetProjectByID(ctx, job.ProjectID) + if err != nil { + return nil, failJob(fmt.Sprintf("get project: %s", err)) + } + + organization, err := s.Database.GetOrganizationByID(ctx, project.OrganizationID) + if err != nil { + return nil, failJob(fmt.Sprintf("get organization: %s", err)) + } + + user, err := s.Database.GetUserByID(ctx, job.InitiatorID) + if err != nil { + return nil, failJob(fmt.Sprintf("get user: %s", err)) + } + + acquiredJob := &proto.AcquiredJob{ + JobId: job.ID.String(), + CreatedAt: job.CreatedAt.UnixMilli(), + Provisioner: string(job.Provisioner), + OrganizationName: organization.Name, + ProjectName: project.Name, + UserName: user.Username, + } + var projectHistory database.ProjectHistory + switch job.Type { + case database.ProvisionerJobTypeWorkspaceProvision: + var input workspaceProvisionJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return nil, failJob(fmt.Sprintf("unmarshal job input %q: %s", job.Input, err)) + } + workspaceHistory, err := s.Database.GetWorkspaceHistoryByID(ctx, input.WorkspaceHistoryID) + if err != nil { + return nil, failJob(fmt.Sprintf("get workspace history: %s", err)) + } + + workspace, err := s.Database.GetWorkspaceByID(ctx, workspaceHistory.WorkspaceID) + if err != nil { + return nil, failJob(fmt.Sprintf("get workspace: %s", err)) + } + + projectHistory, err = s.Database.GetProjectHistoryByID(ctx, workspaceHistory.ProjectHistoryID) + if err != nil { + return nil, failJob(fmt.Sprintf("get project history: %s", err)) + } + + parameters, err := projectparameter.Compute(ctx, s.Database, projectparameter.Scope{ + OrganizationID: organization.ID, + ProjectID: project.ID, + ProjectHistoryID: projectHistory.ID, + UserID: user.ID, + WorkspaceID: workspace.ID, + WorkspaceHistoryID: workspaceHistory.ID, + }) + if err != nil { + return nil, failJob(fmt.Sprintf("compute parameters: %s", err)) + } + protoParameters := make([]*sdkproto.ParameterValue, 0, len(parameters)) + for _, parameter := range parameters { + protoParameters = append(protoParameters, parameter.Proto) + } + + provisionerState := []byte{} + if workspaceHistory.BeforeID.Valid { + beforeHistory, err := s.Database.GetWorkspaceHistoryByID(ctx, workspaceHistory.BeforeID.UUID) + if err != nil { + return nil, failJob(fmt.Sprintf("get workspace history: %s", err)) + } + provisionerState = beforeHistory.ProvisionerState + } + + acquiredJob.Type = &proto.AcquiredJob_WorkspaceProvision_{ + WorkspaceProvision: &proto.AcquiredJob_WorkspaceProvision{ + WorkspaceHistoryId: workspaceHistory.ID.String(), + WorkspaceName: workspace.Name, + State: provisionerState, + ParameterValues: protoParameters, + }, + } + case database.ProvisionerJobTypeProjectImport: + var input projectImportJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return nil, failJob(fmt.Sprintf("unmarshal job input %q: %s", job.Input, err)) + } + projectHistory, err = s.Database.GetProjectHistoryByID(ctx, 
input.ProjectHistoryID) + if err != nil { + return nil, failJob(fmt.Sprintf("get project history: %s", err)) + } + } + switch projectHistory.StorageMethod { + case database.ProjectStorageMethodInlineArchive: + acquiredJob.ProjectSourceArchive = projectHistory.StorageSource + default: + return nil, failJob(fmt.Sprintf("unsupported storage source: %q", projectHistory.StorageMethod)) + } + + return acquiredJob, err +} + +func (s *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { + for { + update, err := stream.Recv() + if err != nil { + return err + } + parsedID, err := uuid.Parse(update.JobId) + if err != nil { + return xerrors.Errorf("parse job id: %w", err) + } + err = s.Database.UpdateProvisionerJobByID(context.Background(), database.UpdateProvisionerJobByIDParams{ + ID: parsedID, + UpdatedAt: database.Now(), + }) + if err != nil { + return xerrors.Errorf("update job: %w", err) + } + } +} + +func (s *provisionerdServer) CancelJob(ctx context.Context, cancelJob *proto.CancelledJob) (*proto.Empty, error) { + jobID, err := uuid.Parse(cancelJob.JobId) + if err != nil { + return nil, xerrors.Errorf("parse job id: %w", err) + } + err = s.Database.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{ + ID: jobID, + CancelledAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + UpdatedAt: database.Now(), + Error: sql.NullString{ + String: cancelJob.Error, + Valid: cancelJob.Error != "", + }, + }) + if err != nil { + return nil, xerrors.Errorf("update provisioner job: %w", err) + } + return &proto.Empty{}, nil +} + +func (s *provisionerdServer) CompleteJob(ctx context.Context, completed *proto.CompletedJob) (*proto.Empty, error) { + return nil, nil +} diff --git a/coderd/workspaces.go b/coderd/workspaces.go index f12633a5611bf..2a724daf9eab0 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -1,7 +1,9 @@ package coderd import ( + "context" "database/sql" + "encoding/json" "errors" "fmt" "net/http" @@ -270,6 +272,13 @@ func (w *workspaces) createWorkspaceHistory(rw http.ResponseWriter, r *http.Requ }) return } + project, err := w.Database.GetProjectByID(r.Context(), projectHistory.ProjectID) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get project: %s", err), + }) + return + } // Store prior history ID if it exists to update it after we create new! priorHistoryID := uuid.NullUUID{} @@ -298,8 +307,31 @@ func (w *workspaces) createWorkspaceHistory(rw http.ResponseWriter, r *http.Requ // This must happen in a transaction to ensure history can be inserted, and // the prior history can update it's "after" column to point at the new. err = w.Database.InTx(func(db database.Store) error { + // Generate the ID before-hand so the provisioner job is aware of it! 
+ workspaceHistoryID := uuid.New() + input, err := json.Marshal(workspaceProvisionJob{ + WorkspaceHistoryID: workspaceHistoryID, + }) + if err != nil { + return xerrors.Errorf("marshal provision job: %w", err) + } + + provisionerJob, err := db.InsertProvisionerJob(context.Background(), database.InsertProvisionerJobParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + InitiatorID: user.ID, + Provisioner: project.Provisioner, + Type: database.ProvisionerJobTypeWorkspaceProvision, + ProjectID: project.ID, + Input: input, + }) + if err != nil { + return xerrors.Errorf("insert provisioner job: %w", err) + } + workspaceHistory, err = db.InsertWorkspaceHistory(r.Context(), database.InsertWorkspaceHistoryParams{ - ID: uuid.New(), + ID: workspaceHistoryID, CreatedAt: database.Now(), UpdatedAt: database.Now(), WorkspaceID: workspace.ID, @@ -307,8 +339,7 @@ func (w *workspaces) createWorkspaceHistory(rw http.ResponseWriter, r *http.Requ BeforeID: priorHistoryID, Initiator: user.ID, Transition: createBuild.Transition, - // This should create a provision job once that gets implemented! - ProvisionJobID: uuid.New(), + ProvisionJobID: provisionerJob.ID, }) if err != nil { return xerrors.Errorf("insert workspace history: %w", err) diff --git a/codersdk/provisionerd.go b/codersdk/provisionerd.go new file mode 100644 index 0000000000000..5a9aaa9681631 --- /dev/null +++ b/codersdk/provisionerd.go @@ -0,0 +1,71 @@ +package codersdk + +import ( + "context" + + "golang.org/x/xerrors" + "nhooyr.io/websocket" + "storj.io/drpc" + "storj.io/drpc/drpcconn" + + "github.com/hashicorp/yamux" + + "github.com/coder/coder/provisionerd/proto" +) + +// ProvisionerDaemonClient returns the gRPC service for a provisioner daemon implementation. +func (c *Client) ProvisionerDaemonClient(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + serverURL, err := c.url.Parse("/api/v2/provisionerd") + if err != nil { + return nil, xerrors.Errorf("parse url: %w", err) + } + conn, res, err := websocket.Dial(ctx, serverURL.String(), &websocket.DialOptions{ + HTTPClient: c.httpClient, + }) + if err != nil { + if res == nil { + return nil, err + } + return nil, readBodyAsError(res) + } + session, err := yamux.Client(websocket.NetConn(context.Background(), conn, websocket.MessageBinary), nil) + if err != nil { + return nil, xerrors.Errorf("multiplex client: %w", err) + } + return proto.NewDRPCProvisionerDaemonClient(&multiplexedDRPC{ + session: session, + }), nil +} + +// dRPC is a single-stream protocol by design. It's intended to operate +// a single HTTP-request per invocation. This multiplexes the WebSocket +// using yamux to enable multiple streams to function on a single connection. +// +// If this connection is too slow, we can create a WebSocket for each request. 
+type multiplexedDRPC struct { + session *yamux.Session +} + +func (m *multiplexedDRPC) Close() error { + return m.session.Close() +} + +func (m *multiplexedDRPC) Closed() <-chan struct{} { + return m.session.CloseChan() +} + +func (m *multiplexedDRPC) Invoke(ctx context.Context, rpc string, enc drpc.Encoding, in, out drpc.Message) error { + conn, err := m.session.Open() + if err != nil { + return err + } + return drpcconn.New(conn).Invoke(ctx, rpc, enc, in, out) +} + +func (m *multiplexedDRPC) NewStream(ctx context.Context, rpc string, enc drpc.Encoding) (drpc.Stream, error) { + conn, err := m.session.Open() + if err != nil { + return nil, err + } + return drpcconn.New(conn).NewStream(ctx, rpc, enc) +} diff --git a/go.mod b/go.mod index 7505a34a7167c..5d2a8f7b6f0a2 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,7 @@ replace github.com/hashicorp/terraform-config-inspect => github.com/kylecarbs/te require ( cdr.dev/slog v1.4.1 + github.com/coder/retry v1.3.0 github.com/go-chi/chi v1.5.4 github.com/go-chi/render v1.0.1 github.com/go-playground/validator/v10 v10.10.0 @@ -16,6 +17,7 @@ require ( github.com/hashicorp/hc-install v0.3.1 github.com/hashicorp/terraform-config-inspect v0.0.0-20211115214459-90acf1ca460f github.com/hashicorp/terraform-exec v0.15.0 + github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 github.com/justinas/nosurf v1.1.1 github.com/lib/pq v1.10.4 github.com/moby/moby v20.10.12+incompatible @@ -33,6 +35,7 @@ require ( golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8 golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 google.golang.org/protobuf v1.27.1 + nhooyr.io/websocket v1.8.7 storj.io/drpc v0.0.28 ) @@ -71,6 +74,7 @@ require ( github.com/hashicorp/terraform-json v0.13.0 // indirect github.com/imdario/mergo v0.3.12 // indirect github.com/inconshreveable/mousetrap v1.0.0 // indirect + github.com/klauspost/compress v1.13.6 // indirect github.com/leodido/go-urn v1.2.1 // indirect github.com/mattn/go-colorable v0.1.12 // indirect github.com/mattn/go-isatty v0.0.14 // indirect diff --git a/go.sum b/go.sum index 564c6aaae7be9..8c47b736aaa19 100644 --- a/go.sum +++ b/go.sum @@ -246,6 +246,8 @@ github.com/cncf/xds/go v0.0.0-20211130200136-a8f946100490/go.mod h1:eXthEFrGJvWH github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/cockroachdb/cockroach-go/v2 v2.1.1/go.mod h1:7NtUnP6eK+l6k483WSYNrq3Kb23bWV10IRV1TyeSpwM= github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/coder/retry v1.3.0 h1:5lAAwt/2Cm6lVmnfBY7sOMXcBOwcwJhmV5QGSELIVWY= +github.com/coder/retry v1.3.0/go.mod h1:tXuRgZgWjUnU5LZPT4lJh4ew2elUhexhlnXzrJWdyFY= github.com/containerd/aufs v0.0.0-20200908144142-dab0cbea06f4/go.mod h1:nukgQABAEopAHvB6j7cnP5zJ+/3aVcE7hCYqvIwAHyE= github.com/containerd/aufs v0.0.0-20201003224125-76a6863f2989/go.mod h1:AkGGQs9NM2vtYHaUen+NljV0/baGCAPELGm2q9ZXpWU= github.com/containerd/aufs v0.0.0-20210316121734-20793ff83c97/go.mod h1:kL5kd6KM5TzQjR79jljyi4olc1Vrx6XBlcyj3gNv2PU= @@ -444,6 +446,10 @@ github.com/gabriel-vasile/mimetype v1.4.0/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmx github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gin-contrib/sse v0.1.0 
h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14= +github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-chi/chi v1.5.4 h1:QHdzF2szwjqVV4wmByUnTcsbIg7UGaQ0tPF2t5GcAIs= github.com/go-chi/chi v1.5.4/go.mod h1:uaf8YgoFazUOkPBG7fxPftUylNumIev9awIWOENIuEg= @@ -481,10 +487,13 @@ github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A= github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/locales v0.13.0/go.mod h1:taPMhCMXrRLJO55olJkUXHZBHCxTMfnGwq/HNwmWNS8= github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+Scu5vgOQjsIJAF8j9muTVoKLVtA= github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GOhaH6EGOAJShg8Id5JGkI= github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0= github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= @@ -518,6 +527,12 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ= github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0= github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0= +github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= +github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8= +github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= +github.com/gobwas/ws v1.0.2 h1:CoAavW/wd/kulfZmSIBt6p24n4j7tHgNVCjsfHVNUbo= +github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= github.com/gocql/gocql v0.0.0-20210515062232-b7ef815b4556/go.mod h1:DL0ekTmBSTdlNF25Orwt/JMzqIq3EJ4MVa/J/uK64OY= github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= @@ -644,6 +659,8 @@ github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2z github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= 
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= @@ -712,6 +729,8 @@ github.com/hashicorp/terraform-exec v0.15.0 h1:cqjh4d8HYNQrDoEmlSGelHmg2DYDh5yay github.com/hashicorp/terraform-exec v0.15.0/go.mod h1:H4IG8ZxanU+NW0ZpDRNsvh9f0ul7C0nHP+rUR/CHs7I= github.com/hashicorp/terraform-json v0.13.0 h1:Li9L+lKD1FO5RVFRM1mMMIBDoUHslOniyEi5CM+FWGY= github.com/hashicorp/terraform-json v0.13.0/go.mod h1:y5OdLBCT+rxbwnpxZs9kGL7R9ExU76+cpdY8zHwoazk= +github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 h1:xixZ2bWeofWV68J+x6AzmKuVM/JWCQwkWm6GW/MUR6I= +github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= @@ -789,6 +808,7 @@ github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/u github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= @@ -811,6 +831,7 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= +github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.2/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= @@ -840,6 +861,7 @@ github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88 github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 
h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= github.com/lib/pq v0.0.0-20180327071824-d34b9ff171c2/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= @@ -929,9 +951,11 @@ github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXy github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc= github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= @@ -1191,6 +1215,10 @@ github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1 github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/tv42/httpunix v0.0.0-20191220191345-2ba4b9c3382c/go.mod h1:hzIxponao9Kjc7aWznkXaL4U4TWaDSs8zcsY4Ka08nM= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= +github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= +github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= +github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= +github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ulikunitz/xz v0.5.8 h1:ERv8V6GKqVi23rgu5cj9pVfVzJbOqAY2Ntl88O6c2nQ= github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/unrolled/secure v1.0.9 h1:BWRuEb1vDrBFFDdbCnKkof3gZ35I/bnHGyt0LB0TNyQ= @@ -1973,6 +2001,8 @@ modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.0.1-0.20210308123920-1f282aa71362/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= modernc.org/zappy v1.0.0/go.mod h1:hHe+oGahLVII/aTTyWK/b53VDHMAGCBYYeZ9sn83HC4= +nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g= +nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go new file mode 100644 index 0000000000000..5ad6b0d65d811 --- /dev/null +++ b/provisionerd/provisionerd.go @@ -0,0 +1,225 @@ +package provisionerd + +import ( + "context" + "fmt" + "sync" + "time" + + "cdr.dev/slog" + 
"github.com/coder/coder/provisionerd/proto" + provisionersdkproto "github.com/coder/coder/provisionersdk/proto" + "github.com/coder/retry" +) + +// Dialer returns a gRPC client to communicate with. +// The provisioner daemon handles intermittent connection failures +// for upgrades to coderd. +type Dialer func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) + +// Provisioners maps provisioner ID to implementation. +type Provisioners map[string]provisionersdkproto.DRPCProvisionerClient + +type Options struct { + AcquireInterval time.Duration + Logger slog.Logger +} + +func New(apiDialer Dialer, provisioners Provisioners, opts *Options) *API { + if opts.AcquireInterval == 0 { + opts.AcquireInterval = 5 * time.Second + } + ctx, ctxCancel := context.WithCancel(context.Background()) + api := &API{ + dialer: apiDialer, + provisioners: provisioners, + opts: opts, + + closeContext: ctx, + closeContextCancel: ctxCancel, + closed: make(chan struct{}), + } + go api.connect() + return api +} + +type API struct { + provisioners Provisioners + opts *Options + + dialer Dialer + connectMutex sync.Mutex + client proto.DRPCProvisionerDaemonClient + updateStream proto.DRPCProvisionerDaemon_UpdateJobClient + + closeContext context.Context + closeContextCancel context.CancelFunc + + closed chan struct{} + closeMutex sync.Mutex + closeError error + + activeJob *proto.AcquiredJob + activeJobMutex sync.Mutex + logQueue []proto.Log +} + +// connect establishes a connection +func (a *API) connect() { + a.connectMutex.Lock() + defer a.connectMutex.Unlock() + + var err error + for retrier := retry.New(50*time.Millisecond, 10*time.Second); retrier.Wait(a.closeContext); { + a.client, err = a.dialer(a.closeContext) + if err != nil { + // Warn + a.opts.Logger.Warn(context.Background(), "failed to dial", slog.Error(err)) + continue + } + a.updateStream, err = a.client.UpdateJob(a.closeContext) + if err != nil { + a.opts.Logger.Warn(context.Background(), "create update job stream", slog.Error(err)) + continue + } + a.opts.Logger.Debug(context.Background(), "connected") + break + } + + go func() { + if a.isClosed() { + return + } + select { + case <-a.closed: + return + case <-a.updateStream.Context().Done(): + // We use the update stream to detect when the connection + // has been interrupted. This works well, because logs need + // to buffer if a job is running in the background. 
+ a.opts.Logger.Debug(context.Background(), "update stream ended", slog.Error(a.updateStream.Context().Err())) + a.connect() + } + }() + + go func() { + if a.isClosed() { + return + } + ticker := time.NewTicker(a.opts.AcquireInterval) + defer ticker.Stop() + for { + select { + case <-a.closed: + return + case <-a.updateStream.Context().Done(): + return + case <-ticker.C: + if a.activeJob != nil { + a.opts.Logger.Debug(context.Background(), "skipping acquire; job is already running") + continue + } + a.acquireJob() + } + } + }() +} + +func (a *API) acquireJob() { + a.opts.Logger.Debug(context.Background(), "acquiring new job") + var err error + a.activeJobMutex.Lock() + a.activeJob, err = a.client.AcquireJob(a.closeContext, &proto.Empty{}) + a.activeJobMutex.Unlock() + if err != nil { + a.opts.Logger.Error(context.Background(), "acquire job", slog.Error(err)) + return + } + if a.activeJob.JobId == "" { + a.activeJob = nil + a.opts.Logger.Info(context.Background(), "no jobs available") + return + } + a.opts.Logger.Info(context.Background(), "acquired job", + slog.F("organization_name", a.activeJob.OrganizationName), + slog.F("project_name", a.activeJob.ProjectName), + slog.F("username", a.activeJob.UserName), + slog.F("provisioner", a.activeJob.Provisioner), + ) + + provisioner, hasProvisioner := a.provisioners[a.activeJob.Provisioner] + if !hasProvisioner { + a.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", a.activeJob.Provisioner)) + return + } + fmt.Printf("Provisioner: %s\n", provisioner) + // Work! +} + +func (a *API) cancelActiveJob(errMsg string) { + a.activeJobMutex.Lock() + defer a.activeJobMutex.Unlock() + + if a.client == nil { + a.activeJob = nil + return + } + + a.opts.Logger.Info(context.Background(), "canceling active job", + slog.F("error_message", errMsg), + slog.F("job_id", a.activeJob.JobId), + ) + _, err := a.client.CancelJob(a.closeContext, &proto.CancelledJob{ + JobId: a.activeJob.JobId, + Error: fmt.Sprintf("provisioner daemon: %s", errMsg), + }) + if err != nil { + a.opts.Logger.Error(context.Background(), "couldn't cancel job", slog.Error(err)) + } + a.opts.Logger.Debug(context.Background(), "canceled active job") + a.activeJob = nil +} + +// isClosed returns whether the API is closed or not. +func (a *API) isClosed() bool { + select { + case <-a.closed: + return true + default: + return false + } +} + +// Close ends the provisioner. It will mark any active jobs as canceled. +func (a *API) Close() error { + return a.closeWithError(nil) +} + +// closeWithError closes the provisioner; subsequent reads/writes will return the error err. 
+func (a *API) closeWithError(err error) error { + a.closeMutex.Lock() + defer a.closeMutex.Unlock() + if a.isClosed() { + return a.closeError + } + + if a.activeJob != nil { + errMsg := "" + if err != nil { + errMsg = err.Error() + } + a.cancelActiveJob(errMsg) + } + + a.opts.Logger.Debug(context.Background(), "closing server with error", slog.Error(err)) + a.closeError = err + close(a.closed) + a.closeContextCancel() + + if a.updateStream != nil { + _ = a.client.DRPCConn().Close() + _ = a.updateStream.Close() + } + + return err +} diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go new file mode 100644 index 0000000000000..d5e01de960457 --- /dev/null +++ b/provisionerd/provisionerd_test.go @@ -0,0 +1,84 @@ +package provisionerd_test + +import ( + "archive/tar" + "bytes" + "context" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/coderd" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/database" + "github.com/coder/coder/provisionerd" +) + +func TestProvisionerd(t *testing.T) { + t.Parallel() + + setupProjectAndWorkspace := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest) (coderd.Project, coderd.Workspace) { + project, err := client.CreateProject(context.Background(), user.Organization, coderd.CreateProjectRequest{ + Name: "banana", + Provisioner: database.ProvisionerTypeTerraform, + }) + require.NoError(t, err) + workspace, err := client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{ + Name: "hiii", + ProjectID: project.ID, + }) + require.NoError(t, err) + return project, workspace + } + + setupProjectVersion := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest, project coderd.Project) coderd.ProjectHistory { + var buffer bytes.Buffer + writer := tar.NewWriter(&buffer) + err := writer.WriteHeader(&tar.Header{ + Name: "file", + Size: 1 << 10, + }) + require.NoError(t, err) + _, err = writer.Write(make([]byte, 1<<10)) + require.NoError(t, err) + projectHistory, err := client.CreateProjectHistory(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ + StorageMethod: database.ProjectStorageMethodInlineArchive, + StorageSource: buffer.Bytes(), + }) + require.NoError(t, err) + return projectHistory + } + + t.Run("InstantClose", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{}, &provisionerd.Options{ + Logger: slogtest.Make(t, nil), + }) + defer api.Close() + }) + + t.Run("ProcessJob", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + user := server.RandomInitialUser(t) + project, workspace := setupProjectAndWorkspace(t, server.Client, user) + projectVersion := setupProjectVersion(t, server.Client, user, project) + _, err := server.Client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectHistoryID: projectVersion.ID, + Transition: database.WorkspaceTransitionCreate, + }) + require.NoError(t, err) + + api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{}, &provisionerd.Options{ + Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), + AcquireInterval: 50 * time.Millisecond, + }) + defer api.Close() + time.Sleep(time.Millisecond * 500) + }) +} From 
1e8c421c98fc017748044bfac27c9e12facad20f Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 17:21:19 +0000 Subject: [PATCH 06/17] Improve provisioner testing --- provisionerd/provisionerd.go | 95 ++++++++++++++++++++++++++++++- provisionerd/provisionerd_test.go | 25 +++++++- 2 files changed, 118 insertions(+), 2 deletions(-) diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index 5ad6b0d65d811..20dfb28bb3d83 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -1,8 +1,16 @@ package provisionerd import ( + "archive/tar" + "bytes" "context" + "errors" "fmt" + "io" + "os" + "path/filepath" + "reflect" + "strings" "sync" "time" @@ -23,6 +31,7 @@ type Provisioners map[string]provisionersdkproto.DRPCProvisionerClient type Options struct { AcquireInterval time.Duration Logger slog.Logger + WorkDirectory string } func New(apiDialer Dialer, provisioners Provisioners, opts *Options) *API { @@ -152,6 +161,90 @@ func (a *API) acquireJob() { a.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", a.activeJob.Provisioner)) return } + defer func() { + // Cleanup the work directory after execution. + err = os.RemoveAll(a.opts.WorkDirectory) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("remove all from %q directory: %s", a.opts.WorkDirectory, err)) + return + } + }() + + err = os.MkdirAll(a.opts.WorkDirectory, 0600) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("create work directory %q: %s", a.opts.WorkDirectory, err)) + return + } + + a.opts.Logger.Debug(context.Background(), "unpacking project source archive", slog.F("size_bytes", len(a.activeJob.ProjectSourceArchive))) + reader := tar.NewReader(bytes.NewBuffer(a.activeJob.ProjectSourceArchive)) + for { + header, err := reader.Next() + if errors.Is(err, io.EOF) { + break + } + if err != nil { + a.cancelActiveJob(fmt.Sprintf("read project source archive: %s", err)) + return + } + // #nosec + path := filepath.Join(a.opts.WorkDirectory, header.Name) + if !strings.HasPrefix(path, filepath.Clean(a.opts.WorkDirectory)) { + a.cancelActiveJob("tar attempts to target relative upper directory") + return + } + switch header.Typeflag { + case tar.TypeDir: + err = os.MkdirAll(path, header.FileInfo().Mode()) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("mkdir %q: %s", path, err)) + return + } + a.opts.Logger.Debug(context.Background(), "extracted directory", slog.F("path", path)) + case tar.TypeReg: + file, err := os.Create(path) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("create file %q: %s", path, err)) + return + } + // Max file size of 10MB. 
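+				// io.CopyN returns io.EOF when the entry is smaller than the
+				// limit, which is treated as a successful copy below.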
+ size, err := io.CopyN(file, reader, (1<<20)*10) + if errors.Is(err, io.EOF) { + err = nil + } + if err != nil { + a.cancelActiveJob(fmt.Sprintf("copy file %q: %s", path, err)) + return + } + err = file.Close() + if err != nil { + a.cancelActiveJob(fmt.Sprintf("close file %q: %s", path, err)) + return + } + a.opts.Logger.Debug(context.Background(), "extracted file", + slog.F("size_bytes", size), + slog.F("path", path), + ) + } + } + + switch jobType := a.activeJob.Type.(type) { + case *proto.AcquiredJob_ProjectImport_: + a.opts.Logger.Debug(context.Background(), "acquired job is project import", + slog.F("project_history_name", jobType.ProjectImport.ProjectHistoryName), + ) + case *proto.AcquiredJob_WorkspaceProvision_: + a.opts.Logger.Debug(context.Background(), "acquired job is workspace provision", + slog.F("workspace_name", jobType.WorkspaceProvision.WorkspaceName), + slog.F("state_length", len(jobType.WorkspaceProvision.State)), + slog.F("parameters", jobType.WorkspaceProvision.ParameterValues), + ) + + default: + a.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(a.activeJob.Type).String())) + return + } + fmt.Printf("Provisioner: %s\n", provisioner) // Work! } @@ -204,7 +297,7 @@ func (a *API) closeWithError(err error) error { } if a.activeJob != nil { - errMsg := "" + errMsg := "provisioner daemon was shutdown gracefully" if err != nil { errMsg = err.Error() } diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index d5e01de960457..2eccf772707e6 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -8,6 +8,7 @@ import ( "time" "github.com/stretchr/testify/require" + "storj.io/drpc/drpcconn" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -15,7 +16,10 @@ import ( "github.com/coder/coder/coderd/coderdtest" "github.com/coder/coder/codersdk" "github.com/coder/coder/database" + "github.com/coder/coder/provisioner/terraform" "github.com/coder/coder/provisionerd" + "github.com/coder/coder/provisionersdk" + "github.com/coder/coder/provisionersdk/proto" ) func TestProvisionerd(t *testing.T) { @@ -74,9 +78,28 @@ func TestProvisionerd(t *testing.T) { }) require.NoError(t, err) - api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{}, &provisionerd.Options{ + clientPipe, serverPipe := provisionersdk.TransportPipe() + ctx, cancelFunc := context.WithCancel(context.Background()) + t.Cleanup(func() { + _ = clientPipe.Close() + _ = serverPipe.Close() + cancelFunc() + }) + go func() { + err := terraform.Serve(ctx, &terraform.ServeOptions{ + ServeOptions: &provisionersdk.ServeOptions{ + Transport: serverPipe, + }, + }) + require.NoError(t, err) + }() + + api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{ + string(database.ProvisionerTypeTerraform): proto.NewDRPCProvisionerClient(drpcconn.New(clientPipe)), + }, &provisionerd.Options{ Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), AcquireInterval: 50 * time.Millisecond, + WorkDirectory: t.TempDir(), }) defer api.Close() time.Sleep(time.Millisecond * 500) From bc8c0e000b7e3667307aa723e20fc6a71cc29770 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 18:50:54 +0000 Subject: [PATCH 07/17] Add support for completing a job --- coderd/provisionerd.go | 191 +++++++++++++++++++++++++- database/databasefake/databasefake.go | 2 + database/query.sql | 4 +- database/query.sql.go | 20 ++- provisionerd/provisionerd.go | 57 +++++++- 
provisionerd/provisionerd_test.go | 9 +- 6 files changed, 269 insertions(+), 14 deletions(-) diff --git a/coderd/provisionerd.go b/coderd/provisionerd.go index a5c33f99f77f3..c5f186151a50d 100644 --- a/coderd/provisionerd.go +++ b/coderd/provisionerd.go @@ -7,6 +7,7 @@ import ( "errors" "fmt" "net/http" + "reflect" "golang.org/x/xerrors" "storj.io/drpc/drpcmux" @@ -267,6 +268,194 @@ func (s *provisionerdServer) CancelJob(ctx context.Context, cancelJob *proto.Can return &proto.Empty{}, nil } +// CompleteJob is triggered by a provision daemon to mark a provisioner job as completed. func (s *provisionerdServer) CompleteJob(ctx context.Context, completed *proto.CompletedJob) (*proto.Empty, error) { - return nil, nil + jobID, err := uuid.Parse(completed.JobId) + if err != nil { + return nil, xerrors.Errorf("parse job id: %w", err) + } + job, err := s.Database.GetProvisionerJobByID(ctx, jobID) + if err != nil { + return nil, xerrors.Errorf("get job by id: %w", err) + } + // TODO: Check if the worker ID matches! + // If it doesn't, a provisioner daemon could be impersonating another job! + + switch jobType := completed.Type.(type) { + case *proto.CompletedJob_ProjectImport_: + var input projectImportJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return nil, xerrors.Errorf("unmarshal job data: %w", err) + } + + // Validate that all parameters send from the provisioner daemon + // follow the protocol. + projectParameters := make([]database.InsertProjectParameterParams, 0, len(jobType.ProjectImport.ParameterSchemas)) + for _, protoParameter := range jobType.ProjectImport.ParameterSchemas { + validationTypeSystem, err := convertValidationTypeSystem(protoParameter.ValidationTypeSystem) + if err != nil { + return nil, xerrors.Errorf("convert validation type system for %q: %w", protoParameter.Name, err) + } + + projectParameter := database.InsertProjectParameterParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + ProjectHistoryID: input.ProjectHistoryID, + Name: protoParameter.Name, + Description: protoParameter.Description, + RedisplayValue: protoParameter.RedisplayValue, + ValidationError: protoParameter.ValidationError, + ValidationCondition: protoParameter.ValidationCondition, + ValidationValueType: protoParameter.ValidationValueType, + ValidationTypeSystem: validationTypeSystem, + + AllowOverrideDestination: protoParameter.AllowOverrideDestination, + AllowOverrideSource: protoParameter.AllowOverrideSource, + } + + // It's possible a parameter doesn't define a default source! + if protoParameter.DefaultSource != nil { + parameterSourceScheme, err := convertParameterSourceScheme(protoParameter.DefaultSource.Scheme) + if err != nil { + return nil, xerrors.Errorf("convert parameter source scheme: %w", err) + } + projectParameter.DefaultSourceScheme = parameterSourceScheme + projectParameter.DefaultSourceValue = sql.NullString{ + String: protoParameter.DefaultSource.Value, + Valid: protoParameter.DefaultSource.Value != "", + } + } + + // It's possible a parameter doesn't define a default destination! 
+ if protoParameter.DefaultDestination != nil { + parameterDestinationScheme, err := convertParameterDestinationScheme(protoParameter.DefaultDestination.Scheme) + if err != nil { + return nil, xerrors.Errorf("convert parameter destination scheme: %w", err) + } + projectParameter.DefaultDestinationScheme = parameterDestinationScheme + projectParameter.DefaultDestinationValue = sql.NullString{ + String: protoParameter.DefaultDestination.Value, + Valid: protoParameter.DefaultDestination.Value != "", + } + } + + projectParameters = append(projectParameters, projectParameter) + } + + // This must occur in a transaction in case of failure. + err = s.Database.InTx(func(db database.Store) error { + err = db.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{ + ID: jobID, + UpdatedAt: database.Now(), + CompletedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + }) + if err != nil { + return xerrors.Errorf("update provisioner job: %w", err) + } + for _, projectParameter := range projectParameters { + _, err = db.InsertProjectParameter(ctx, projectParameter) + if err != nil { + return xerrors.Errorf("insert project parameter %q: %w", projectParameter.Name, err) + } + } + return nil + }) + if err != nil { + return nil, xerrors.Errorf("complete job: %w", err) + } + case *proto.CompletedJob_WorkspaceProvision_: + var input workspaceProvisionJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return nil, xerrors.Errorf("unmarshal job data: %w", err) + } + + workspaceHistory, err := s.Database.GetWorkspaceHistoryByID(ctx, input.WorkspaceHistoryID) + if err != nil { + return nil, xerrors.Errorf("get workspace history: %w", err) + } + + err = s.Database.InTx(func(db database.Store) error { + err = db.UpdateProvisionerJobByID(ctx, database.UpdateProvisionerJobByIDParams{ + ID: jobID, + UpdatedAt: database.Now(), + CompletedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + }) + if err != nil { + return xerrors.Errorf("update provisioner job: %w", err) + } + err = db.UpdateWorkspaceHistoryByID(ctx, database.UpdateWorkspaceHistoryByIDParams{ + ID: workspaceHistory.ID, + UpdatedAt: database.Now(), + ProvisionerState: jobType.WorkspaceProvision.State, + CompletedAt: sql.NullTime{ + Time: database.Now(), + Valid: true, + }, + }) + if err != nil { + return xerrors.Errorf("update workspace history: %w", err) + } + for _, protoResource := range jobType.WorkspaceProvision.Resources { + _, err = db.InsertWorkspaceResource(ctx, database.InsertWorkspaceResourceParams{ + ID: uuid.New(), + CreatedAt: database.Now(), + WorkspaceHistoryID: input.WorkspaceHistoryID, + Type: protoResource.Type, + Name: protoResource.Name, + // TODO: Generate this at the variable validation phase. + // Set the value in `default_source`, and disallow overwrite. 
+ WorkspaceAgentToken: uuid.NewString(), + }) + if err != nil { + return xerrors.Errorf("insert workspace resource %q: %w", protoResource.Name, err) + } + } + return nil + }) + if err != nil { + return nil, xerrors.Errorf("complete job: %w", err) + } + default: + return nil, xerrors.Errorf("unknown job type %q; ensure coderd and provisionerd versions match", + reflect.TypeOf(completed.Type).String()) + } + + return &proto.Empty{}, nil +} + +func convertValidationTypeSystem(typeSystem sdkproto.ParameterSchema_TypeSystem) (database.ParameterTypeSystem, error) { + switch typeSystem { + case sdkproto.ParameterSchema_HCL: + return database.ParameterTypeSystemHCL, nil + default: + return database.ParameterTypeSystem(""), xerrors.Errorf("unknown type system: %d", typeSystem) + } +} + +func convertParameterSourceScheme(sourceScheme sdkproto.ParameterSource_Scheme) (database.ParameterSourceScheme, error) { + switch sourceScheme { + case sdkproto.ParameterSource_DATA: + return database.ParameterSourceSchemeData, nil + default: + return database.ParameterSourceScheme(""), xerrors.Errorf("unknown parameter source scheme: %d", sourceScheme) + } +} + +func convertParameterDestinationScheme(destinationScheme sdkproto.ParameterDestination_Scheme) (database.ParameterDestinationScheme, error) { + switch destinationScheme { + case sdkproto.ParameterDestination_ENVIRONMENT_VARIABLE: + return database.ParameterDestinationSchemeEnvironmentVariable, nil + case sdkproto.ParameterDestination_PROVISIONER_VARIABLE: + return database.ParameterDestinationSchemeProvisionerVariable, nil + default: + return database.ParameterDestinationScheme(""), xerrors.Errorf("unknown parameter destination scheme: %d", destinationScheme) + } } diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index f0e97101321f5..e7f4f3cdb192d 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -659,7 +659,9 @@ func (q *fakeQuerier) UpdateWorkspaceHistoryByID(_ context.Context, arg database continue } workspaceHistory.UpdatedAt = arg.UpdatedAt + workspaceHistory.CompletedAt = arg.CompletedAt workspaceHistory.AfterID = arg.AfterID + workspaceHistory.ProvisionerState = arg.ProvisionerState q.workspaceHistory[index] = workspaceHistory return nil } diff --git a/database/query.sql b/database/query.sql index b361bbe3094d0..6b0345786584b 100644 --- a/database/query.sql +++ b/database/query.sql @@ -543,6 +543,8 @@ UPDATE workspace_history SET updated_at = $2, - after_id = $3 + completed_at = $3, + after_id = $4, + provisioner_state = $5 WHERE id = $1; diff --git a/database/query.sql.go b/database/query.sql.go index cb49f86eb67f0..72b5c85420069 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -1853,18 +1853,28 @@ UPDATE workspace_history SET updated_at = $2, - after_id = $3 + completed_at = $3, + after_id = $4, + provisioner_state = $5 WHERE id = $1 ` type UpdateWorkspaceHistoryByIDParams struct { - ID uuid.UUID `db:"id" json:"id"` - UpdatedAt time.Time `db:"updated_at" json:"updated_at"` - AfterID uuid.NullUUID `db:"after_id" json:"after_id"` + ID uuid.UUID `db:"id" json:"id"` + UpdatedAt time.Time `db:"updated_at" json:"updated_at"` + CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"` + AfterID uuid.NullUUID `db:"after_id" json:"after_id"` + ProvisionerState []byte `db:"provisioner_state" json:"provisioner_state"` } func (q *sqlQuerier) UpdateWorkspaceHistoryByID(ctx context.Context, arg UpdateWorkspaceHistoryByIDParams) error { - _, err := 
q.db.ExecContext(ctx, updateWorkspaceHistoryByID, arg.ID, arg.UpdatedAt, arg.AfterID) + _, err := q.db.ExecContext(ctx, updateWorkspaceHistoryByID, + arg.ID, + arg.UpdatedAt, + arg.CompletedAt, + arg.AfterID, + arg.ProvisionerState, + ) return err } diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index 20dfb28bb3d83..9cc82fd0b19ec 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -168,6 +168,7 @@ func (a *API) acquireJob() { a.cancelActiveJob(fmt.Sprintf("remove all from %q directory: %s", a.opts.WorkDirectory, err)) return } + a.opts.Logger.Debug(context.Background(), "cleaned up work directory") }() err = os.MkdirAll(a.opts.WorkDirectory, 0600) @@ -233,6 +234,26 @@ func (a *API) acquireJob() { a.opts.Logger.Debug(context.Background(), "acquired job is project import", slog.F("project_history_name", jobType.ProjectImport.ProjectHistoryName), ) + + response, err := provisioner.Parse(a.closeContext, &provisionersdkproto.Parse_Request{ + Directory: a.opts.WorkDirectory, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("parse source: %s", err)) + return + } + _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ + JobId: a.activeJob.JobId, + Type: &proto.CompletedJob_ProjectImport_{ + ProjectImport: &proto.CompletedJob_ProjectImport{ + ParameterSchemas: response.ParameterSchemas, + }, + }, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + return + } case *proto.AcquiredJob_WorkspaceProvision_: a.opts.Logger.Debug(context.Background(), "acquired job is workspace provision", slog.F("workspace_name", jobType.WorkspaceProvision.WorkspaceName), @@ -240,13 +261,40 @@ func (a *API) acquireJob() { slog.F("parameters", jobType.WorkspaceProvision.ParameterValues), ) + response, err := provisioner.Provision(a.closeContext, &provisionersdkproto.Provision_Request{ + Directory: a.opts.WorkDirectory, + ParameterValues: jobType.WorkspaceProvision.ParameterValues, + State: jobType.WorkspaceProvision.State, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("provision: %s", err)) + return + } + a.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", + slog.F("resource_count", len(response.Resources)), + slog.F("resources", response.Resources), + slog.F("state_length", len(response.State)), + ) + + // Complete job may need to be async if we disconnected... + // When we reconnect we can flush any of these cached values. + _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ + JobId: a.activeJob.JobId, + Type: &proto.CompletedJob_WorkspaceProvision_{ + WorkspaceProvision: &proto.CompletedJob_WorkspaceProvision{ + State: response.State, + Resources: response.Resources, + }, + }, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + return + } default: a.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(a.activeJob.Type).String())) return } - - fmt.Printf("Provisioner: %s\n", provisioner) - // Work! 
} func (a *API) cancelActiveJob(errMsg string) { @@ -257,6 +305,9 @@ func (a *API) cancelActiveJob(errMsg string) { a.activeJob = nil return } + if a.activeJob == nil { + return + } a.opts.Logger.Info(context.Background(), "canceling active job", slog.F("error_message", errMsg), diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 2eccf772707e6..1040dc9eb2c4f 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -42,12 +42,13 @@ func TestProvisionerd(t *testing.T) { setupProjectVersion := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest, project coderd.Project) coderd.ProjectHistory { var buffer bytes.Buffer writer := tar.NewWriter(&buffer) + content := `resource "null_resource" "hi" {}` err := writer.WriteHeader(&tar.Header{ - Name: "file", - Size: 1 << 10, + Name: "main.tf", + Size: int64(len(content)), }) require.NoError(t, err) - _, err = writer.Write(make([]byte, 1<<10)) + _, err = writer.Write([]byte(content)) require.NoError(t, err) projectHistory, err := client.CreateProjectHistory(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ StorageMethod: database.ProjectStorageMethodInlineArchive, @@ -102,6 +103,6 @@ func TestProvisionerd(t *testing.T) { WorkDirectory: t.TempDir(), }) defer api.Close() - time.Sleep(time.Millisecond * 500) + time.Sleep(time.Millisecond * 1500) }) } From 5d16f2aec395e7402e942a22c71da6f832186450 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 21:10:54 +0000 Subject: [PATCH 08/17] Use fork of terraform-exec for JSON output --- go.mod | 6 ++-- go.sum | 44 ++---------------------------- provisioner/terraform/provision.go | 2 +- 3 files changed, 6 insertions(+), 46 deletions(-) diff --git a/go.mod b/go.mod index 5d2a8f7b6f0a2..492d77a24ee2e 100644 --- a/go.mod +++ b/go.mod @@ -2,6 +2,9 @@ module github.com/coder/coder go 1.17 +// Required until https://github.com/hashicorp/terraform-exec/pull/275 is merged. +replace github.com/hashicorp/terraform-exec => github.com/kylecarbs/terraform-exec v0.15.1-0.20220129210610-65894a884c09 + // Required until https://github.com/hashicorp/terraform-config-inspect/pull/74 is merged. 
replace github.com/hashicorp/terraform-config-inspect => github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88 @@ -41,14 +44,12 @@ require ( require ( cloud.google.com/go/compute v0.1.0 // indirect - cloud.google.com/go/storage v1.19.0 // indirect github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect github.com/Microsoft/go-winio v0.5.1 // indirect github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 // indirect github.com/agext/levenshtein v1.2.3 // indirect github.com/alecthomas/chroma v0.10.0 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect - github.com/aws/aws-sdk-go v1.42.42 // indirect github.com/cenkalti/backoff/v4 v4.1.2 // indirect github.com/containerd/continuity v0.2.2 // indirect github.com/davecgh/go-spew v1.1.1 // indirect @@ -111,7 +112,6 @@ require ( golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 // indirect golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect golang.org/x/text v0.3.7 // indirect - google.golang.org/api v0.65.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 // indirect google.golang.org/grpc v1.44.0 // indirect diff --git a/go.sum b/go.sum index 8c47b736aaa19..c1e27f32c631a 100644 --- a/go.sum +++ b/go.sum @@ -32,7 +32,6 @@ cloud.google.com/go v0.94.1/go.mod h1:qAlAugsXlC+JWO+Bke5vCtc9ONxjQT3drlTTnAplMW cloud.google.com/go v0.97.0/go.mod h1:GF7l59pYBVlXQIBLx3a761cZ41F9bBH3JUlihCt2Udc= cloud.google.com/go v0.98.0/go.mod h1:ua6Ush4NALrHk5QXDWnjvZHN93OuF0HfuEPq9I1X0cM= cloud.google.com/go v0.99.0/go.mod h1:w0Xx2nLzqWJPuozYQX+hFfCSI8WioryfRDzkoI/Y2ZA= -cloud.google.com/go v0.100.1/go.mod h1:fs4QogzfH5n2pBXBP9vRiU+eCny7lD2vmFZy79Iuw1U= cloud.google.com/go v0.100.2 h1:t9Iw5QH5v4XtlEQaCtUY7x6sCABps8sW0acw7e2WQ6Y= cloud.google.com/go v0.100.2/go.mod h1:4Xra9TjzAeYHrl5+oeLlzbM2k3mjVhZh4UqTZ//w99A= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= @@ -46,8 +45,6 @@ cloud.google.com/go/compute v0.1.0/go.mod h1:GAesmwr110a34z04OlxYkATPBEfVhkymfTB cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= -cloud.google.com/go/iam v0.1.1 h1:4CapQyNFjiksks1/x7jsvsygFPhihslYk5GptIrlX68= -cloud.google.com/go/iam v0.1.1/go.mod h1:CKqrcnI/suGpybEHxZ7BMehL0oA4LpdyJdUlTl9jVMw= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= @@ -58,8 +55,6 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= -cloud.google.com/go/storage v1.19.0 h1:XOQSnPJD8hRtZJ3VdCyK0mBZsGGImrzPAMbSWcHSe6Q= -cloud.google.com/go/storage v1.19.0/go.mod h1:6rgiTRjOqI/Zd9YKimub5TIB4d+p3LH33V3ZE1DMuUM= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod 
h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8= github.com/Azure/azure-pipeline-go v0.2.3/go.mod h1:x841ezTBIMG6O3lAcl8ATHnsOPVl2bqk7S3ta6S6u4k= @@ -85,9 +80,6 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= -github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= -github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= -github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= @@ -157,10 +149,7 @@ github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgI github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= -github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/aws/aws-sdk-go v1.17.7/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= -github.com/aws/aws-sdk-go v1.42.42 h1:2K61yu5BApC9ExAwC5Vk6ljWzBGbiRGRQYLW7adhP5U= -github.com/aws/aws-sdk-go v1.42.42/go.mod h1:OGr6lGMAKGlG9CVrYnWYDKIyb829c6EVBRjxqjmPepc= github.com/aws/aws-sdk-go-v2 v1.8.0/go.mod h1:xEFuWz+3TYdlPRuo+CqATbeDWIWyaT5uAPwPaWtgse0= github.com/aws/aws-sdk-go-v2 v1.9.2/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4= github.com/aws/aws-sdk-go-v2/config v1.6.0/go.mod h1:TNtBVmka80lRPk5+S9ZqVfFszOQAGJJ9KbT3EM3CHNU= @@ -191,8 +180,6 @@ github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= -github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= @@ -219,7 +206,6 @@ github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= github.com/checkpoint-restore/go-criu/v5 v5.3.0/go.mod 
h1:E/eQpaFtUKGOOSEBZgmKAcn+zUUwWxqcaKZlF54wK8E= -github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= @@ -648,7 +634,6 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gax-go/v2 v2.1.0/go.mod h1:Q3nei7sK6ybPYH7twZdmQpAd1MKb7pfu6SK+H1/DsU0= -github.com/googleapis/gax-go/v2 v2.1.1 h1:dp3bWCh+PPO1zjRRiCSczJav13sBvG4UhNyVTa1KqdU= github.com/googleapis/gax-go/v2 v2.1.1/go.mod h1:hddJymUZASv3XPyGkUpKj8pPO47Rmb0eJc8R6ouapiM= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= @@ -676,14 +661,11 @@ github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FK github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/go-checkpoint v0.5.0 h1:MFYpPZCnQqQTE18jFwSII6eUQrD/oxMFp3mlgcqk5mU= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-getter v1.5.3 h1:NF5+zOlQegim+w/EUhSLh6QhXHmZMEeHLQzllkQ3ROU= -github.com/hashicorp/go-getter v1.5.3/go.mod h1:BrrV/1clo8cCYu6mxvboYg+KutTiFnXjMEgDD8+i7ZI= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-hclog v1.0.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= @@ -696,14 +678,10 @@ github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+l github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= -github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= -github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-uuid v1.0.1 
h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= -github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.3.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.4.0 h1:aAQzgqIrRKRa7w75CKpbBxYsmUoPjzVm1W59ca1L0J4= github.com/hashicorp/go-version v1.4.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= @@ -725,14 +703,11 @@ github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= -github.com/hashicorp/terraform-exec v0.15.0 h1:cqjh4d8HYNQrDoEmlSGelHmg2DYDh5yayckvJ5bV18E= -github.com/hashicorp/terraform-exec v0.15.0/go.mod h1:H4IG8ZxanU+NW0ZpDRNsvh9f0ul7C0nHP+rUR/CHs7I= github.com/hashicorp/terraform-json v0.13.0 h1:Li9L+lKD1FO5RVFRM1mMMIBDoUHslOniyEi5CM+FWGY= github.com/hashicorp/terraform-json v0.13.0/go.mod h1:y5OdLBCT+rxbwnpxZs9kGL7R9ExU76+cpdY8zHwoazk= github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87 h1:xixZ2bWeofWV68J+x6AzmKuVM/JWCQwkWm6GW/MUR6I= github.com/hashicorp/yamux v0.0.0-20211028200310-0bc27b27de87/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= @@ -796,7 +771,6 @@ github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/ github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= -github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= @@ -832,7 +806,6 @@ github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= -github.com/klauspost/compress v1.11.2/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress 
v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= @@ -858,6 +831,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/ktrysmt/go-bitbucket v0.6.4/go.mod h1:9u0v3hsd2rqCHRIpbir1oP7F58uo5dq19sBYvuMoyQ4= github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88 h1:tvG/qs5c4worwGyGnbbb4i/dYYLjpFwDMqcIT3awAf8= github.com/kylecarbs/terraform-config-inspect v0.0.0-20211215004401-bbc517866b88/go.mod h1:Z0Nnk4+3Cy89smEbrq+sl1bxc9198gIP4I7wcQF6Kqs= +github.com/kylecarbs/terraform-exec v0.15.1-0.20220129210610-65894a884c09 h1:o+8BFGukFfFmGgOJIWEeDXkXRDdFoZ9ndi/GjqnHTGg= +github.com/kylecarbs/terraform-exec v0.15.1-0.20220129210610-65894a884c09/go.mod h1:lRENyXw1BL5V0FCCE8lsW3XoVLRLnxM54jrlYSyXpvM= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= @@ -906,7 +881,6 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= -github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= @@ -918,13 +892,9 @@ github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJys github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= -github.com/mitchellh/cli v1.1.2/go.mod h1:6iaV0fGdElS6dPBx0EApTxHrcWvmJphyh2n8YBLPPZ4= -github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= -github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= @@ -937,7 +907,6 @@ github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RR github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod 
h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= -github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= github.com/moby/moby v20.10.12+incompatible h1:MJVrdG0tIQqVJQBTdtooPuZQFIgski5pYTXlcW8ToE0= @@ -1219,8 +1188,6 @@ github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs= github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/ulikunitz/xz v0.5.8 h1:ERv8V6GKqVi23rgu5cj9pVfVzJbOqAY2Ntl88O6c2nQ= -github.com/ulikunitz/xz v0.5.8/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/unrolled/secure v1.0.9 h1:BWRuEb1vDrBFFDdbCnKkof3gZ35I/bnHGyt0LB0TNyQ= github.com/unrolled/secure v1.0.9/go.mod h1:fO+mEan+FLB0CdEnHf6Q4ZZVNqG+5fuLFnP8p0BXDPI= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= @@ -1333,7 +1300,6 @@ golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= @@ -1755,9 +1721,6 @@ google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUb google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= google.golang.org/api v0.62.0/go.mod h1:dKmwPCydfsad4qCH08MSdgWjfHOyfpd4VtDGgRFdavw= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= -google.golang.org/api v0.64.0/go.mod h1:931CdxA8Rm4t6zqTFGSsgwbAEZ2+GMYurbndwSimebM= -google.golang.org/api v0.65.0 h1:MTW9c+LIBAbwoS1Gb+YV7NjFBt2f7GtAS5hIzh2NjgQ= -google.golang.org/api v0.65.0/go.mod h1:ArYhxgGadlWmqO1IqVujw6Cs8IdD33bTmzKo2Sh+cbg= google.golang.org/appengine v1.0.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -1841,8 +1804,6 @@ google.golang.org/genproto v0.0.0-20211203200212-54befc351ae9/go.mod h1:5CzLGKJ6 google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211221195035-429b39de9b1c/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= -google.golang.org/genproto v0.0.0-20211223182754-3ac035c7e7cb/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= 
-google.golang.org/genproto v0.0.0-20220107163113-42d7afdf6368/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220111164026-67b88f271998/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5 h1:zzNejm+EgrbLfDZ6lu9Uud2IVvHySPl8vQzf04laR5Q= google.golang.org/genproto v0.0.0-20220118154757-00ab72f36ad5/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= @@ -1906,7 +1867,6 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= -gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 4cf94ec0d9378..a6e6fe42e9bc8 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -38,7 +38,7 @@ func (t *terraform) Provision(ctx context.Context, request *proto.Provision_Requ } env := map[string]string{} - options := make([]tfexec.ApplyOption, 0) + options := []tfexec.ApplyOption{tfexec.JSON(true)} for _, param := range request.ParameterValues { switch param.DestinationScheme { case proto.ParameterDestination_ENVIRONMENT_VARIABLE: From ce4a9fb1b569b5c55686a1bfdc315c7770fbacae Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sat, 29 Jan 2022 23:17:47 +0000 Subject: [PATCH 09/17] Add logging to provision jobs --- provisioner/terraform/parse.go | 19 +- provisioner/terraform/parse_test.go | 71 ++- provisioner/terraform/provision.go | 141 ++++- provisioner/terraform/provision_test.go | 49 +- provisionerd/proto/provisionerd.pb.go | 273 ++++----- provisionerd/proto/provisionerd.proto | 19 +- provisionerd/provisionerd.go | 182 ++++-- provisionerd/provisionerd_test.go | 5 +- provisionersdk/proto/provisioner.pb.go | 593 ++++++++++++++++---- provisionersdk/proto/provisioner.proto | 36 +- provisionersdk/proto/provisioner_drpc.pb.go | 112 ++-- 11 files changed, 1042 insertions(+), 458 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index cc92bc8f8008c..926ff812796da 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -1,7 +1,6 @@ package terraform import ( - "context" "encoding/json" "os" @@ -12,24 +11,30 @@ import ( ) // Parse extracts Terraform variables from source-code. 
-func (*terraform) Parse(_ context.Context, request *proto.Parse_Request) (*proto.Parse_Response, error) { +func (*terraform) Parse(request *proto.Parse_Request, stream proto.DRPCProvisioner_ParseStream) error { + defer stream.CloseSend() + module, diags := tfconfig.LoadModule(request.Directory) if diags.HasErrors() { - return nil, xerrors.Errorf("load module: %w", diags.Err()) + return xerrors.Errorf("load module: %w", diags.Err()) } parameters := make([]*proto.ParameterSchema, 0, len(module.Variables)) for _, v := range module.Variables { schema, err := convertVariableToParameter(v) if err != nil { - return nil, xerrors.Errorf("convert variable %q: %w", v.Name, err) + return xerrors.Errorf("convert variable %q: %w", v.Name, err) } parameters = append(parameters, schema) } - return &proto.Parse_Response{ - ParameterSchemas: parameters, - }, nil + return stream.Send(&proto.Parse_Response{ + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: parameters, + }, + }, + }) } // Converts a Terraform variable to a provisioner parameter. diff --git a/provisioner/terraform/parse_test.go b/provisioner/terraform/parse_test.go index bbfe166827851..e678d1d36c674 100644 --- a/provisioner/terraform/parse_test.go +++ b/provisioner/terraform/parse_test.go @@ -49,10 +49,14 @@ func TestParse(t *testing.T) { }`, }, Response: &proto.Parse_Response{ - ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", - Description: "Testing!", - }}, + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "A", + Description: "Testing!", + }}, + }, + }, }, }, { Name: "default-variable-value", @@ -62,17 +66,21 @@ func TestParse(t *testing.T) { }`, }, Response: &proto.Parse_Response{ - ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", - DefaultSource: &proto.ParameterSource{ - Scheme: proto.ParameterSource_DATA, - Value: "\"wow\"", + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "A", + DefaultSource: &proto.ParameterSource{ + Scheme: proto.ParameterSource_DATA, + Value: "\"wow\"", + }, + DefaultDestination: &proto.ParameterDestination{ + Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, + Value: "A", + }, + }}, }, - DefaultDestination: &proto.ParameterDestination{ - Scheme: proto.ParameterDestination_PROVISIONER_VARIABLE, - Value: "A", - }, - }}, + }, }, }, { Name: "variable-validation", @@ -84,10 +92,15 @@ func TestParse(t *testing.T) { }`, }, Response: &proto.Parse_Response{ - ParameterSchemas: []*proto.ParameterSchema{{ - Name: "A", - ValidationCondition: `var.A == "value"`, - }}, + Type: &proto.Parse_Response_Complete{ + Complete: &proto.Parse_Complete{ + ParameterSchemas: []*proto.ParameterSchema{{ + Name: "A", + ValidationCondition: `var.A == "value"`, + }, + }, + }, + }, }, }} { testCase := testCase @@ -106,13 +119,23 @@ func TestParse(t *testing.T) { }) require.NoError(t, err) - // Ensure the want and got are equivalent! - want, err := json.Marshal(testCase.Response) - require.NoError(t, err) - got, err := json.Marshal(response) - require.NoError(t, err) + for { + msg, err := response.Recv() + require.NoError(t, err) + + if msg.GetComplete() == nil { + continue + } - require.Equal(t, string(want), string(got)) + // Ensure the want and got are equivalent! 
+ want, err := json.Marshal(testCase.Response) + require.NoError(t, err) + got, err := json.Marshal(msg) + require.NoError(t, err) + + require.Equal(t, string(want), string(got)) + break + } }) } } diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index a6e6fe42e9bc8..f9c63643aedd0 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -1,10 +1,13 @@ package terraform import ( - "context" + "bufio" + "encoding/json" "fmt" + "io" "os" "path/filepath" + "strings" "github.com/hashicorp/terraform-exec/tfexec" "golang.org/x/xerrors" @@ -13,28 +16,49 @@ import ( ) // Provision executes `terraform apply`. -func (t *terraform) Provision(ctx context.Context, request *proto.Provision_Request) (*proto.Provision_Response, error) { +func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRPCProvisioner_ProvisionStream) error { + // defer stream.CloseSend() + ctx := stream.Context() statefilePath := filepath.Join(request.Directory, "terraform.tfstate") - err := os.WriteFile(statefilePath, request.State, 0600) - if err != nil { - return nil, xerrors.Errorf("write statefile %q: %w", statefilePath, err) + if len(request.State) > 0 { + err := os.WriteFile(statefilePath, request.State, 0600) + if err != nil { + return xerrors.Errorf("write statefile %q: %w", statefilePath, err) + } } terraform, err := tfexec.NewTerraform(request.Directory, t.binaryPath) if err != nil { - return nil, xerrors.Errorf("create new terraform executor: %w", err) + return xerrors.Errorf("create new terraform executor: %w", err) } version, _, err := terraform.Version(ctx, false) if err != nil { - return nil, xerrors.Errorf("get terraform version: %w", err) + return xerrors.Errorf("get terraform version: %w", err) } if !version.GreaterThanOrEqual(minimumTerraformVersion) { - return nil, xerrors.Errorf("terraform version %q is too old. required >= %q", version.String(), minimumTerraformVersion.String()) + return xerrors.Errorf("terraform version %q is too old. 
required >= %q", version.String(), minimumTerraformVersion.String()) } + reader, writer := io.Pipe() + defer reader.Close() + defer writer.Close() + go func() { + scanner := bufio.NewScanner(reader) + for scanner.Scan() { + _ = stream.Send(&proto.Provision_Response{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: proto.LogLevel_INFO, + Text: scanner.Text(), + }, + }, + }) + } + }() + terraform.SetStdout(writer) err = terraform.Init(ctx) if err != nil { - return nil, xerrors.Errorf("initialize terraform: %w", err) + return xerrors.Errorf("initialize terraform: %w", err) } env := map[string]string{} @@ -46,26 +70,73 @@ func (t *terraform) Provision(ctx context.Context, request *proto.Provision_Requ case proto.ParameterDestination_PROVISIONER_VARIABLE: options = append(options, tfexec.Var(fmt.Sprintf("%s=%s", param.Name, param.Value))) default: - return nil, xerrors.Errorf("unsupported parameter type %q for %q", param.DestinationScheme, param.Name) + return xerrors.Errorf("unsupported parameter type %q for %q", param.DestinationScheme, param.Name) } } err = terraform.SetEnv(env) if err != nil { - return nil, xerrors.Errorf("apply environment variables: %w", err) + return xerrors.Errorf("apply environment variables: %w", err) } + reader, writer = io.Pipe() + defer reader.Close() + defer writer.Close() + go func() { + decoder := json.NewDecoder(reader) + for { + var log terraformProvisionLog + err := decoder.Decode(&log) + if err != nil { + return + } + + logLevel, err := convertTerraformLogLevel(log.Level) + if err != nil { + // Not a big deal, but we should handle this at some point! + continue + } + _ = stream.Send(&proto.Provision_Response{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: logLevel, + Text: log.Message, + }, + }, + }) + + if log.Diagnostic == nil { + continue + } + + // If the diagnostic is provided, let's provide a bit more info! + logLevel, err = convertTerraformLogLevel(log.Diagnostic.Severity) + if err != nil { + continue + } + _ = stream.Send(&proto.Provision_Response{ + Type: &proto.Provision_Response_Log{ + Log: &proto.Log{ + Level: logLevel, + Text: log.Diagnostic.Detail, + }, + }, + }) + } + }() + + terraform.SetStdout(writer) err = terraform.Apply(ctx, options...) 
if err != nil { - return nil, xerrors.Errorf("apply terraform: %w", err) + return xerrors.Errorf("apply terraform: %w", err) } statefileContent, err := os.ReadFile(statefilePath) if err != nil { - return nil, xerrors.Errorf("read file %q: %w", statefilePath, err) + return xerrors.Errorf("read file %q: %w", statefilePath, err) } state, err := terraform.ShowStateFile(ctx, statefilePath) if err != nil { - return nil, xerrors.Errorf("show state file %q: %w", statefilePath, err) + return xerrors.Errorf("show state file %q: %w", statefilePath, err) } resources := make([]*proto.Resource, 0) if state.Values != nil { @@ -77,8 +148,42 @@ func (t *terraform) Provision(ctx context.Context, request *proto.Provision_Requ } } - return &proto.Provision_Response{ - Resources: resources, - State: statefileContent, - }, nil + return stream.Send(&proto.Provision_Response{ + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{ + State: statefileContent, + Resources: resources, + }, + }, + }) +} + +type terraformProvisionLog struct { + Level string `json:"@level"` + Message string `json:"@message"` + + Diagnostic *terraformProvisionLogDiagnostic `json:"diagnostic"` +} + +type terraformProvisionLogDiagnostic struct { + Severity string `json:"severity"` + Summary string `json:"summary"` + Detail string `json:"detail"` +} + +func convertTerraformLogLevel(logLevel string) (proto.LogLevel, error) { + switch strings.ToLower(logLevel) { + case "trace": + return proto.LogLevel_TRACE, nil + case "debug": + return proto.LogLevel_DEBUG, nil + case "info": + return proto.LogLevel_INFO, nil + case "warn": + return proto.LogLevel_WARN, nil + case "error": + return proto.LogLevel_ERROR, nil + default: + return proto.LogLevel(0), xerrors.Errorf("invalid log level %q", logLevel) + } } diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index b596c85d0bf15..45c3e08b99f22 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -68,7 +68,11 @@ func TestProvision(t *testing.T) { Value: "example", }}, }, - Response: &proto.Provision_Response{}, + Response: &proto.Provision_Response{ + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{}, + }, + }, }, { Name: "missing-variable", Files: map[string]string{ @@ -82,10 +86,14 @@ func TestProvision(t *testing.T) { "main.tf": `resource "null_resource" "A" {}`, }, Response: &proto.Provision_Response{ - Resources: []*proto.Resource{{ - Name: "A", - Type: "null_resource", - }}, + Type: &proto.Provision_Response_Complete{ + Complete: &proto.Provision_Complete{ + Resources: []*proto.Resource{{ + Name: "A", + Type: "null_resource", + }}, + }, + }, }, }, { Name: "invalid-sourcecode", @@ -112,20 +120,31 @@ func TestProvision(t *testing.T) { request.State = testCase.Request.State } response, err := api.Provision(ctx, request) - if testCase.Error { - require.Error(t, err) - return - } require.NoError(t, err) - require.Greater(t, len(response.State), 0) + for { + msg, err := response.Recv() + if testCase.Error { + require.Error(t, err) + return + } + require.NoError(t, err) - resourcesGot, err := json.Marshal(response.Resources) - require.NoError(t, err) + if msg.GetComplete() == nil { + continue + } - resourcesWant, err := json.Marshal(testCase.Response.Resources) - require.NoError(t, err) + require.NoError(t, err) + require.Greater(t, len(msg.GetComplete().State), 0) + + resourcesGot, err := json.Marshal(msg.GetComplete().Resources) + require.NoError(t, 
err) - require.Equal(t, string(resourcesWant), string(resourcesGot)) + resourcesWant, err := json.Marshal(testCase.Response.GetComplete().Resources) + require.NoError(t, err) + + require.Equal(t, string(resourcesWant), string(resourcesGot)) + break + } }) } } diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index 11dfadb372418..86218b19d237b 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ b/provisionerd/proto/provisionerd.pb.go @@ -68,65 +68,6 @@ func (LogSource) EnumDescriptor() ([]byte, []int) { return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{0} } -// LogLevel represents severity of the log. -type LogLevel int32 - -const ( - LogLevel_TRACE LogLevel = 0 - LogLevel_DEBUG LogLevel = 1 - LogLevel_INFO LogLevel = 2 - LogLevel_WARN LogLevel = 3 - LogLevel_ERROR LogLevel = 4 - LogLevel_FATAL LogLevel = 5 -) - -// Enum value maps for LogLevel. -var ( - LogLevel_name = map[int32]string{ - 0: "TRACE", - 1: "DEBUG", - 2: "INFO", - 3: "WARN", - 4: "ERROR", - 5: "FATAL", - } - LogLevel_value = map[string]int32{ - "TRACE": 0, - "DEBUG": 1, - "INFO": 2, - "WARN": 3, - "ERROR": 4, - "FATAL": 5, - } -) - -func (x LogLevel) Enum() *LogLevel { - p := new(LogLevel) - *p = x - return p -} - -func (x LogLevel) String() string { - return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) -} - -func (LogLevel) Descriptor() protoreflect.EnumDescriptor { - return file_provisionerd_proto_provisionerd_proto_enumTypes[1].Descriptor() -} - -func (LogLevel) Type() protoreflect.EnumType { - return &file_provisionerd_proto_provisionerd_proto_enumTypes[1] -} - -func (x LogLevel) Number() protoreflect.EnumNumber { - return protoreflect.EnumNumber(x) -} - -// Deprecated: Use LogLevel.Descriptor instead. -func (LogLevel) EnumDescriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{1} -} - // Empty indicates a successful request/response. 
type Empty struct { state protoimpl.MessageState @@ -453,9 +394,10 @@ type Log struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Source LogSource `protobuf:"varint,1,opt,name=source,proto3,enum=provisionerd.LogSource" json:"source,omitempty"` - Level LogLevel `protobuf:"varint,2,opt,name=level,proto3,enum=provisionerd.LogLevel" json:"level,omitempty"` - CreatedAt int64 `protobuf:"varint,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Source LogSource `protobuf:"varint,1,opt,name=source,proto3,enum=provisionerd.LogSource" json:"source,omitempty"` + Level proto.LogLevel `protobuf:"varint,2,opt,name=level,proto3,enum=provisioner.LogLevel" json:"level,omitempty"` + CreatedAt int64 `protobuf:"varint,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` + Text string `protobuf:"bytes,4,opt,name=text,proto3" json:"text,omitempty"` // Types that are assignable to Type: // *Log_WorkspaceProvision_ // *Log_ProjectImport_ @@ -501,11 +443,11 @@ func (x *Log) GetSource() LogSource { return LogSource_PROVISIONER } -func (x *Log) GetLevel() LogLevel { +func (x *Log) GetLevel() proto.LogLevel { if x != nil { return x.Level } - return LogLevel_TRACE + return proto.LogLevel_TRACE } func (x *Log) GetCreatedAt() int64 { @@ -515,6 +457,13 @@ func (x *Log) GetCreatedAt() int64 { return 0 } +func (x *Log) GetText() string { + if x != nil { + return x.Text + } + return "" +} + func (m *Log) GetType() isLog_Type { if m != nil { return m.Type @@ -541,11 +490,11 @@ type isLog_Type interface { } type Log_WorkspaceProvision_ struct { - WorkspaceProvision *Log_WorkspaceProvision `protobuf:"bytes,4,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` + WorkspaceProvision *Log_WorkspaceProvision `protobuf:"bytes,5,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` } type Log_ProjectImport_ struct { - ProjectImport *Log_ProjectImport `protobuf:"bytes,5,opt,name=project_import,json=projectImport,proto3,oneof"` + ProjectImport *Log_ProjectImport `protobuf:"bytes,6,opt,name=project_import,json=projectImport,proto3,oneof"` } func (*Log_WorkspaceProvision_) isLog_Type() {} @@ -844,7 +793,6 @@ type Log_WorkspaceProvision struct { unknownFields protoimpl.UnknownFields WorkspaceHistoryId string `protobuf:"bytes,1,opt,name=workspace_history_id,json=workspaceHistoryId,proto3" json:"workspace_history_id,omitempty"` - Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` } func (x *Log_WorkspaceProvision) Reset() { @@ -886,20 +834,12 @@ func (x *Log_WorkspaceProvision) GetWorkspaceHistoryId() string { return "" } -func (x *Log_WorkspaceProvision) GetText() string { - if x != nil { - return x.Text - } - return "" -} - type Log_ProjectImport struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields ProjectHistoryId string `protobuf:"bytes,1,opt,name=project_history_id,json=projectHistoryId,proto3" json:"project_history_id,omitempty"` - Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` } func (x *Log_ProjectImport) Reset() { @@ -941,13 +881,6 @@ func (x *Log_ProjectImport) GetProjectHistoryId() string { return "" } -func (x *Log_ProjectImport) GetText() string { - if x != nil { - return x.Text - } - return "" -} - var File_provisionerd_proto_provisionerd_proto protoreflect.FileDescriptor var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ @@ -1035,69 +968,63 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = 
[]byte{ 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xdd, 0x03, 0x0a, 0x03, 0x4c, + 0x61, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc8, 0x03, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x12, 0x2c, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, - 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, - 0x12, 0x57, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x24, 0x2e, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, - 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0e, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, - 0x2e, 0x4c, 0x6f, 0x67, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, - 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, - 0x6f, 0x72, 0x74, 0x1a, 0x5a, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, 0x77, 0x6f, 0x72, - 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, - 0x63, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x74, - 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x1a, - 0x51, 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, - 0x12, 0x2c, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, 0x69, 0x73, 0x74, - 0x6f, 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x72, - 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x12, 0x12, - 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, - 0x78, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x09, 0x4a, 0x6f, - 0x62, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, - 0x64, 0x18, 0x01, 0x20, 
0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, - 0x0a, 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, - 0x04, 0x6c, 0x6f, 0x67, 0x73, 0x2a, 0x28, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, - 0x52, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x01, 0x2a, - 0x4a, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, - 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, - 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, - 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, - 0x12, 0x09, 0x0a, 0x05, 0x46, 0x41, 0x54, 0x41, 0x4c, 0x10, 0x05, 0x32, 0x8c, 0x02, 0x0a, 0x11, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, - 0x6e, 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, - 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, - 0x3b, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4a, 0x6f, 0x62, 0x55, - 0x70, 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, 0x12, 0x3c, 0x0a, 0x09, - 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, - 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, - 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, - 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, - 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, - 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x72, 0x63, 0x65, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, + 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, + 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x04, 
0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, + 0x65, 0x78, 0x74, 0x12, 0x57, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, + 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x4c, 0x6f, 0x67, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0e, + 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x06, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, + 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, + 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x46, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, + 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, + 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, + 0x73, 0x70, 0x61, 0x63, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x1a, 0x3d, + 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, + 0x2c, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, + 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x72, 0x6f, + 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x42, 0x06, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x55, 0x70, 0x64, 0x61, + 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, + 0x2a, 0x28, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0f, 0x0a, + 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x0a, + 0x0a, 0x06, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x01, 0x32, 0x8c, 0x02, 0x0a, 0x11, 0x50, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, + 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x3b, + 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4a, 0x6f, 0x62, 0x55, 0x70, + 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, 0x12, 0x3c, 
0x0a, 0x09, 0x43, + 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, + 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, + 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, + 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, + 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1112,48 +1039,48 @@ func file_provisionerd_proto_provisionerd_proto_rawDescGZIP() []byte { return file_provisionerd_proto_provisionerd_proto_rawDescData } -var file_provisionerd_proto_provisionerd_proto_enumTypes = make([]protoimpl.EnumInfo, 2) +var file_provisionerd_proto_provisionerd_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 12) var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (LogSource)(0), // 0: provisionerd.LogSource - (LogLevel)(0), // 1: provisionerd.LogLevel - (*Empty)(nil), // 2: provisionerd.Empty - (*AcquiredJob)(nil), // 3: provisionerd.AcquiredJob - (*CancelledJob)(nil), // 4: provisionerd.CancelledJob - (*CompletedJob)(nil), // 5: provisionerd.CompletedJob - (*Log)(nil), // 6: provisionerd.Log - (*JobUpdate)(nil), // 7: provisionerd.JobUpdate - (*AcquiredJob_WorkspaceProvision)(nil), // 8: provisionerd.AcquiredJob.WorkspaceProvision - (*AcquiredJob_ProjectImport)(nil), // 9: provisionerd.AcquiredJob.ProjectImport - (*CompletedJob_WorkspaceProvision)(nil), // 10: provisionerd.CompletedJob.WorkspaceProvision - (*CompletedJob_ProjectImport)(nil), // 11: provisionerd.CompletedJob.ProjectImport - (*Log_WorkspaceProvision)(nil), // 12: provisionerd.Log.WorkspaceProvision - (*Log_ProjectImport)(nil), // 13: provisionerd.Log.ProjectImport + (*Empty)(nil), // 1: provisionerd.Empty + (*AcquiredJob)(nil), // 2: provisionerd.AcquiredJob + (*CancelledJob)(nil), // 3: provisionerd.CancelledJob + (*CompletedJob)(nil), // 4: provisionerd.CompletedJob + (*Log)(nil), // 5: provisionerd.Log + (*JobUpdate)(nil), // 6: provisionerd.JobUpdate + (*AcquiredJob_WorkspaceProvision)(nil), // 7: provisionerd.AcquiredJob.WorkspaceProvision + (*AcquiredJob_ProjectImport)(nil), // 8: provisionerd.AcquiredJob.ProjectImport + (*CompletedJob_WorkspaceProvision)(nil), // 9: provisionerd.CompletedJob.WorkspaceProvision + (*CompletedJob_ProjectImport)(nil), // 10: provisionerd.CompletedJob.ProjectImport + (*Log_WorkspaceProvision)(nil), // 11: provisionerd.Log.WorkspaceProvision + (*Log_ProjectImport)(nil), // 12: provisionerd.Log.ProjectImport + (proto.LogLevel)(0), // 13: provisioner.LogLevel (*proto.ParameterValue)(nil), // 14: provisioner.ParameterValue (*proto.Resource)(nil), // 15: provisioner.Resource (*proto.ParameterSchema)(nil), // 16: provisioner.ParameterSchema } var 
file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ - 8, // 0: provisionerd.AcquiredJob.workspace_provision:type_name -> provisionerd.AcquiredJob.WorkspaceProvision - 9, // 1: provisionerd.AcquiredJob.project_import:type_name -> provisionerd.AcquiredJob.ProjectImport - 10, // 2: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision - 11, // 3: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport + 7, // 0: provisionerd.AcquiredJob.workspace_provision:type_name -> provisionerd.AcquiredJob.WorkspaceProvision + 8, // 1: provisionerd.AcquiredJob.project_import:type_name -> provisionerd.AcquiredJob.ProjectImport + 9, // 2: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision + 10, // 3: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport 0, // 4: provisionerd.Log.source:type_name -> provisionerd.LogSource - 1, // 5: provisionerd.Log.level:type_name -> provisionerd.LogLevel - 12, // 6: provisionerd.Log.workspace_provision:type_name -> provisionerd.Log.WorkspaceProvision - 13, // 7: provisionerd.Log.project_import:type_name -> provisionerd.Log.ProjectImport - 6, // 8: provisionerd.JobUpdate.logs:type_name -> provisionerd.Log + 13, // 5: provisionerd.Log.level:type_name -> provisioner.LogLevel + 11, // 6: provisionerd.Log.workspace_provision:type_name -> provisionerd.Log.WorkspaceProvision + 12, // 7: provisionerd.Log.project_import:type_name -> provisionerd.Log.ProjectImport + 5, // 8: provisionerd.JobUpdate.logs:type_name -> provisionerd.Log 14, // 9: provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue 15, // 10: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource 16, // 11: provisionerd.CompletedJob.ProjectImport.parameter_schemas:type_name -> provisioner.ParameterSchema - 2, // 12: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty - 7, // 13: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate - 4, // 14: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob - 5, // 15: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob - 3, // 16: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob - 2, // 17: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty - 2, // 18: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty - 2, // 19: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty + 1, // 12: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty + 6, // 13: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate + 3, // 14: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob + 4, // 15: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob + 2, // 16: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob + 1, // 17: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty + 1, // 18: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty + 1, // 19: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty 16, // [16:20] is the sub-list for method output_type 12, // [12:16] is the sub-list for method input_type 12, // [12:12] is the 
sub-list for extension type_name @@ -1329,7 +1256,7 @@ func file_provisionerd_proto_provisionerd_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisionerd_proto_provisionerd_proto_rawDesc, - NumEnums: 2, + NumEnums: 1, NumMessages: 12, NumExtensions: 0, NumServices: 1, diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index aa55358ae0920..7ea7472b0150b 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -61,32 +61,21 @@ enum LogSource { DAEMON = 1; } -// LogLevel represents severity of the log. -enum LogLevel { - TRACE = 0; - DEBUG = 1; - INFO = 2; - WARN = 3; - ERROR = 4; - FATAL = 5; -} - // Log represents output from a job. message Log { message WorkspaceProvision { string workspace_history_id = 1; - string text = 2; } message ProjectImport { string project_history_id = 1; - string text = 2; } LogSource source = 1; - LogLevel level = 2; + provisioner.LogLevel level = 2; int64 created_at = 3; + string text = 4; oneof type { - WorkspaceProvision workspace_provision = 4; - ProjectImport project_import = 5; + WorkspaceProvision workspace_provision = 5; + ProjectImport project_import = 6; } } diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index 9cc82fd0b19ec..c9ee05e816bfc 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -16,7 +16,7 @@ import ( "cdr.dev/slog" "github.com/coder/coder/provisionerd/proto" - provisionersdkproto "github.com/coder/coder/provisionersdk/proto" + sdkproto "github.com/coder/coder/provisionersdk/proto" "github.com/coder/retry" ) @@ -26,7 +26,7 @@ import ( type Dialer func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) // Provisioners maps provisioner ID to implementation. 
-type Provisioners map[string]provisionersdkproto.DRPCProvisionerClient +type Provisioners map[string]sdkproto.DRPCProvisionerClient type Options struct { AcquireInterval time.Duration @@ -194,16 +194,20 @@ func (a *API) acquireJob() { a.cancelActiveJob("tar attempts to target relative upper directory") return } + mode := header.FileInfo().Mode() + if mode == 0 { + mode = 0600 + } switch header.Typeflag { case tar.TypeDir: - err = os.MkdirAll(path, header.FileInfo().Mode()) + err = os.MkdirAll(path, mode) if err != nil { a.cancelActiveJob(fmt.Sprintf("mkdir %q: %s", path, err)) return } a.opts.Logger.Debug(context.Background(), "extracted directory", slog.F("path", path)) case tar.TypeReg: - file, err := os.Create(path) + file, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, mode) if err != nil { a.cancelActiveJob(fmt.Sprintf("create file %q: %s", path, err)) return @@ -225,6 +229,7 @@ func (a *API) acquireJob() { a.opts.Logger.Debug(context.Background(), "extracted file", slog.F("size_bytes", size), slog.F("path", path), + slog.F("mode", mode), ) } } @@ -235,25 +240,7 @@ func (a *API) acquireJob() { slog.F("project_history_name", jobType.ProjectImport.ProjectHistoryName), ) - response, err := provisioner.Parse(a.closeContext, &provisionersdkproto.Parse_Request{ - Directory: a.opts.WorkDirectory, - }) - if err != nil { - a.cancelActiveJob(fmt.Sprintf("parse source: %s", err)) - return - } - _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ - JobId: a.activeJob.JobId, - Type: &proto.CompletedJob_ProjectImport_{ - ProjectImport: &proto.CompletedJob_ProjectImport{ - ParameterSchemas: response.ParameterSchemas, - }, - }, - }) - if err != nil { - a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) - return - } + a.runProjectImport(provisioner, jobType) case *proto.AcquiredJob_WorkspaceProvision_: a.opts.Logger.Debug(context.Background(), "acquired job is workspace provision", slog.F("workspace_name", jobType.WorkspaceProvision.WorkspaceName), @@ -261,40 +248,139 @@ func (a *API) acquireJob() { slog.F("parameters", jobType.WorkspaceProvision.ParameterValues), ) - response, err := provisioner.Provision(a.closeContext, &provisionersdkproto.Provision_Request{ - Directory: a.opts.WorkDirectory, - ParameterValues: jobType.WorkspaceProvision.ParameterValues, - State: jobType.WorkspaceProvision.State, - }) + a.runWorkspaceProvision(provisioner, jobType) + default: + a.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(a.activeJob.Type).String())) + return + } + + a.activeJob = nil +} + +func (a *API) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_ProjectImport_) { + stream, err := provisioner.Parse(a.closeContext, &sdkproto.Parse_Request{ + Directory: a.opts.WorkDirectory, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("parse source: %s", err)) + return + } + defer stream.Close() + for { + msg, err := stream.Recv() if err != nil { - a.cancelActiveJob(fmt.Sprintf("provision: %s", err)) + a.cancelActiveJob(fmt.Sprintf("recv parse source: %s", err)) return } - a.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", - slog.F("resource_count", len(response.Resources)), - slog.F("resources", response.Resources), - slog.F("state_length", len(response.State)), - ) + switch msgType := msg.Type.(type) { + case *sdkproto.Parse_Response_Log: + a.opts.Logger.Debug(context.Background(), "parse job logged", + slog.F("level", msgType.Log.Level), + 
slog.F("text", msgType.Log.Text), + slog.F("project_history_id", job.ProjectImport.ProjectHistoryId), + ) - // Complete job may need to be async if we disconnected... - // When we reconnect we can flush any of these cached values. - _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ - JobId: a.activeJob.JobId, - Type: &proto.CompletedJob_WorkspaceProvision_{ - WorkspaceProvision: &proto.CompletedJob_WorkspaceProvision{ - State: response.State, - Resources: response.Resources, + a.logQueue = append(a.logQueue, proto.Log{ + Source: proto.LogSource_PROVISIONER, + Level: msgType.Log.Level, + CreatedAt: time.Now().UTC().UnixMilli(), + Text: msgType.Log.Text, + Type: &proto.Log_ProjectImport_{ + ProjectImport: &proto.Log_ProjectImport{ + ProjectHistoryId: job.ProjectImport.ProjectHistoryId, + }, }, - }, - }) - if err != nil { - a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + }) + case *sdkproto.Parse_Response_Complete: + _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ + JobId: a.activeJob.JobId, + Type: &proto.CompletedJob_ProjectImport_{ + ProjectImport: &proto.CompletedJob_ProjectImport{ + ParameterSchemas: msgType.Complete.ParameterSchemas, + }, + }, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + return + } + // Return so we stop looping! + return + default: + a.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", + reflect.TypeOf(msg.Type).String())) return } - default: - a.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(a.activeJob.Type).String())) + } +} + +func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_WorkspaceProvision_) { + stream, err := provisioner.Provision(a.closeContext, &sdkproto.Provision_Request{ + Directory: a.opts.WorkDirectory, + ParameterValues: job.WorkspaceProvision.ParameterValues, + State: job.WorkspaceProvision.State, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("provision: %s", err)) return } + defer stream.Close() + + for { + msg, err := stream.Recv() + if err != nil { + a.cancelActiveJob(fmt.Sprintf("recv workspace provision: %s", err)) + return + } + switch msgType := msg.Type.(type) { + case *sdkproto.Provision_Response_Log: + a.opts.Logger.Debug(context.Background(), "provision job logged", + slog.F("level", msgType.Log.Level), + slog.F("text", msgType.Log.Text), + slog.F("workspace_history_id", job.WorkspaceProvision.WorkspaceHistoryId), + ) + + a.logQueue = append(a.logQueue, proto.Log{ + Source: proto.LogSource_PROVISIONER, + Level: msgType.Log.Level, + CreatedAt: time.Now().UTC().UnixMilli(), + Text: msgType.Log.Text, + Type: &proto.Log_WorkspaceProvision_{ + WorkspaceProvision: &proto.Log_WorkspaceProvision{ + WorkspaceHistoryId: job.WorkspaceProvision.WorkspaceHistoryId, + }, + }, + }) + case *sdkproto.Provision_Response_Complete: + a.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", + slog.F("resource_count", len(msgType.Complete.Resources)), + slog.F("resources", msgType.Complete.Resources), + slog.F("state_length", len(msgType.Complete.State)), + ) + + // Complete job may need to be async if we disconnected... + // When we reconnect we can flush any of these cached values. 
+ _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ + JobId: a.activeJob.JobId, + Type: &proto.CompletedJob_WorkspaceProvision_{ + WorkspaceProvision: &proto.CompletedJob_WorkspaceProvision{ + State: msgType.Complete.State, + Resources: msgType.Complete.Resources, + }, + }, + }) + if err != nil { + a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + return + } + // Return so we stop looping! + return + default: + a.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", + reflect.TypeOf(msg.Type).String())) + return + } + } } func (a *API) cancelActiveJob(errMsg string) { diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 1040dc9eb2c4f..889fa4f5e47ac 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -42,7 +42,8 @@ func TestProvisionerd(t *testing.T) { setupProjectVersion := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest, project coderd.Project) coderd.ProjectHistory { var buffer bytes.Buffer writer := tar.NewWriter(&buffer) - content := `resource "null_resource" "hi" {}` + content := `variable "frog" {} +resource "null_resource" "dev" {}` err := writer.WriteHeader(&tar.Header{ Name: "main.tf", Size: int64(len(content)), @@ -103,6 +104,6 @@ func TestProvisionerd(t *testing.T) { WorkDirectory: t.TempDir(), }) defer api.Close() - time.Sleep(time.Millisecond * 1500) + time.Sleep(time.Millisecond * 2000) }) } diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 4801450d25aaf..077058e5418bd 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -20,6 +20,65 @@ const ( _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) +// LogLevel represents severity of the log. +type LogLevel int32 + +const ( + LogLevel_TRACE LogLevel = 0 + LogLevel_DEBUG LogLevel = 1 + LogLevel_INFO LogLevel = 2 + LogLevel_WARN LogLevel = 3 + LogLevel_ERROR LogLevel = 4 + LogLevel_FATAL LogLevel = 5 +) + +// Enum value maps for LogLevel. +var ( + LogLevel_name = map[int32]string{ + 0: "TRACE", + 1: "DEBUG", + 2: "INFO", + 3: "WARN", + 4: "ERROR", + 5: "FATAL", + } + LogLevel_value = map[string]int32{ + "TRACE": 0, + "DEBUG": 1, + "INFO": 2, + "WARN": 3, + "ERROR": 4, + "FATAL": 5, + } +) + +func (x LogLevel) Enum() *LogLevel { + p := new(LogLevel) + *p = x + return p +} + +func (x LogLevel) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (LogLevel) Descriptor() protoreflect.EnumDescriptor { + return file_provisionersdk_proto_provisioner_proto_enumTypes[0].Descriptor() +} + +func (LogLevel) Type() protoreflect.EnumType { + return &file_provisionersdk_proto_provisioner_proto_enumTypes[0] +} + +func (x LogLevel) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Use LogLevel.Descriptor instead. 
+func (LogLevel) EnumDescriptor() ([]byte, []int) { + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{0} +} + type ParameterSource_Scheme int32 const ( @@ -47,11 +106,11 @@ func (x ParameterSource_Scheme) String() string { } func (ParameterSource_Scheme) Descriptor() protoreflect.EnumDescriptor { - return file_provisionersdk_proto_provisioner_proto_enumTypes[0].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[1].Descriptor() } func (ParameterSource_Scheme) Type() protoreflect.EnumType { - return &file_provisionersdk_proto_provisioner_proto_enumTypes[0] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[1] } func (x ParameterSource_Scheme) Number() protoreflect.EnumNumber { @@ -93,11 +152,11 @@ func (x ParameterDestination_Scheme) String() string { } func (ParameterDestination_Scheme) Descriptor() protoreflect.EnumDescriptor { - return file_provisionersdk_proto_provisioner_proto_enumTypes[1].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[2].Descriptor() } func (ParameterDestination_Scheme) Type() protoreflect.EnumType { - return &file_provisionersdk_proto_provisioner_proto_enumTypes[1] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[2] } func (x ParameterDestination_Scheme) Number() protoreflect.EnumNumber { @@ -136,11 +195,11 @@ func (x ParameterSchema_TypeSystem) String() string { } func (ParameterSchema_TypeSystem) Descriptor() protoreflect.EnumDescriptor { - return file_provisionersdk_proto_provisioner_proto_enumTypes[2].Descriptor() + return file_provisionersdk_proto_provisioner_proto_enumTypes[3].Descriptor() } func (ParameterSchema_TypeSystem) Type() protoreflect.EnumType { - return &file_provisionersdk_proto_provisioner_proto_enumTypes[2] + return &file_provisionersdk_proto_provisioner_proto_enumTypes[3] } func (x ParameterSchema_TypeSystem) Number() protoreflect.EnumNumber { @@ -456,6 +515,62 @@ func (x *ParameterSchema) GetValidationCondition() string { return "" } +// Log represents output from a request. +type Log struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Level LogLevel `protobuf:"varint,1,opt,name=level,proto3,enum=provisioner.LogLevel" json:"level,omitempty"` + Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` +} + +func (x *Log) Reset() { + *x = Log{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Log) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Log) ProtoMessage() {} + +func (x *Log) ProtoReflect() protoreflect.Message { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Log.ProtoReflect.Descriptor instead. +func (*Log) Descriptor() ([]byte, []int) { + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4} +} + +func (x *Log) GetLevel() LogLevel { + if x != nil { + return x.Level + } + return LogLevel_TRACE +} + +func (x *Log) GetText() string { + if x != nil { + return x.Text + } + return "" +} + // Parse consumes source-code from a directory to produce inputs. 
type Parse struct { state protoimpl.MessageState @@ -466,7 +581,7 @@ type Parse struct { func (x *Parse) Reset() { *x = Parse{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -479,7 +594,7 @@ func (x *Parse) String() string { func (*Parse) ProtoMessage() {} func (x *Parse) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[4] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -492,7 +607,7 @@ func (x *Parse) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse.ProtoReflect.Descriptor instead. func (*Parse) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5} } // Resource is a provisioned unit. @@ -508,7 +623,7 @@ type Resource struct { func (x *Resource) Reset() { *x = Resource{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -521,7 +636,7 @@ func (x *Resource) String() string { func (*Resource) ProtoMessage() {} func (x *Resource) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[5] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -534,7 +649,7 @@ func (x *Resource) ProtoReflect() protoreflect.Message { // Deprecated: Use Resource.ProtoReflect.Descriptor instead. func (*Resource) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6} } func (x *Resource) GetName() string { @@ -561,7 +676,7 @@ type Provision struct { func (x *Provision) Reset() { *x = Provision{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -574,7 +689,7 @@ func (x *Provision) String() string { func (*Provision) ProtoMessage() {} func (x *Provision) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[6] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -587,7 +702,7 @@ func (x *Provision) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision.ProtoReflect.Descriptor instead. 
func (*Provision) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7} } type Parse_Request struct { @@ -601,7 +716,7 @@ type Parse_Request struct { func (x *Parse_Request) Reset() { *x = Parse_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -614,7 +729,7 @@ func (x *Parse_Request) String() string { func (*Parse_Request) ProtoMessage() {} func (x *Parse_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[7] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -627,7 +742,7 @@ func (x *Parse_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Request.ProtoReflect.Descriptor instead. func (*Parse_Request) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5, 0} } func (x *Parse_Request) GetDirectory() string { @@ -637,18 +752,68 @@ func (x *Parse_Request) GetDirectory() string { return "" } +type Parse_Complete struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + ParameterSchemas []*ParameterSchema `protobuf:"bytes,2,rep,name=parameter_schemas,json=parameterSchemas,proto3" json:"parameter_schemas,omitempty"` +} + +func (x *Parse_Complete) Reset() { + *x = Parse_Complete{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Parse_Complete) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Parse_Complete) ProtoMessage() {} + +func (x *Parse_Complete) ProtoReflect() protoreflect.Message { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Parse_Complete.ProtoReflect.Descriptor instead. 
+func (*Parse_Complete) Descriptor() ([]byte, []int) { + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5, 1} +} + +func (x *Parse_Complete) GetParameterSchemas() []*ParameterSchema { + if x != nil { + return x.ParameterSchemas + } + return nil +} + type Parse_Response struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - ParameterSchemas []*ParameterSchema `protobuf:"bytes,1,rep,name=parameter_schemas,json=parameterSchemas,proto3" json:"parameter_schemas,omitempty"` + // Types that are assignable to Type: + // *Parse_Response_Log + // *Parse_Response_Complete + Type isParse_Response_Type `protobuf_oneof:"type"` } func (x *Parse_Response) Reset() { *x = Parse_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -661,7 +826,7 @@ func (x *Parse_Response) String() string { func (*Parse_Response) ProtoMessage() {} func (x *Parse_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[8] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -674,16 +839,46 @@ func (x *Parse_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Parse_Response.ProtoReflect.Descriptor instead. func (*Parse_Response) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{4, 1} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{5, 2} } -func (x *Parse_Response) GetParameterSchemas() []*ParameterSchema { - if x != nil { - return x.ParameterSchemas +func (m *Parse_Response) GetType() isParse_Response_Type { + if m != nil { + return m.Type + } + return nil +} + +func (x *Parse_Response) GetLog() *Log { + if x, ok := x.GetType().(*Parse_Response_Log); ok { + return x.Log + } + return nil +} + +func (x *Parse_Response) GetComplete() *Parse_Complete { + if x, ok := x.GetType().(*Parse_Response_Complete); ok { + return x.Complete } return nil } +type isParse_Response_Type interface { + isParse_Response_Type() +} + +type Parse_Response_Log struct { + Log *Log `protobuf:"bytes,1,opt,name=log,proto3,oneof"` +} + +type Parse_Response_Complete struct { + Complete *Parse_Complete `protobuf:"bytes,2,opt,name=complete,proto3,oneof"` +} + +func (*Parse_Response_Log) isParse_Response_Type() {} + +func (*Parse_Response_Complete) isParse_Response_Type() {} + type Provision_Request struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -697,7 +892,7 @@ type Provision_Request struct { func (x *Provision_Request) Reset() { *x = Provision_Request{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -710,7 +905,7 @@ func (x *Provision_Request) String() string { func (*Provision_Request) ProtoMessage() {} func (x *Provision_Request) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[9] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := 
protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -723,7 +918,7 @@ func (x *Provision_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Request.ProtoReflect.Descriptor instead. func (*Provision_Request) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6, 0} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7, 0} } func (x *Provision_Request) GetDirectory() string { @@ -747,7 +942,7 @@ func (x *Provision_Request) GetState() []byte { return nil } -type Provision_Response struct { +type Provision_Complete struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields @@ -756,10 +951,67 @@ type Provision_Response struct { Resources []*Resource `protobuf:"bytes,2,rep,name=resources,proto3" json:"resources,omitempty"` } +func (x *Provision_Complete) Reset() { + *x = Provision_Complete{} + if protoimpl.UnsafeEnabled { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *Provision_Complete) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*Provision_Complete) ProtoMessage() {} + +func (x *Provision_Complete) ProtoReflect() protoreflect.Message { + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[12] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use Provision_Complete.ProtoReflect.Descriptor instead. +func (*Provision_Complete) Descriptor() ([]byte, []int) { + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7, 1} +} + +func (x *Provision_Complete) GetState() []byte { + if x != nil { + return x.State + } + return nil +} + +func (x *Provision_Complete) GetResources() []*Resource { + if x != nil { + return x.Resources + } + return nil +} + +type Provision_Response struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + // Types that are assignable to Type: + // *Provision_Response_Log + // *Provision_Response_Complete + Type isProvision_Response_Type `protobuf_oneof:"type"` +} + func (x *Provision_Response) Reset() { *x = Provision_Response{} if protoimpl.UnsafeEnabled { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -772,7 +1024,7 @@ func (x *Provision_Response) String() string { func (*Provision_Response) ProtoMessage() {} func (x *Provision_Response) ProtoReflect() protoreflect.Message { - mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[10] + mi := &file_provisionersdk_proto_provisioner_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -785,23 +1037,46 @@ func (x *Provision_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use Provision_Response.ProtoReflect.Descriptor instead. 
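// NOTE (editorial sketch, not part of the generated file): Provision mirrors
// Parse. The provisioner streams Provision_Response messages whose oneof
// carries either a Log or, once at the end, a Provision_Complete holding the
// updated state blob and the created resources. A hypothetical server-side
// reply, assuming import "github.com/coder/coder/provisionersdk/proto":
func exampleProvisionReply(stream proto.DRPCProvisioner_ProvisionStream, state []byte) error {
	// Report progress while the apply runs.
	err := stream.Send(&proto.Provision_Response{
		Type: &proto.Provision_Response_Log{
			Log: &proto.Log{Level: proto.LogLevel_INFO, Text: "applying plan"},
		},
	})
	if err != nil {
		return err
	}
	// Close out with the final state and the provisioned resources.
	return stream.Send(&proto.Provision_Response{
		Type: &proto.Provision_Response_Complete{
			Complete: &proto.Provision_Complete{
				State:     state,
				Resources: []*proto.Resource{{Name: "dev", Type: "null_resource"}},
			},
		},
	})
}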
func (*Provision_Response) Descriptor() ([]byte, []int) { - return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{6, 1} + return file_provisionersdk_proto_provisioner_proto_rawDescGZIP(), []int{7, 2} } -func (x *Provision_Response) GetState() []byte { - if x != nil { - return x.State +func (m *Provision_Response) GetType() isProvision_Response_Type { + if m != nil { + return m.Type } return nil } -func (x *Provision_Response) GetResources() []*Resource { - if x != nil { - return x.Resources +func (x *Provision_Response) GetLog() *Log { + if x, ok := x.GetType().(*Provision_Response_Log); ok { + return x.Log } return nil } +func (x *Provision_Response) GetComplete() *Provision_Complete { + if x, ok := x.GetType().(*Provision_Response_Complete); ok { + return x.Complete + } + return nil +} + +type isProvision_Response_Type interface { + isProvision_Response_Type() +} + +type Provision_Response_Log struct { + Log *Log `protobuf:"bytes,1,opt,name=log,proto3,oneof"` +} + +type Provision_Response_Complete struct { + Complete *Provision_Complete `protobuf:"bytes,2,opt,name=complete,proto3,oneof"` +} + +func (*Provision_Response_Log) isProvision_Response_Type() {} + +func (*Provision_Response_Complete) isProvision_Response_Type() {} + var File_provisionersdk_proto_provisioner_proto protoreflect.FileDescriptor var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ @@ -876,47 +1151,72 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x15, 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, - 0x6d, 0x12, 0x07, 0x0a, 0x03, 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x87, 0x01, 0x0a, 0x05, 0x50, - 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x6d, 0x12, 0x07, 0x0a, 0x03, 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x46, 0x0a, 0x03, 0x4c, 0x6f, + 0x67, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, + 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, + 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x12, + 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, + 0x78, 0x74, 0x22, 0xfc, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, 0x0a, 0x07, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, + 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, + 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, + 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, + 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, + 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x1a, 0x73, 0x0a, 0x08, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 
0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x39, + 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x61, 0x72, 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, + 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, + 0x65, 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xe3, 0x02, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, - 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, - 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, - 0x65, 0x6d, 0x61, 0x73, 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, - 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xea, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, - 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, - 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, - 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, - 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, - 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, - 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x73, 0x32, 0x9d, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 
0x72, 0x12, 0x40, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, - 0x73, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4c, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, - 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x2d, 0x5a, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, - 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, - 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, + 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, + 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, + 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, 0x0a, 0x08, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, + 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, + 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x73, 0x1a, 0x77, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, + 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, + 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x3d, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x43, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, + 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x4a, 0x0a, 0x08, 0x4c, + 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 0x41, 0x43, 0x45, + 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, + 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, + 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 
0x12, 0x09, 0x0a, 0x05, + 0x46, 0x41, 0x54, 0x41, 0x4c, 0x10, 0x05, 0x32, 0xa1, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x4e, 0x0a, 0x09, 0x50, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, + 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x42, 0x2d, 0x5a, 0x2b, 0x67, + 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x33, } var ( @@ -931,43 +1231,52 @@ func file_provisionersdk_proto_provisioner_proto_rawDescGZIP() []byte { return file_provisionersdk_proto_provisioner_proto_rawDescData } -var file_provisionersdk_proto_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 3) -var file_provisionersdk_proto_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 11) +var file_provisionersdk_proto_provisioner_proto_enumTypes = make([]protoimpl.EnumInfo, 4) +var file_provisionersdk_proto_provisioner_proto_msgTypes = make([]protoimpl.MessageInfo, 14) var file_provisionersdk_proto_provisioner_proto_goTypes = []interface{}{ - (ParameterSource_Scheme)(0), // 0: provisioner.ParameterSource.Scheme - (ParameterDestination_Scheme)(0), // 1: provisioner.ParameterDestination.Scheme - (ParameterSchema_TypeSystem)(0), // 2: provisioner.ParameterSchema.TypeSystem - (*ParameterSource)(nil), // 3: provisioner.ParameterSource - (*ParameterDestination)(nil), // 4: provisioner.ParameterDestination - (*ParameterValue)(nil), // 5: provisioner.ParameterValue - (*ParameterSchema)(nil), // 6: provisioner.ParameterSchema - (*Parse)(nil), // 7: provisioner.Parse - (*Resource)(nil), // 8: provisioner.Resource - (*Provision)(nil), // 9: provisioner.Provision - (*Parse_Request)(nil), // 10: provisioner.Parse.Request - (*Parse_Response)(nil), // 11: provisioner.Parse.Response - (*Provision_Request)(nil), // 12: provisioner.Provision.Request - (*Provision_Response)(nil), // 13: provisioner.Provision.Response + (LogLevel)(0), // 0: provisioner.LogLevel + (ParameterSource_Scheme)(0), // 1: provisioner.ParameterSource.Scheme + (ParameterDestination_Scheme)(0), // 2: provisioner.ParameterDestination.Scheme + (ParameterSchema_TypeSystem)(0), // 3: provisioner.ParameterSchema.TypeSystem + (*ParameterSource)(nil), // 4: provisioner.ParameterSource + (*ParameterDestination)(nil), // 5: provisioner.ParameterDestination + (*ParameterValue)(nil), // 6: provisioner.ParameterValue + (*ParameterSchema)(nil), // 7: provisioner.ParameterSchema + (*Log)(nil), // 8: provisioner.Log + (*Parse)(nil), // 9: provisioner.Parse + (*Resource)(nil), // 10: provisioner.Resource + 
(*Provision)(nil), // 11: provisioner.Provision + (*Parse_Request)(nil), // 12: provisioner.Parse.Request + (*Parse_Complete)(nil), // 13: provisioner.Parse.Complete + (*Parse_Response)(nil), // 14: provisioner.Parse.Response + (*Provision_Request)(nil), // 15: provisioner.Provision.Request + (*Provision_Complete)(nil), // 16: provisioner.Provision.Complete + (*Provision_Response)(nil), // 17: provisioner.Provision.Response } var file_provisionersdk_proto_provisioner_proto_depIdxs = []int32{ - 0, // 0: provisioner.ParameterSource.scheme:type_name -> provisioner.ParameterSource.Scheme - 1, // 1: provisioner.ParameterDestination.scheme:type_name -> provisioner.ParameterDestination.Scheme - 1, // 2: provisioner.ParameterValue.destination_scheme:type_name -> provisioner.ParameterDestination.Scheme - 3, // 3: provisioner.ParameterSchema.default_source:type_name -> provisioner.ParameterSource - 4, // 4: provisioner.ParameterSchema.default_destination:type_name -> provisioner.ParameterDestination - 2, // 5: provisioner.ParameterSchema.validation_type_system:type_name -> provisioner.ParameterSchema.TypeSystem - 6, // 6: provisioner.Parse.Response.parameter_schemas:type_name -> provisioner.ParameterSchema - 5, // 7: provisioner.Provision.Request.parameter_values:type_name -> provisioner.ParameterValue - 8, // 8: provisioner.Provision.Response.resources:type_name -> provisioner.Resource - 10, // 9: provisioner.Provisioner.Parse:input_type -> provisioner.Parse.Request - 12, // 10: provisioner.Provisioner.Provision:input_type -> provisioner.Provision.Request - 11, // 11: provisioner.Provisioner.Parse:output_type -> provisioner.Parse.Response - 13, // 12: provisioner.Provisioner.Provision:output_type -> provisioner.Provision.Response - 11, // [11:13] is the sub-list for method output_type - 9, // [9:11] is the sub-list for method input_type - 9, // [9:9] is the sub-list for extension type_name - 9, // [9:9] is the sub-list for extension extendee - 0, // [0:9] is the sub-list for field type_name + 1, // 0: provisioner.ParameterSource.scheme:type_name -> provisioner.ParameterSource.Scheme + 2, // 1: provisioner.ParameterDestination.scheme:type_name -> provisioner.ParameterDestination.Scheme + 2, // 2: provisioner.ParameterValue.destination_scheme:type_name -> provisioner.ParameterDestination.Scheme + 4, // 3: provisioner.ParameterSchema.default_source:type_name -> provisioner.ParameterSource + 5, // 4: provisioner.ParameterSchema.default_destination:type_name -> provisioner.ParameterDestination + 3, // 5: provisioner.ParameterSchema.validation_type_system:type_name -> provisioner.ParameterSchema.TypeSystem + 0, // 6: provisioner.Log.level:type_name -> provisioner.LogLevel + 7, // 7: provisioner.Parse.Complete.parameter_schemas:type_name -> provisioner.ParameterSchema + 8, // 8: provisioner.Parse.Response.log:type_name -> provisioner.Log + 13, // 9: provisioner.Parse.Response.complete:type_name -> provisioner.Parse.Complete + 6, // 10: provisioner.Provision.Request.parameter_values:type_name -> provisioner.ParameterValue + 10, // 11: provisioner.Provision.Complete.resources:type_name -> provisioner.Resource + 8, // 12: provisioner.Provision.Response.log:type_name -> provisioner.Log + 16, // 13: provisioner.Provision.Response.complete:type_name -> provisioner.Provision.Complete + 12, // 14: provisioner.Provisioner.Parse:input_type -> provisioner.Parse.Request + 15, // 15: provisioner.Provisioner.Provision:input_type -> provisioner.Provision.Request + 14, // 16: provisioner.Provisioner.Parse:output_type -> 
provisioner.Parse.Response + 17, // 17: provisioner.Provisioner.Provision:output_type -> provisioner.Provision.Response + 16, // [16:18] is the sub-list for method output_type + 14, // [14:16] is the sub-list for method input_type + 14, // [14:14] is the sub-list for extension type_name + 14, // [14:14] is the sub-list for extension extendee + 0, // [0:14] is the sub-list for field type_name } func init() { file_provisionersdk_proto_provisioner_proto_init() } @@ -1025,7 +1334,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse); i { + switch v := v.(*Log); i { case 0: return &v.state case 1: @@ -1037,7 +1346,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Resource); i { + switch v := v.(*Parse); i { case 0: return &v.state case 1: @@ -1049,7 +1358,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Provision); i { + switch v := v.(*Resource); i { case 0: return &v.state case 1: @@ -1061,7 +1370,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse_Request); i { + switch v := v.(*Provision); i { case 0: return &v.state case 1: @@ -1073,7 +1382,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Parse_Response); i { + switch v := v.(*Parse_Request); i { case 0: return &v.state case 1: @@ -1085,7 +1394,7 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Provision_Request); i { + switch v := v.(*Parse_Complete); i { case 0: return &v.state case 1: @@ -1097,6 +1406,42 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } file_provisionersdk_proto_provisioner_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Parse_Response); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionersdk_proto_provisioner_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Provision_Request); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionersdk_proto_provisioner_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*Provision_Complete); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_provisionersdk_proto_provisioner_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Provision_Response); i { case 0: return &v.state @@ -1109,13 +1454,21 @@ func file_provisionersdk_proto_provisioner_proto_init() { } } } + file_provisionersdk_proto_provisioner_proto_msgTypes[10].OneofWrappers = []interface{}{ + (*Parse_Response_Log)(nil), + 
(*Parse_Response_Complete)(nil), + } + file_provisionersdk_proto_provisioner_proto_msgTypes[13].OneofWrappers = []interface{}{ + (*Provision_Response_Log)(nil), + (*Provision_Response_Complete)(nil), + } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisionersdk_proto_provisioner_proto_rawDesc, - NumEnums: 3, - NumMessages: 11, + NumEnums: 4, + NumMessages: 14, NumExtensions: 0, NumServices: 1, }, diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index c865dced18fe9..e59c3e5de7acb 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -49,13 +49,35 @@ message ParameterSchema { string validation_condition = 11; } +// LogLevel represents severity of the log. +enum LogLevel { + TRACE = 0; + DEBUG = 1; + INFO = 2; + WARN = 3; + ERROR = 4; + FATAL = 5; +} + +// Log represents output from a request. +message Log { + LogLevel level = 1; + string text = 2; +} + // Parse consumes source-code from a directory to produce inputs. message Parse { message Request { string directory = 1; } + message Complete { + repeated ParameterSchema parameter_schemas = 2; + } message Response { - repeated ParameterSchema parameter_schemas = 1; + oneof type { + Log log = 1; + Complete complete = 2; + } } } @@ -72,13 +94,19 @@ message Provision { repeated ParameterValue parameter_values = 2; bytes state = 3; } - message Response { + message Complete { bytes state = 1; repeated Resource resources = 2; } + message Response { + oneof type { + Log log = 1; + Complete complete = 2; + } + } } service Provisioner { - rpc Parse(Parse.Request) returns (Parse.Response); - rpc Provision(Provision.Request) returns (Provision.Response); + rpc Parse(Parse.Request) returns (stream Parse.Response); + rpc Provision(Provision.Request) returns (stream Provision.Response); } \ No newline at end of file diff --git a/provisionersdk/proto/provisioner_drpc.pb.go b/provisionersdk/proto/provisioner_drpc.pb.go index 7a023b6631b42..cff7c3b2814f1 100644 --- a/provisionersdk/proto/provisioner_drpc.pb.go +++ b/provisionersdk/proto/provisioner_drpc.pb.go @@ -38,8 +38,8 @@ func (drpcEncoding_File_provisionersdk_proto_provisioner_proto) JSONUnmarshal(bu type DRPCProvisionerClient interface { DRPCConn() drpc.Conn - Parse(ctx context.Context, in *Parse_Request) (*Parse_Response, error) - Provision(ctx context.Context, in *Provision_Request) (*Provision_Response, error) + Parse(ctx context.Context, in *Parse_Request) (DRPCProvisioner_ParseClient, error) + Provision(ctx context.Context, in *Provision_Request) (DRPCProvisioner_ProvisionClient, error) } type drpcProvisionerClient struct { @@ -52,37 +52,91 @@ func NewDRPCProvisionerClient(cc drpc.Conn) DRPCProvisionerClient { func (c *drpcProvisionerClient) DRPCConn() drpc.Conn { return c.cc } -func (c *drpcProvisionerClient) Parse(ctx context.Context, in *Parse_Request) (*Parse_Response, error) { - out := new(Parse_Response) - err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Parse", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, in, out) +func (c *drpcProvisionerClient) Parse(ctx context.Context, in *Parse_Request) (DRPCProvisioner_ParseClient, error) { + stream, err := c.cc.NewStream(ctx, "/provisioner.Provisioner/Parse", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) if err != nil { return nil, err } - return out, nil + x := &drpcProvisioner_ParseClient{stream} + if err := x.MsgSend(in, 
drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { + return nil, err + } + if err := x.CloseSend(); err != nil { + return nil, err + } + return x, nil } -func (c *drpcProvisionerClient) Provision(ctx context.Context, in *Provision_Request) (*Provision_Response, error) { - out := new(Provision_Response) - err := c.cc.Invoke(ctx, "/provisioner.Provisioner/Provision", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, in, out) +type DRPCProvisioner_ParseClient interface { + drpc.Stream + Recv() (*Parse_Response, error) +} + +type drpcProvisioner_ParseClient struct { + drpc.Stream +} + +func (x *drpcProvisioner_ParseClient) Recv() (*Parse_Response, error) { + m := new(Parse_Response) + if err := x.MsgRecv(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { + return nil, err + } + return m, nil +} + +func (x *drpcProvisioner_ParseClient) RecvMsg(m *Parse_Response) error { + return x.MsgRecv(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) +} + +func (c *drpcProvisionerClient) Provision(ctx context.Context, in *Provision_Request) (DRPCProvisioner_ProvisionClient, error) { + stream, err := c.cc.NewStream(ctx, "/provisioner.Provisioner/Provision", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) if err != nil { return nil, err } - return out, nil + x := &drpcProvisioner_ProvisionClient{stream} + if err := x.MsgSend(in, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { + return nil, err + } + if err := x.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type DRPCProvisioner_ProvisionClient interface { + drpc.Stream + Recv() (*Provision_Response, error) +} + +type drpcProvisioner_ProvisionClient struct { + drpc.Stream +} + +func (x *drpcProvisioner_ProvisionClient) Recv() (*Provision_Response, error) { + m := new(Provision_Response) + if err := x.MsgRecv(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { + return nil, err + } + return m, nil +} + +func (x *drpcProvisioner_ProvisionClient) RecvMsg(m *Provision_Response) error { + return x.MsgRecv(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) } type DRPCProvisionerServer interface { - Parse(context.Context, *Parse_Request) (*Parse_Response, error) - Provision(context.Context, *Provision_Request) (*Provision_Response, error) + Parse(*Parse_Request, DRPCProvisioner_ParseStream) error + Provision(*Provision_Request, DRPCProvisioner_ProvisionStream) error } type DRPCProvisionerUnimplementedServer struct{} -func (s *DRPCProvisionerUnimplementedServer) Parse(context.Context, *Parse_Request) (*Parse_Response, error) { - return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +func (s *DRPCProvisionerUnimplementedServer) Parse(*Parse_Request, DRPCProvisioner_ParseStream) error { + return drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } -func (s *DRPCProvisionerUnimplementedServer) Provision(context.Context, *Provision_Request) (*Provision_Response, error) { - return nil, drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) +func (s *DRPCProvisionerUnimplementedServer) Provision(*Provision_Request, DRPCProvisioner_ProvisionStream) error { + return drpcerr.WithCode(errors.New("Unimplemented"), drpcerr.Unimplemented) } type DRPCProvisionerDescription struct{} @@ -94,19 +148,19 @@ func (DRPCProvisionerDescription) Method(n int) (string, drpc.Encoding, drpc.Rec case 0: return "/provisioner.Provisioner/Parse", 
drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { - return srv.(DRPCProvisionerServer). + return nil, srv.(DRPCProvisionerServer). Parse( - ctx, in1.(*Parse_Request), + &drpcProvisioner_ParseStream{in2.(drpc.Stream)}, ) }, DRPCProvisionerServer.Parse, true case 1: return "/provisioner.Provisioner/Provision", drpcEncoding_File_provisionersdk_proto_provisioner_proto{}, func(srv interface{}, ctx context.Context, in1, in2 interface{}) (drpc.Message, error) { - return srv.(DRPCProvisionerServer). + return nil, srv.(DRPCProvisionerServer). Provision( - ctx, in1.(*Provision_Request), + &drpcProvisioner_ProvisionStream{in2.(drpc.Stream)}, ) }, DRPCProvisionerServer.Provision, true default: @@ -120,32 +174,26 @@ func DRPCRegisterProvisioner(mux drpc.Mux, impl DRPCProvisionerServer) error { type DRPCProvisioner_ParseStream interface { drpc.Stream - SendAndClose(*Parse_Response) error + Send(*Parse_Response) error } type drpcProvisioner_ParseStream struct { drpc.Stream } -func (x *drpcProvisioner_ParseStream) SendAndClose(m *Parse_Response) error { - if err := x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { - return err - } - return x.CloseSend() +func (x *drpcProvisioner_ParseStream) Send(m *Parse_Response) error { + return x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) } type DRPCProvisioner_ProvisionStream interface { drpc.Stream - SendAndClose(*Provision_Response) error + Send(*Provision_Response) error } type drpcProvisioner_ProvisionStream struct { drpc.Stream } -func (x *drpcProvisioner_ProvisionStream) SendAndClose(m *Provision_Response) error { - if err := x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}); err != nil { - return err - } - return x.CloseSend() +func (x *drpcProvisioner_ProvisionStream) Send(m *Provision_Response) error { + return x.MsgSend(m, drpcEncoding_File_provisionersdk_proto_provisioner_proto{}) } From 666529ee0f719a42d2492c1c0489b5f9bf84f1a7 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sun, 30 Jan 2022 00:27:05 +0000 Subject: [PATCH 10/17] Fix linting --- provisioner/terraform/parse.go | 4 +++- provisioner/terraform/provision.go | 5 ++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/provisioner/terraform/parse.go b/provisioner/terraform/parse.go index 926ff812796da..c0ab765c1bf67 100644 --- a/provisioner/terraform/parse.go +++ b/provisioner/terraform/parse.go @@ -12,7 +12,9 @@ import ( // Parse extracts Terraform variables from source-code. func (*terraform) Parse(request *proto.Parse_Request, stream proto.DRPCProvisioner_ParseStream) error { - defer stream.CloseSend() + defer func() { + _ = stream.CloseSend() + }() module, diags := tfconfig.LoadModule(request.Directory) if diags.HasErrors() { diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index f9c63643aedd0..8e8952c3f3615 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -17,7 +17,10 @@ import ( // Provision executes `terraform apply`. 
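// NOTE (editorial, on the "Fix linting" change in this patch): CloseSend
// returns an error, so a bare `defer stream.CloseSend()` is flagged by
// errcheck-style linters. The hunks above and below wrap the call in a
// closure and discard the error explicitly; a minimal sketch of that pattern,
// using the stream type from this SDK:
func exampleDeferredClose(stream proto.DRPCProvisioner_ParseStream) {
	defer func() {
		// The error from closing the send side is deliberately ignored.
		_ = stream.CloseSend()
	}()
	// ... stream.Send(...) calls would happen here ...
}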
func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRPCProvisioner_ProvisionStream) error { - // defer stream.CloseSend() + defer func() { + _ = stream.CloseSend() + }() + ctx := stream.Context() statefilePath := filepath.Join(request.Directory, "terraform.tfstate") if len(request.State) > 0 { From ab7fbec33360aa69f2812ee74260906d097a33d0 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Sun, 30 Jan 2022 00:36:42 +0000 Subject: [PATCH 11/17] Rename variables --- provisionerd/provisionerd.go | 277 +++++++++++++++--------------- provisionerd/provisionerd_test.go | 10 +- 2 files changed, 144 insertions(+), 143 deletions(-) diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index c9ee05e816bfc..e8c62074c9c9f 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -15,32 +15,33 @@ import ( "time" "cdr.dev/slog" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/database" "github.com/coder/coder/provisionerd/proto" sdkproto "github.com/coder/coder/provisionersdk/proto" "github.com/coder/retry" ) -// Dialer returns a gRPC client to communicate with. -// The provisioner daemon handles intermittent connection failures -// for upgrades to coderd. -type Dialer func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) - // Provisioners maps provisioner ID to implementation. -type Provisioners map[string]sdkproto.DRPCProvisionerClient +type Provisioners map[database.ProvisionerType]sdkproto.DRPCProvisionerClient +// Options provides customizations to the behavior of a provisioner daemon. type Options struct { - AcquireInterval time.Duration - Logger slog.Logger - WorkDirectory string + Logger slog.Logger + + PollInterval time.Duration + Provisioners Provisioners + WorkDirectory string } -func New(apiDialer Dialer, provisioners Provisioners, opts *Options) *API { - if opts.AcquireInterval == 0 { - opts.AcquireInterval = 5 * time.Second +// New creates and starts a provisioner daemon. +func New(apiClient *codersdk.Client, provisioners Provisioners, opts *Options) io.Closer { + if opts.PollInterval == 0 { + opts.PollInterval = 5 * time.Second } ctx, ctxCancel := context.WithCancel(context.Background()) - api := &API{ - dialer: apiDialer, + daemon := &provisionerDaemon{ + apiClient: apiClient, provisioners: provisioners, opts: opts, @@ -48,150 +49,150 @@ func New(apiDialer Dialer, provisioners Provisioners, opts *Options) *API { closeContextCancel: ctxCancel, closed: make(chan struct{}), } - go api.connect() - return api + go daemon.connect() + return daemon } -type API struct { +type provisionerDaemon struct { provisioners Provisioners opts *Options - dialer Dialer + apiClient *codersdk.Client connectMutex sync.Mutex client proto.DRPCProvisionerDaemonClient updateStream proto.DRPCProvisionerDaemon_UpdateJobClient closeContext context.Context closeContextCancel context.CancelFunc - - closed chan struct{} - closeMutex sync.Mutex - closeError error + closed chan struct{} + closeMutex sync.Mutex + closeError error activeJob *proto.AcquiredJob activeJobMutex sync.Mutex logQueue []proto.Log } -// connect establishes a connection -func (a *API) connect() { - a.connectMutex.Lock() - defer a.connectMutex.Unlock() +// Connnect establishes a connection to coderd. 
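// NOTE (editorial sketch): after this rename, New takes a *codersdk.Client
// plus a Provisioners map keyed by database.ProvisionerType and returns an
// io.Closer. A hypothetical caller wiring a terraform provisioner could look
// like this; terraformClient is assumed to be an sdkproto.DRPCProvisionerClient
// (the test below dials one over a drpc connection), and the work directory
// is a made-up path.
func exampleStartDaemon(client *codersdk.Client, terraformClient sdkproto.DRPCProvisionerClient, logger slog.Logger) io.Closer {
	return provisionerd.New(client, provisionerd.Provisioners{
		// The daemon resolves this map with the provisioner type coderd hands back.
		database.ProvisionerTypeTerraform: terraformClient,
	}, &provisionerd.Options{
		Logger:        logger,
		PollInterval:  50 * time.Millisecond, // how often to ask coderd for a job
		WorkDirectory: "/tmp/provisionerd",   // hypothetical scratch directory
	})
}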
+func (p *provisionerDaemon) connect() { + p.connectMutex.Lock() + defer p.connectMutex.Unlock() var err error - for retrier := retry.New(50*time.Millisecond, 10*time.Second); retrier.Wait(a.closeContext); { - a.client, err = a.dialer(a.closeContext) + for retrier := retry.New(50*time.Millisecond, 10*time.Second); retrier.Wait(p.closeContext); { + p.client, err = p.apiClient.ProvisionerDaemonClient(p.closeContext) if err != nil { // Warn - a.opts.Logger.Warn(context.Background(), "failed to dial", slog.Error(err)) + p.opts.Logger.Warn(context.Background(), "failed to dial", slog.Error(err)) continue } - a.updateStream, err = a.client.UpdateJob(a.closeContext) + p.updateStream, err = p.client.UpdateJob(p.closeContext) if err != nil { - a.opts.Logger.Warn(context.Background(), "create update job stream", slog.Error(err)) + p.opts.Logger.Warn(context.Background(), "create update job stream", slog.Error(err)) continue } - a.opts.Logger.Debug(context.Background(), "connected") + p.opts.Logger.Debug(context.Background(), "connected") break } go func() { - if a.isClosed() { + if p.isClosed() { return } select { - case <-a.closed: + case <-p.closed: return - case <-a.updateStream.Context().Done(): + case <-p.updateStream.Context().Done(): // We use the update stream to detect when the connection // has been interrupted. This works well, because logs need // to buffer if a job is running in the background. - a.opts.Logger.Debug(context.Background(), "update stream ended", slog.Error(a.updateStream.Context().Err())) - a.connect() + p.opts.Logger.Debug(context.Background(), "update stream ended", slog.Error(p.updateStream.Context().Err())) + p.connect() } }() go func() { - if a.isClosed() { + if p.isClosed() { return } - ticker := time.NewTicker(a.opts.AcquireInterval) + ticker := time.NewTicker(p.opts.PollInterval) defer ticker.Stop() for { select { - case <-a.closed: + case <-p.closed: return - case <-a.updateStream.Context().Done(): + case <-p.updateStream.Context().Done(): return case <-ticker.C: - if a.activeJob != nil { - a.opts.Logger.Debug(context.Background(), "skipping acquire; job is already running") + if p.activeJob != nil { + p.opts.Logger.Debug(context.Background(), "skipping acquire; job is already running") continue } - a.acquireJob() + p.acquireJob() } } }() } -func (a *API) acquireJob() { - a.opts.Logger.Debug(context.Background(), "acquiring new job") +func (p *provisionerDaemon) acquireJob() { + p.opts.Logger.Debug(context.Background(), "acquiring new job") var err error - a.activeJobMutex.Lock() - a.activeJob, err = a.client.AcquireJob(a.closeContext, &proto.Empty{}) - a.activeJobMutex.Unlock() + p.activeJobMutex.Lock() + p.activeJob, err = p.client.AcquireJob(p.closeContext, &proto.Empty{}) + p.activeJobMutex.Unlock() if err != nil { - a.opts.Logger.Error(context.Background(), "acquire job", slog.Error(err)) + p.opts.Logger.Error(context.Background(), "acquire job", slog.Error(err)) return } - if a.activeJob.JobId == "" { - a.activeJob = nil - a.opts.Logger.Info(context.Background(), "no jobs available") + if p.activeJob.JobId == "" { + p.activeJob = nil + p.opts.Logger.Info(context.Background(), "no jobs available") return } - a.opts.Logger.Info(context.Background(), "acquired job", - slog.F("organization_name", a.activeJob.OrganizationName), - slog.F("project_name", a.activeJob.ProjectName), - slog.F("username", a.activeJob.UserName), - slog.F("provisioner", a.activeJob.Provisioner), + p.opts.Logger.Info(context.Background(), "acquired job", + slog.F("organization_name", 
p.activeJob.OrganizationName), + slog.F("project_name", p.activeJob.ProjectName), + slog.F("username", p.activeJob.UserName), + slog.F("provisioner", p.activeJob.Provisioner), ) - provisioner, hasProvisioner := a.provisioners[a.activeJob.Provisioner] + // It's safe to cast this ProvisionerType. This data is coming directly from coderd. + provisioner, hasProvisioner := p.provisioners[database.ProvisionerType(p.activeJob.Provisioner)] if !hasProvisioner { - a.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", a.activeJob.Provisioner)) + p.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", p.activeJob.Provisioner)) return } defer func() { // Cleanup the work directory after execution. - err = os.RemoveAll(a.opts.WorkDirectory) + err = os.RemoveAll(p.opts.WorkDirectory) if err != nil { - a.cancelActiveJob(fmt.Sprintf("remove all from %q directory: %s", a.opts.WorkDirectory, err)) + p.cancelActiveJob(fmt.Sprintf("remove all from %q directory: %s", p.opts.WorkDirectory, err)) return } - a.opts.Logger.Debug(context.Background(), "cleaned up work directory") + p.opts.Logger.Debug(context.Background(), "cleaned up work directory") }() - err = os.MkdirAll(a.opts.WorkDirectory, 0600) + err = os.MkdirAll(p.opts.WorkDirectory, 0600) if err != nil { - a.cancelActiveJob(fmt.Sprintf("create work directory %q: %s", a.opts.WorkDirectory, err)) + p.cancelActiveJob(fmt.Sprintf("create work directory %q: %s", p.opts.WorkDirectory, err)) return } - a.opts.Logger.Debug(context.Background(), "unpacking project source archive", slog.F("size_bytes", len(a.activeJob.ProjectSourceArchive))) - reader := tar.NewReader(bytes.NewBuffer(a.activeJob.ProjectSourceArchive)) + p.opts.Logger.Debug(context.Background(), "unpacking project source archive", slog.F("size_bytes", len(p.activeJob.ProjectSourceArchive))) + reader := tar.NewReader(bytes.NewBuffer(p.activeJob.ProjectSourceArchive)) for { header, err := reader.Next() if errors.Is(err, io.EOF) { break } if err != nil { - a.cancelActiveJob(fmt.Sprintf("read project source archive: %s", err)) + p.cancelActiveJob(fmt.Sprintf("read project source archive: %s", err)) return } // #nosec - path := filepath.Join(a.opts.WorkDirectory, header.Name) - if !strings.HasPrefix(path, filepath.Clean(a.opts.WorkDirectory)) { - a.cancelActiveJob("tar attempts to target relative upper directory") + path := filepath.Join(p.opts.WorkDirectory, header.Name) + if !strings.HasPrefix(path, filepath.Clean(p.opts.WorkDirectory)) { + p.cancelActiveJob("tar attempts to target relative upper directory") return } mode := header.FileInfo().Mode() @@ -202,14 +203,14 @@ func (a *API) acquireJob() { case tar.TypeDir: err = os.MkdirAll(path, mode) if err != nil { - a.cancelActiveJob(fmt.Sprintf("mkdir %q: %s", path, err)) + p.cancelActiveJob(fmt.Sprintf("mkdir %q: %s", path, err)) return } - a.opts.Logger.Debug(context.Background(), "extracted directory", slog.F("path", path)) + p.opts.Logger.Debug(context.Background(), "extracted directory", slog.F("path", path)) case tar.TypeReg: file, err := os.OpenFile(path, os.O_CREATE|os.O_RDWR, mode) if err != nil { - a.cancelActiveJob(fmt.Sprintf("create file %q: %s", path, err)) + p.cancelActiveJob(fmt.Sprintf("create file %q: %s", path, err)) return } // Max file size of 10MB. 
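// NOTE (editorial sketch): the extraction loop above guards against hostile
// archives in two ways - it rejects entries whose joined path would escape
// the work directory, and it caps how many bytes are copied per file (the
// exact copy call is elided by this hunk). A hypothetical bounded copy that
// treats hitting end-of-file before the cap as success:
func exampleBoundedCopy(dst io.Writer, src io.Reader) (int64, error) {
	// Copy at most 10 MiB; io.EOF here only means the entry was smaller
	// than the cap, so it is not an error.
	n, err := io.CopyN(dst, src, 10<<20)
	if errors.Is(err, io.EOF) {
		err = nil
	}
	return n, err
}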
@@ -218,15 +219,15 @@ func (a *API) acquireJob() { err = nil } if err != nil { - a.cancelActiveJob(fmt.Sprintf("copy file %q: %s", path, err)) + p.cancelActiveJob(fmt.Sprintf("copy file %q: %s", path, err)) return } err = file.Close() if err != nil { - a.cancelActiveJob(fmt.Sprintf("close file %q: %s", path, err)) + p.cancelActiveJob(fmt.Sprintf("close file %q: %s", path, err)) return } - a.opts.Logger.Debug(context.Background(), "extracted file", + p.opts.Logger.Debug(context.Background(), "extracted file", slog.F("size_bytes", size), slog.F("path", path), slog.F("mode", mode), @@ -234,53 +235,53 @@ func (a *API) acquireJob() { } } - switch jobType := a.activeJob.Type.(type) { + switch jobType := p.activeJob.Type.(type) { case *proto.AcquiredJob_ProjectImport_: - a.opts.Logger.Debug(context.Background(), "acquired job is project import", + p.opts.Logger.Debug(context.Background(), "acquired job is project import", slog.F("project_history_name", jobType.ProjectImport.ProjectHistoryName), ) - a.runProjectImport(provisioner, jobType) + p.runProjectImport(provisioner, jobType) case *proto.AcquiredJob_WorkspaceProvision_: - a.opts.Logger.Debug(context.Background(), "acquired job is workspace provision", + p.opts.Logger.Debug(context.Background(), "acquired job is workspace provision", slog.F("workspace_name", jobType.WorkspaceProvision.WorkspaceName), slog.F("state_length", len(jobType.WorkspaceProvision.State)), slog.F("parameters", jobType.WorkspaceProvision.ParameterValues), ) - a.runWorkspaceProvision(provisioner, jobType) + p.runWorkspaceProvision(provisioner, jobType) default: - a.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(a.activeJob.Type).String())) + p.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(p.activeJob.Type).String())) return } - a.activeJob = nil + p.activeJob = nil } -func (a *API) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_ProjectImport_) { - stream, err := provisioner.Parse(a.closeContext, &sdkproto.Parse_Request{ - Directory: a.opts.WorkDirectory, +func (p *provisionerDaemon) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_ProjectImport_) { + stream, err := provisioner.Parse(p.closeContext, &sdkproto.Parse_Request{ + Directory: p.opts.WorkDirectory, }) if err != nil { - a.cancelActiveJob(fmt.Sprintf("parse source: %s", err)) + p.cancelActiveJob(fmt.Sprintf("parse source: %s", err)) return } defer stream.Close() for { msg, err := stream.Recv() if err != nil { - a.cancelActiveJob(fmt.Sprintf("recv parse source: %s", err)) + p.cancelActiveJob(fmt.Sprintf("recv parse source: %s", err)) return } switch msgType := msg.Type.(type) { case *sdkproto.Parse_Response_Log: - a.opts.Logger.Debug(context.Background(), "parse job logged", + p.opts.Logger.Debug(context.Background(), "parse job logged", slog.F("level", msgType.Log.Level), slog.F("text", msgType.Log.Text), slog.F("project_history_id", job.ProjectImport.ProjectHistoryId), ) - a.logQueue = append(a.logQueue, proto.Log{ + p.logQueue = append(p.logQueue, proto.Log{ Source: proto.LogSource_PROVISIONER, Level: msgType.Log.Level, CreatedAt: time.Now().UTC().UnixMilli(), @@ -292,8 +293,8 @@ func (a *API) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job * }, }) case *sdkproto.Parse_Response_Complete: - _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ - JobId: a.activeJob.JobId, + _, err = 
p.client.CompleteJob(p.closeContext, &proto.CompletedJob{ + JobId: p.activeJob.JobId, Type: &proto.CompletedJob_ProjectImport_{ ProjectImport: &proto.CompletedJob_ProjectImport{ ParameterSchemas: msgType.Complete.ParameterSchemas, @@ -301,27 +302,27 @@ func (a *API) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job * }, }) if err != nil { - a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + p.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) return } // Return so we stop looping! return default: - a.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", + p.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", reflect.TypeOf(msg.Type).String())) return } } } -func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_WorkspaceProvision_) { - stream, err := provisioner.Provision(a.closeContext, &sdkproto.Provision_Request{ - Directory: a.opts.WorkDirectory, +func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_WorkspaceProvision_) { + stream, err := provisioner.Provision(p.closeContext, &sdkproto.Provision_Request{ + Directory: p.opts.WorkDirectory, ParameterValues: job.WorkspaceProvision.ParameterValues, State: job.WorkspaceProvision.State, }) if err != nil { - a.cancelActiveJob(fmt.Sprintf("provision: %s", err)) + p.cancelActiveJob(fmt.Sprintf("provision: %s", err)) return } defer stream.Close() @@ -329,18 +330,18 @@ func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, for { msg, err := stream.Recv() if err != nil { - a.cancelActiveJob(fmt.Sprintf("recv workspace provision: %s", err)) + p.cancelActiveJob(fmt.Sprintf("recv workspace provision: %s", err)) return } switch msgType := msg.Type.(type) { case *sdkproto.Provision_Response_Log: - a.opts.Logger.Debug(context.Background(), "provision job logged", + p.opts.Logger.Debug(context.Background(), "provision job logged", slog.F("level", msgType.Log.Level), slog.F("text", msgType.Log.Text), slog.F("workspace_history_id", job.WorkspaceProvision.WorkspaceHistoryId), ) - a.logQueue = append(a.logQueue, proto.Log{ + p.logQueue = append(p.logQueue, proto.Log{ Source: proto.LogSource_PROVISIONER, Level: msgType.Log.Level, CreatedAt: time.Now().UTC().UnixMilli(), @@ -352,7 +353,7 @@ func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, }, }) case *sdkproto.Provision_Response_Complete: - a.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", + p.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", slog.F("resource_count", len(msgType.Complete.Resources)), slog.F("resources", msgType.Complete.Resources), slog.F("state_length", len(msgType.Complete.State)), @@ -360,8 +361,8 @@ func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, // Complete job may need to be async if we disconnected... // When we reconnect we can flush any of these cached values. 
- _, err = a.client.CompleteJob(a.closeContext, &proto.CompletedJob{ - JobId: a.activeJob.JobId, + _, err = p.client.CompleteJob(p.closeContext, &proto.CompletedJob{ + JobId: p.activeJob.JobId, Type: &proto.CompletedJob_WorkspaceProvision_{ WorkspaceProvision: &proto.CompletedJob_WorkspaceProvision{ State: msgType.Complete.State, @@ -370,50 +371,50 @@ func (a *API) runWorkspaceProvision(provisioner sdkproto.DRPCProvisionerClient, }, }) if err != nil { - a.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) + p.cancelActiveJob(fmt.Sprintf("complete job: %s", err)) return } // Return so we stop looping! return default: - a.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", + p.cancelActiveJob(fmt.Sprintf("invalid message type %q received from provisioner", reflect.TypeOf(msg.Type).String())) return } } } -func (a *API) cancelActiveJob(errMsg string) { - a.activeJobMutex.Lock() - defer a.activeJobMutex.Unlock() +func (p *provisionerDaemon) cancelActiveJob(errMsg string) { + p.activeJobMutex.Lock() + defer p.activeJobMutex.Unlock() - if a.client == nil { - a.activeJob = nil + if p.client == nil { + p.activeJob = nil return } - if a.activeJob == nil { + if p.activeJob == nil { return } - a.opts.Logger.Info(context.Background(), "canceling active job", + p.opts.Logger.Info(context.Background(), "canceling active job", slog.F("error_message", errMsg), - slog.F("job_id", a.activeJob.JobId), + slog.F("job_id", p.activeJob.JobId), ) - _, err := a.client.CancelJob(a.closeContext, &proto.CancelledJob{ - JobId: a.activeJob.JobId, + _, err := p.client.CancelJob(p.closeContext, &proto.CancelledJob{ + JobId: p.activeJob.JobId, Error: fmt.Sprintf("provisioner daemon: %s", errMsg), }) if err != nil { - a.opts.Logger.Error(context.Background(), "couldn't cancel job", slog.Error(err)) + p.opts.Logger.Error(context.Background(), "couldn't cancel job", slog.Error(err)) } - a.opts.Logger.Debug(context.Background(), "canceled active job") - a.activeJob = nil + p.opts.Logger.Debug(context.Background(), "canceled active job") + p.activeJob = nil } // isClosed returns whether the API is closed or not. -func (a *API) isClosed() bool { +func (p *provisionerDaemon) isClosed() bool { select { - case <-a.closed: + case <-p.closed: return true default: return false @@ -421,34 +422,34 @@ func (a *API) isClosed() bool { } // Close ends the provisioner. It will mark any active jobs as canceled. -func (a *API) Close() error { - return a.closeWithError(nil) +func (p *provisionerDaemon) Close() error { + return p.closeWithError(nil) } // closeWithError closes the provisioner; subsequent reads/writes will return the error err. 
-func (a *API) closeWithError(err error) error { - a.closeMutex.Lock() - defer a.closeMutex.Unlock() - if a.isClosed() { - return a.closeError +func (p *provisionerDaemon) closeWithError(err error) error { + p.closeMutex.Lock() + defer p.closeMutex.Unlock() + if p.isClosed() { + return p.closeError } - if a.activeJob != nil { + if p.activeJob != nil { errMsg := "provisioner daemon was shutdown gracefully" if err != nil { errMsg = err.Error() } - a.cancelActiveJob(errMsg) + p.cancelActiveJob(errMsg) } - a.opts.Logger.Debug(context.Background(), "closing server with error", slog.Error(err)) - a.closeError = err - close(a.closed) - a.closeContextCancel() + p.opts.Logger.Debug(context.Background(), "closing server with error", slog.Error(err)) + p.closeError = err + close(p.closed) + p.closeContextCancel() - if a.updateStream != nil { - _ = a.client.DRPCConn().Close() - _ = a.updateStream.Close() + if p.updateStream != nil { + _ = p.client.DRPCConn().Close() + _ = p.updateStream.Close() } return err diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 889fa4f5e47ac..8f725b8cbf3a4 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -62,7 +62,7 @@ resource "null_resource" "dev" {}` t.Run("InstantClose", func(t *testing.T) { t.Parallel() server := coderdtest.New(t) - api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{}, &provisionerd.Options{ + api := provisionerd.New(server.Client, provisionerd.Provisioners{}, &provisionerd.Options{ Logger: slogtest.Make(t, nil), }) defer api.Close() @@ -96,12 +96,12 @@ resource "null_resource" "dev" {}` require.NoError(t, err) }() - api := provisionerd.New(server.Client.ProvisionerDaemonClient, provisionerd.Provisioners{ + api := provisionerd.New(server.Client, provisionerd.Provisioners{ string(database.ProvisionerTypeTerraform): proto.NewDRPCProvisionerClient(drpcconn.New(clientPipe)), }, &provisionerd.Options{ - Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), - AcquireInterval: 50 * time.Millisecond, - WorkDirectory: t.TempDir(), + Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), + PollInterval: 50 * time.Millisecond, + WorkDirectory: t.TempDir(), }) defer api.Close() time.Sleep(time.Millisecond * 2000) From c79653e85ceb701175ef3a127a815acf82115c3f Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 31 Jan 2022 18:02:54 +0000 Subject: [PATCH 12/17] Add new query functions for storing project history logs --- database/databasefake/databasefake.go | 106 ++++++++-- database/dump.sql | 45 +++-- database/migrations/000002_projects.up.sql | 22 +++ database/migrations/000003_workspaces.up.sql | 27 +-- database/querier.go | 4 + database/query.sql | 42 ++++ database/query.sql.go | 195 +++++++++++++++++++ 7 files changed, 385 insertions(+), 56 deletions(-) diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index e7f4f3cdb192d..3eae2ea41d04a 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -18,16 +18,18 @@ func New() database.Store { organizationMembers: make([]database.OrganizationMember, 0), users: make([]database.User, 0), - parameterValue: make([]database.ParameterValue, 0), - project: make([]database.Project, 0), - projectHistory: make([]database.ProjectHistory, 0), - projectParameter: make([]database.ProjectParameter, 0), - provisionerDaemons: make([]database.ProvisionerDaemon, 0), - provisionerJobs: make([]database.ProvisionerJob, 0), - workspace: 
make([]database.Workspace, 0), - workspaceResource: make([]database.WorkspaceResource, 0), - workspaceHistory: make([]database.WorkspaceHistory, 0), - workspaceAgent: make([]database.WorkspaceAgent, 0), + parameterValue: make([]database.ParameterValue, 0), + project: make([]database.Project, 0), + projectHistory: make([]database.ProjectHistory, 0), + projectHistoryLog: make([]database.ProjectHistoryLog, 0), + projectParameter: make([]database.ProjectParameter, 0), + provisionerDaemons: make([]database.ProvisionerDaemon, 0), + provisionerJobs: make([]database.ProvisionerJob, 0), + workspace: make([]database.Workspace, 0), + workspaceResource: make([]database.WorkspaceResource, 0), + workspaceHistory: make([]database.WorkspaceHistory, 0), + workspaceHistoryLog: make([]database.WorkspaceHistoryLog, 0), + workspaceAgent: make([]database.WorkspaceAgent, 0), } } @@ -40,16 +42,18 @@ type fakeQuerier struct { users []database.User // New tables - parameterValue []database.ParameterValue - project []database.Project - projectHistory []database.ProjectHistory - projectParameter []database.ProjectParameter - provisionerDaemons []database.ProvisionerDaemon - provisionerJobs []database.ProvisionerJob - workspace []database.Workspace - workspaceResource []database.WorkspaceResource - workspaceHistory []database.WorkspaceHistory - workspaceAgent []database.WorkspaceAgent + parameterValue []database.ParameterValue + project []database.Project + projectHistory []database.ProjectHistory + projectHistoryLog []database.ProjectHistoryLog + projectParameter []database.ProjectParameter + provisionerDaemons []database.ProvisionerDaemon + provisionerJobs []database.ProvisionerJob + workspace []database.Workspace + workspaceAgent []database.WorkspaceAgent + workspaceHistory []database.WorkspaceHistory + workspaceHistoryLog []database.WorkspaceHistoryLog + workspaceResource []database.WorkspaceResource } // InTx doesn't rollback data properly for in-memory yet. 
@@ -318,6 +322,36 @@ func (q *fakeQuerier) GetProjectHistoryByProjectID(_ context.Context, projectID return history, nil } +func (q *fakeQuerier) GetProjectHistoryByProjectIDAndName(_ context.Context, arg database.GetProjectHistoryByProjectIDAndNameParams) (database.ProjectHistory, error) { + for _, projectHistory := range q.projectHistory { + if projectHistory.ProjectID.String() != arg.ProjectID.String() { + continue + } + if !strings.EqualFold(projectHistory.Name, arg.Name) { + continue + } + return projectHistory, nil + } + return database.ProjectHistory{}, sql.ErrNoRows +} + +func (q *fakeQuerier) GetProjectHistoryLogsByIDBefore(_ context.Context, arg database.GetProjectHistoryLogsByIDBeforeParams) ([]database.ProjectHistoryLog, error) { + logs := make([]database.ProjectHistoryLog, 0) + for _, projectHistoryLog := range q.projectHistoryLog { + if projectHistoryLog.ProjectHistoryID.String() != arg.ProjectHistoryID.String() { + continue + } + if projectHistoryLog.CreatedAt.After(arg.CreatedAt) { + continue + } + logs = append(logs, projectHistoryLog) + } + if len(logs) == 0 { + return nil, sql.ErrNoRows + } + return logs, nil +} + func (q *fakeQuerier) GetProjectHistoryByID(_ context.Context, projectHistoryID uuid.UUID) (database.ProjectHistory, error) { for _, projectHistory := range q.projectHistory { if projectHistory.ID.String() != projectHistoryID.String() { @@ -486,6 +520,22 @@ func (q *fakeQuerier) InsertProjectHistory(_ context.Context, arg database.Inser return history, nil } +func (q *fakeQuerier) InsertProjectHistoryLogs(_ context.Context, arg database.InsertProjectHistoryLogsParams) ([]database.ProjectHistoryLog, error) { + logs := make([]database.ProjectHistoryLog, 0) + for index, output := range arg.Output { + logs = append(logs, database.ProjectHistoryLog{ + ProjectHistoryID: arg.ProjectHistoryID, + ID: arg.ID[index], + CreatedAt: arg.CreatedAt[index], + Source: arg.Source[index], + Level: arg.Level[index], + Output: output, + }) + } + q.projectHistoryLog = append(q.projectHistoryLog, logs...) + return logs, nil +} + func (q *fakeQuerier) InsertProjectParameter(_ context.Context, arg database.InsertProjectParameterParams) (database.ProjectParameter, error) { //nolint:gosimple param := database.ProjectParameter{ @@ -596,6 +646,22 @@ func (q *fakeQuerier) InsertWorkspaceHistory(_ context.Context, arg database.Ins return workspaceHistory, nil } +func (q *fakeQuerier) InsertWorkspaceHistoryLogs(_ context.Context, arg database.InsertWorkspaceHistoryLogsParams) ([]database.WorkspaceHistoryLog, error) { + logs := make([]database.WorkspaceHistoryLog, 0) + for index, output := range arg.Output { + logs = append(logs, database.WorkspaceHistoryLog{ + WorkspaceHistoryID: arg.WorkspaceHistoryID, + ID: arg.ID[index], + CreatedAt: arg.CreatedAt[index], + Source: arg.Source[index], + Level: arg.Level[index], + Output: output, + }) + } + q.workspaceHistoryLog = append(q.workspaceHistoryLog, logs...) 
+ return logs, nil +} + func (q *fakeQuerier) InsertWorkspaceResource(_ context.Context, arg database.InsertWorkspaceResourceParams) (database.WorkspaceResource, error) { workspaceResource := database.WorkspaceResource{ ID: arg.ID, diff --git a/database/dump.sql b/database/dump.sql index af4874e96db94..67ffe460d32c1 100644 --- a/database/dump.sql +++ b/database/dump.sql @@ -5,8 +5,12 @@ CREATE TYPE log_level AS ENUM ( 'debug', 'info', 'warn', - 'error', - 'fatal' + 'error' +); + +CREATE TYPE log_source AS ENUM ( + 'provisioner_daemon', + 'provisioner' ); CREATE TYPE login_type AS ENUM ( @@ -142,6 +146,15 @@ CREATE TABLE project_history ( import_job_id uuid NOT NULL ); +CREATE TABLE project_history_log ( + id uuid NOT NULL, + project_history_id uuid NOT NULL, + created_at timestamp with time zone NOT NULL, + source log_source NOT NULL, + level log_level NOT NULL, + output character varying(1024) NOT NULL +); + CREATE TABLE project_parameter ( id uuid NOT NULL, created_at timestamp with time zone NOT NULL, @@ -241,13 +254,13 @@ CREATE TABLE workspace_history ( provision_job_id uuid NOT NULL ); -CREATE TABLE workspace_log ( - workspace_id uuid NOT NULL, +CREATE TABLE workspace_history_log ( + id uuid NOT NULL, workspace_history_id uuid NOT NULL, - created timestamp with time zone NOT NULL, - logged_by character varying(255), + created_at timestamp with time zone NOT NULL, + source log_source NOT NULL, level log_level NOT NULL, - log jsonb NOT NULL + output character varying(1024) NOT NULL ); CREATE TABLE workspace_resource ( @@ -269,6 +282,9 @@ ALTER TABLE ONLY parameter_value ALTER TABLE ONLY project_history ADD CONSTRAINT project_history_id_key UNIQUE (id); +ALTER TABLE ONLY project_history_log + ADD CONSTRAINT project_history_log_id_key UNIQUE (id); + ALTER TABLE ONLY project_history ADD CONSTRAINT project_history_project_id_name_key UNIQUE (project_id, name); @@ -299,6 +315,9 @@ ALTER TABLE ONLY workspace_agent ALTER TABLE ONLY workspace_history ADD CONSTRAINT workspace_history_id_key UNIQUE (id); +ALTER TABLE ONLY workspace_history_log + ADD CONSTRAINT workspace_history_log_id_key UNIQUE (id); + ALTER TABLE ONLY workspace ADD CONSTRAINT workspace_id_key UNIQUE (id); @@ -311,7 +330,8 @@ ALTER TABLE ONLY workspace_resource ALTER TABLE ONLY workspace_resource ADD CONSTRAINT workspace_resource_workspace_history_id_name_key UNIQUE (workspace_history_id, name); -CREATE INDEX workspace_log_index ON workspace_log USING btree (workspace_id, workspace_history_id); +ALTER TABLE ONLY project_history_log + ADD CONSTRAINT project_history_log_project_history_id_fkey FOREIGN KEY (project_history_id) REFERENCES project_history(id) ON DELETE CASCADE; ALTER TABLE ONLY project_history ADD CONSTRAINT project_history_project_id_fkey FOREIGN KEY (project_id) REFERENCES project(id); @@ -325,18 +345,15 @@ ALTER TABLE ONLY provisioner_job ALTER TABLE ONLY workspace_agent ADD CONSTRAINT workspace_agent_workspace_resource_id_fkey FOREIGN KEY (workspace_resource_id) REFERENCES workspace_resource(id) ON DELETE CASCADE; +ALTER TABLE ONLY workspace_history_log + ADD CONSTRAINT workspace_history_log_workspace_history_id_fkey FOREIGN KEY (workspace_history_id) REFERENCES workspace_history(id) ON DELETE CASCADE; + ALTER TABLE ONLY workspace_history ADD CONSTRAINT workspace_history_project_history_id_fkey FOREIGN KEY (project_history_id) REFERENCES project_history(id) ON DELETE CASCADE; ALTER TABLE ONLY workspace_history ADD CONSTRAINT workspace_history_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspace(id) 
ON DELETE CASCADE; -ALTER TABLE ONLY workspace_log - ADD CONSTRAINT workspace_log_workspace_history_id_fkey FOREIGN KEY (workspace_history_id) REFERENCES workspace_history(id) ON DELETE CASCADE; - -ALTER TABLE ONLY workspace_log - ADD CONSTRAINT workspace_log_workspace_id_fkey FOREIGN KEY (workspace_id) REFERENCES workspace(id) ON DELETE CASCADE; - ALTER TABLE ONLY workspace ADD CONSTRAINT workspace_project_id_fkey FOREIGN KEY (project_id) REFERENCES project(id); diff --git a/database/migrations/000002_projects.up.sql b/database/migrations/000002_projects.up.sql index 251b368ef3701..9a0df4fb44d43 100644 --- a/database/migrations/000002_projects.up.sql +++ b/database/migrations/000002_projects.up.sql @@ -90,3 +90,25 @@ CREATE TABLE project_parameter ( validation_value_type varchar(64) NOT NULL, UNIQUE(project_history_id, name) ); + +CREATE TYPE log_level AS ENUM ( + 'trace', + 'debug', + 'info', + 'warn', + 'error' +); + +CREATE TYPE log_source AS ENUM ( + 'provisioner_daemon', + 'provisioner' +); + +CREATE TABLE project_history_log ( + id uuid NOT NULL UNIQUE, + project_history_id uuid NOT NULL REFERENCES project_history (id) ON DELETE CASCADE, + created_at timestamptz NOT NULL, + source log_source NOT NULL, + level log_level NOT NULL, + output varchar(1024) NOT NULL +); diff --git a/database/migrations/000003_workspaces.up.sql b/database/migrations/000003_workspaces.up.sql index 55b6150815723..92fe7862214e8 100644 --- a/database/migrations/000003_workspaces.up.sql +++ b/database/migrations/000003_workspaces.up.sql @@ -63,28 +63,11 @@ CREATE TABLE workspace_agent ( resource_metadata jsonb NOT NULL ); -CREATE TYPE log_level AS ENUM ( - 'trace', - 'debug', - 'info', - 'warn', - 'error', - 'fatal' -); - -CREATE TABLE workspace_log ( - workspace_id uuid NOT NULL REFERENCES workspace (id) ON DELETE CASCADE, - -- workspace_history_id can be NULL because some events are not going to be part of a - -- deliberate transition, e.g. 
an infrastructure failure that kills the workspace +CREATE TABLE workspace_history_log ( + id uuid NOT NULL UNIQUE, workspace_history_id uuid NOT NULL REFERENCES workspace_history (id) ON DELETE CASCADE, - created timestamptz NOT NULL, --- not sure this is necessary, also not sure it's necessary separate from the log column - logged_by varchar(255), + created_at timestamptz NOT NULL, + source log_source NOT NULL, level log_level NOT NULL, - log jsonb NOT NULL + output varchar(1024) NOT NULL ); - -CREATE INDEX workspace_log_index ON workspace_log ( - workspace_id, - workspace_history_id -); \ No newline at end of file diff --git a/database/querier.go b/database/querier.go index 1c908c186c544..203bf677655c8 100644 --- a/database/querier.go +++ b/database/querier.go @@ -20,6 +20,8 @@ type querier interface { GetProjectByOrganizationAndName(ctx context.Context, arg GetProjectByOrganizationAndNameParams) (Project, error) GetProjectHistoryByID(ctx context.Context, id uuid.UUID) (ProjectHistory, error) GetProjectHistoryByProjectID(ctx context.Context, projectID uuid.UUID) ([]ProjectHistory, error) + GetProjectHistoryByProjectIDAndName(ctx context.Context, arg GetProjectHistoryByProjectIDAndNameParams) (ProjectHistory, error) + GetProjectHistoryLogsByIDBefore(ctx context.Context, arg GetProjectHistoryLogsByIDBeforeParams) ([]ProjectHistoryLog, error) GetProjectParametersByHistoryID(ctx context.Context, projectHistoryID uuid.UUID) ([]ProjectParameter, error) GetProjectsByOrganizationIDs(ctx context.Context, ids []string) ([]Project, error) GetProvisionerDaemonByID(ctx context.Context, id uuid.UUID) (ProvisionerDaemon, error) @@ -42,6 +44,7 @@ type querier interface { InsertParameterValue(ctx context.Context, arg InsertParameterValueParams) (ParameterValue, error) InsertProject(ctx context.Context, arg InsertProjectParams) (Project, error) InsertProjectHistory(ctx context.Context, arg InsertProjectHistoryParams) (ProjectHistory, error) + InsertProjectHistoryLogs(ctx context.Context, arg InsertProjectHistoryLogsParams) ([]ProjectHistoryLog, error) InsertProjectParameter(ctx context.Context, arg InsertProjectParameterParams) (ProjectParameter, error) InsertProvisionerDaemon(ctx context.Context, arg InsertProvisionerDaemonParams) (ProvisionerDaemon, error) InsertProvisionerJob(ctx context.Context, arg InsertProvisionerJobParams) (ProvisionerJob, error) @@ -49,6 +52,7 @@ type querier interface { InsertWorkspace(ctx context.Context, arg InsertWorkspaceParams) (Workspace, error) InsertWorkspaceAgent(ctx context.Context, arg InsertWorkspaceAgentParams) (WorkspaceAgent, error) InsertWorkspaceHistory(ctx context.Context, arg InsertWorkspaceHistoryParams) (WorkspaceHistory, error) + InsertWorkspaceHistoryLogs(ctx context.Context, arg InsertWorkspaceHistoryLogsParams) ([]WorkspaceHistoryLog, error) InsertWorkspaceResource(ctx context.Context, arg InsertWorkspaceResourceParams) (WorkspaceResource, error) UpdateAPIKeyByID(ctx context.Context, arg UpdateAPIKeyByIDParams) error UpdateProvisionerDaemonByID(ctx context.Context, arg UpdateProvisionerDaemonByIDParams) error diff --git a/database/query.sql b/database/query.sql index 41e0d0f3dedb1..56314fa12d549 100644 --- a/database/query.sql +++ b/database/query.sql @@ -171,6 +171,15 @@ FROM WHERE project_id = $1; +-- name: GetProjectHistoryByProjectIDAndName :one +SELECT + * +FROM + project_history +WHERE + project_id = $1 + AND name = $2; + -- name: GetProjectHistoryByID :one SELECT * @@ -179,6 +188,17 @@ FROM WHERE id = $1; +-- name: GetProjectHistoryLogsByIDBefore 
:many
+SELECT
+    *
+FROM
+    project_history_log
+WHERE
+    project_history_id = $1
+    AND created_at <= $2
+ORDER BY
+    created_at;
+
 -- name: GetProvisionerDaemonByID :one
 SELECT
     *
 FROM
@@ -378,6 +398,17 @@ INSERT INTO
 VALUES
     ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *;
 
+-- name: InsertProjectHistoryLogs :many
+INSERT INTO
+    project_history_log
+SELECT
+    @project_history_id :: uuid AS project_history_id,
+    unnest(@id :: uuid [ ]) AS id,
+    unnest(@created_at :: timestamptz [ ]) AS created_at,
+    unnest(@source :: log_source [ ]) as source,
+    unnest(@level :: log_level [ ]) as level,
+    unnest(@output :: varchar(1024) [ ]) as output RETURNING *;
+
 -- name: InsertProjectParameter :one
 INSERT INTO
     project_parameter (
@@ -499,6 +530,17 @@ INSERT INTO
 VALUES
     ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *;
 
+-- name: InsertWorkspaceHistoryLogs :many
+INSERT INTO
+    workspace_history_log
+SELECT
+    @workspace_history_id :: uuid AS workspace_history_id,
+    unnest(@id :: uuid [ ]) AS id,
+    unnest(@created_at :: timestamptz [ ]) AS created_at,
+    unnest(@source :: log_source [ ]) as source,
+    unnest(@level :: log_level [ ]) as level,
+    unnest(@output :: varchar(1024) [ ]) as output RETURNING *;
+
 -- name: InsertWorkspaceResource :one
 INSERT INTO
     workspace_resource (
diff --git a/database/query.sql.go b/database/query.sql.go
index d87bf0636e555..f6e1f5f165dff 100644
--- a/database/query.sql.go
+++ b/database/query.sql.go
@@ -418,6 +418,85 @@ func (q *sqlQuerier) GetProjectHistoryByProjectID(ctx context.Context, projectID
 	return items, nil
 }
 
+const getProjectHistoryByProjectIDAndName = `-- name: GetProjectHistoryByProjectIDAndName :one
+SELECT
+	id, project_id, created_at, updated_at, name, description, storage_method, storage_source, import_job_id
+FROM
+	project_history
+WHERE
+	project_id = $1
+	AND name = $2
+`
+
+type GetProjectHistoryByProjectIDAndNameParams struct {
+	ProjectID uuid.UUID `db:"project_id" json:"project_id"`
+	Name      string    `db:"name" json:"name"`
+}
+
+func (q *sqlQuerier) GetProjectHistoryByProjectIDAndName(ctx context.Context, arg GetProjectHistoryByProjectIDAndNameParams) (ProjectHistory, error) {
+	row := q.db.QueryRowContext(ctx, getProjectHistoryByProjectIDAndName, arg.ProjectID, arg.Name)
+	var i ProjectHistory
+	err := row.Scan(
+		&i.ID,
+		&i.ProjectID,
+		&i.CreatedAt,
+		&i.UpdatedAt,
+		&i.Name,
+		&i.Description,
+		&i.StorageMethod,
+		&i.StorageSource,
+		&i.ImportJobID,
+	)
+	return i, err
+}
+
+const getProjectHistoryLogsByIDBefore = `-- name: GetProjectHistoryLogsByIDBefore :many
+SELECT
+	id, project_history_id, created_at, source, level, output
+FROM
+	project_history_log
+WHERE
+	project_history_id = $1
+	AND created_at <= $2
+ORDER BY
+	created_at
+`
+
+type GetProjectHistoryLogsByIDBeforeParams struct {
+	ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"`
+	CreatedAt        time.Time `db:"created_at" json:"created_at"`
+}
+
+func (q *sqlQuerier) GetProjectHistoryLogsByIDBefore(ctx context.Context, arg GetProjectHistoryLogsByIDBeforeParams) ([]ProjectHistoryLog, error) {
+	rows, err := q.db.QueryContext(ctx, getProjectHistoryLogsByIDBefore, arg.ProjectHistoryID, arg.CreatedAt)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []ProjectHistoryLog
+	for rows.Next() {
+		var i ProjectHistoryLog
+		if err := rows.Scan(
+			&i.ID,
+			&i.ProjectHistoryID,
+			&i.CreatedAt,
+			&i.Source,
+			&i.Level,
+			&i.Output,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Close(); err != nil {
+		return nil, err
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
 const getProjectParametersByHistoryID = `-- name: GetProjectParametersByHistoryID :many
 SELECT
 	id, created_at, project_history_id, name, description, default_source_scheme, default_source_value, allow_override_source, default_destination_scheme, default_destination_value, allow_override_destination, default_refresh, redisplay_value, validation_error, validation_condition, validation_type_system, validation_value_type
@@ -1317,6 +1396,64 @@ func (q *sqlQuerier) InsertProjectHistory(ctx context.Context, arg InsertProject
 	return i, err
 }
 
+const insertProjectHistoryLogs = `-- name: InsertProjectHistoryLogs :many
+INSERT INTO
+	project_history_log
+SELECT
+	$1 :: uuid AS project_history_id,
+	unnest($2 :: uuid [ ]) AS id,
+	unnest($3 :: timestamptz [ ]) AS created_at,
+	unnest($4 :: log_source [ ]) as source,
+	unnest($5 :: log_level [ ]) as level,
+	unnest($6 :: varchar(1024) [ ]) as output RETURNING id, project_history_id, created_at, source, level, output
+`
+
+type InsertProjectHistoryLogsParams struct {
+	ProjectHistoryID uuid.UUID   `db:"project_history_id" json:"project_history_id"`
+	ID               []uuid.UUID `db:"id" json:"id"`
+	CreatedAt        []time.Time `db:"created_at" json:"created_at"`
+	Source           []LogSource `db:"source" json:"source"`
+	Level            []LogLevel  `db:"level" json:"level"`
+	Output           []string    `db:"output" json:"output"`
+}
+
+func (q *sqlQuerier) InsertProjectHistoryLogs(ctx context.Context, arg InsertProjectHistoryLogsParams) ([]ProjectHistoryLog, error) {
+	rows, err := q.db.QueryContext(ctx, insertProjectHistoryLogs,
+		arg.ProjectHistoryID,
+		pq.Array(arg.ID),
+		pq.Array(arg.CreatedAt),
+		pq.Array(arg.Source),
+		pq.Array(arg.Level),
+		pq.Array(arg.Output),
+	)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+	var items []ProjectHistoryLog
+	for rows.Next() {
+		var i ProjectHistoryLog
+		if err := rows.Scan(
+			&i.ID,
+			&i.ProjectHistoryID,
+			&i.CreatedAt,
+			&i.Source,
+			&i.Level,
+			&i.Output,
+		); err != nil {
+			return nil, err
+		}
+		items = append(items, i)
+	}
+	if err := rows.Close(); err != nil {
+		return nil, err
+	}
+	if err := rows.Err(); err != nil {
+		return nil, err
+	}
+	return items, nil
+}
+
 const insertProjectParameter = `-- name: InsertProjectParameter :one
 INSERT INTO
 	project_parameter (
@@ -1723,6 +1860,64 @@ func (q *sqlQuerier) InsertWorkspaceHistory(ctx context.Context, arg InsertWorks
 	return i, err
 }
 
+const insertWorkspaceHistoryLogs = `-- name: InsertWorkspaceHistoryLogs :many
+INSERT INTO
+	workspace_history_log
+SELECT
+	$1 :: uuid AS workspace_history_id,
+	unnest($2 :: uuid [ ]) AS id,
+	unnest($3 :: timestamptz [ ]) AS created_at,
+	unnest($4 :: log_source [ ]) as source,
+	unnest($5 :: log_level [ ]) as level,
+	unnest($6 :: varchar(1024) [ ]) as output RETURNING id, workspace_history_id, created_at, source, level, output
+`
+
+type InsertWorkspaceHistoryLogsParams struct {
+	WorkspaceHistoryID uuid.UUID   `db:"workspace_history_id" json:"workspace_history_id"`
+	ID                 []uuid.UUID `db:"id" json:"id"`
+	CreatedAt          []time.Time `db:"created_at" json:"created_at"`
+	Source             []LogSource `db:"source" json:"source"`
+	Level              []LogLevel  `db:"level" json:"level"`
+	Output             []string    `db:"output" json:"output"`
+}
+
+func (q *sqlQuerier) InsertWorkspaceHistoryLogs(ctx context.Context, arg InsertWorkspaceHistoryLogsParams) ([]WorkspaceHistoryLog, error) {
+	rows, err := q.db.QueryContext(ctx, insertWorkspaceHistoryLogs,
+		arg.WorkspaceHistoryID,
+		pq.Array(arg.ID),
+		pq.Array(arg.CreatedAt),
+		
pq.Array(arg.Source), + pq.Array(arg.Level), + pq.Array(arg.Output), + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []WorkspaceHistoryLog + for rows.Next() { + var i WorkspaceHistoryLog + if err := rows.Scan( + &i.ID, + &i.WorkspaceHistoryID, + &i.CreatedAt, + &i.Source, + &i.Level, + &i.Output, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const insertWorkspaceResource = `-- name: InsertWorkspaceResource :one INSERT INTO workspace_resource ( From 1f4dfc7c31243f6209fcffef8b2103b9e015a08d Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 31 Jan 2022 19:20:37 +0000 Subject: [PATCH 13/17] Add queries for workspace logs --- database/databasefake/databasefake.go | 31 +++++++ database/dump.sql | 7 ++ database/migrations/000003_workspaces.up.sql | 7 +- database/models.go | 44 +++++++-- database/querier.go | 2 + database/query.sql | 23 ++++- database/query.sql.go | 98 +++++++++++++++++++- 7 files changed, 197 insertions(+), 15 deletions(-) diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index 3eae2ea41d04a..3061a11494dc1 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -188,6 +188,23 @@ func (q *fakeQuerier) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(_ context.Con return database.WorkspaceHistory{}, sql.ErrNoRows } +func (q *fakeQuerier) GetWorkspaceHistoryLogsByIDBefore(ctx context.Context, arg database.GetWorkspaceHistoryLogsByIDBeforeParams) ([]database.WorkspaceHistoryLog, error) { + logs := make([]database.WorkspaceHistoryLog, 0) + for _, workspaceHistoryLog := range q.workspaceHistoryLog { + if workspaceHistoryLog.WorkspaceHistoryID.String() != arg.WorkspaceHistoryID.String() { + continue + } + if workspaceHistoryLog.CreatedAt.After(arg.CreatedAt) { + continue + } + logs = append(logs, workspaceHistoryLog) + } + if len(logs) == 0 { + return nil, sql.ErrNoRows + } + return logs, nil +} + func (q *fakeQuerier) GetWorkspaceHistoryByWorkspaceID(_ context.Context, workspaceID uuid.UUID) ([]database.WorkspaceHistory, error) { history := make([]database.WorkspaceHistory, 0) for _, workspaceHistory := range q.workspaceHistory { @@ -201,6 +218,19 @@ func (q *fakeQuerier) GetWorkspaceHistoryByWorkspaceID(_ context.Context, worksp return history, nil } +func (q *fakeQuerier) GetWorkspaceHistoryByWorkspaceIDAndName(ctx context.Context, arg database.GetWorkspaceHistoryByWorkspaceIDAndNameParams) (database.WorkspaceHistory, error) { + for _, workspaceHistory := range q.workspaceHistory { + if workspaceHistory.WorkspaceID.String() != arg.WorkspaceID.String() { + continue + } + if !strings.EqualFold(workspaceHistory.Name, arg.Name) { + continue + } + return workspaceHistory, nil + } + return database.WorkspaceHistory{}, sql.ErrNoRows +} + func (q *fakeQuerier) GetWorkspacesByProjectAndUserID(_ context.Context, arg database.GetWorkspacesByProjectAndUserIDParams) ([]database.Workspace, error) { workspaces := make([]database.Workspace, 0) for _, workspace := range q.workspace { @@ -636,6 +666,7 @@ func (q *fakeQuerier) InsertWorkspaceHistory(_ context.Context, arg database.Ins CreatedAt: arg.CreatedAt, UpdatedAt: arg.UpdatedAt, WorkspaceID: arg.WorkspaceID, + Name: arg.Name, ProjectHistoryID: arg.ProjectHistoryID, BeforeID: arg.BeforeID, Transition: arg.Transition, diff --git a/database/dump.sql b/database/dump.sql 
index 67ffe460d32c1..0cea42a2355aa 100644 --- a/database/dump.sql +++ b/database/dump.sql @@ -246,6 +246,7 @@ CREATE TABLE workspace_history ( completed_at timestamp with time zone, workspace_id uuid NOT NULL, project_history_id uuid NOT NULL, + name character varying(64) NOT NULL, before_id uuid, after_id uuid, transition workspace_transition NOT NULL, @@ -318,9 +319,15 @@ ALTER TABLE ONLY workspace_history ALTER TABLE ONLY workspace_history_log ADD CONSTRAINT workspace_history_log_id_key UNIQUE (id); +ALTER TABLE ONLY workspace_history + ADD CONSTRAINT workspace_history_workspace_id_name_key UNIQUE (workspace_id, name); + ALTER TABLE ONLY workspace ADD CONSTRAINT workspace_id_key UNIQUE (id); +ALTER TABLE ONLY workspace + ADD CONSTRAINT workspace_owner_id_name_key UNIQUE (owner_id, name); + ALTER TABLE ONLY workspace_resource ADD CONSTRAINT workspace_resource_id_key UNIQUE (id); diff --git a/database/migrations/000003_workspaces.up.sql b/database/migrations/000003_workspaces.up.sql index 92fe7862214e8..7e7172483d7c1 100644 --- a/database/migrations/000003_workspaces.up.sql +++ b/database/migrations/000003_workspaces.up.sql @@ -4,7 +4,8 @@ CREATE TABLE workspace ( updated_at timestamptz NOT NULL, owner_id text NOT NULL, project_id uuid NOT NULL REFERENCES project (id), - name varchar(64) NOT NULL + name varchar(64) NOT NULL, + UNIQUE(owner_id, name) ); CREATE TYPE workspace_transition AS ENUM ( @@ -22,6 +23,7 @@ CREATE TABLE workspace_history ( completed_at timestamptz, workspace_id uuid NOT NULL REFERENCES workspace (id) ON DELETE CASCADE, project_history_id uuid NOT NULL REFERENCES project_history (id) ON DELETE CASCADE, + name varchar(64) NOT NULL, before_id uuid, after_id uuid, transition workspace_transition NOT NULL, @@ -29,7 +31,8 @@ CREATE TABLE workspace_history ( -- State stored by the provisioner provisioner_state bytea, -- Job ID of the action - provision_job_id uuid NOT NULL + provision_job_id uuid NOT NULL, + UNIQUE(workspace_id, name) ); -- Cloud resources produced by a provision job. 
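With workspace_history now carrying a unique name per workspace and workspace_history_log mirroring project_history_log, the store exposes a batch insert and a cursor-style read for provisioner output. The sketch below is not part of the patch; the function, ctx, store, and historyID names are illustrative, and it assumes any database.Store implementation (for example the in-memory databasefake.New() used in the tests):

package example

import (
	"context"
	"time"

	"github.com/google/uuid"

	"github.com/coder/coder/database"
)

// writeAndReadHistoryLogs batch-inserts two provisioner log rows for one
// workspace history, then reads back everything created up to "now".
func writeAndReadHistoryLogs(ctx context.Context, store database.Store, historyID uuid.UUID) ([]database.WorkspaceHistoryLog, error) {
	now := database.Now()
	_, err := store.InsertWorkspaceHistoryLogs(ctx, database.InsertWorkspaceHistoryLogsParams{
		WorkspaceHistoryID: historyID,
		ID:                 []uuid.UUID{uuid.New(), uuid.New()},
		CreatedAt:          []time.Time{now, now},
		Source:             []database.LogSource{database.LogSourceProvisionerDaemon, database.LogSourceProvisioner},
		Level:              []database.LogLevel{database.LogLevelInfo, database.LogLevelInfo},
		Output:             []string{"acquired job", "applying terraform"},
	})
	if err != nil {
		return nil, err
	}
	// An earlier timestamp replays only older output; both the SQL query and
	// the in-memory fake filter on created_at <= the supplied time.
	return store.GetWorkspaceHistoryLogsByIDBefore(ctx, database.GetWorkspaceHistoryLogsByIDBeforeParams{
		WorkspaceHistoryID: historyID,
		CreatedAt:          now,
	})
}

The parameters are parallel column slices rather than a slice of row structs because the insert query expands them server-side with unnest, so one round trip writes a whole batch of log lines.
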
diff --git a/database/models.go b/database/models.go index 3b9fdfcd83668..6fd1dad97d4fd 100644 --- a/database/models.go +++ b/database/models.go @@ -19,7 +19,6 @@ const ( LogLevelInfo LogLevel = "info" LogLevelWarn LogLevel = "warn" LogLevelError LogLevel = "error" - LogLevelFatal LogLevel = "fatal" ) func (e *LogLevel) Scan(src interface{}) error { @@ -34,6 +33,25 @@ func (e *LogLevel) Scan(src interface{}) error { return nil } +type LogSource string + +const ( + LogSourceProvisionerDaemon LogSource = "provisioner_daemon" + LogSourceProvisioner LogSource = "provisioner" +) + +func (e *LogSource) Scan(src interface{}) error { + switch s := src.(type) { + case []byte: + *e = LogSource(s) + case string: + *e = LogSource(s) + default: + return fmt.Errorf("unsupported scan type for LogSource: %T", src) + } + return nil +} + type LoginType string const ( @@ -307,6 +325,15 @@ type ProjectHistory struct { ImportJobID uuid.UUID `db:"import_job_id" json:"import_job_id"` } +type ProjectHistoryLog struct { + ID uuid.UUID `db:"id" json:"id"` + ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + Source LogSource `db:"source" json:"source"` + Level LogLevel `db:"level" json:"level"` + Output string `db:"output" json:"output"` +} + type ProjectParameter struct { ID uuid.UUID `db:"id" json:"id"` CreatedAt time.Time `db:"created_at" json:"created_at"` @@ -398,6 +425,7 @@ type WorkspaceHistory struct { CompletedAt sql.NullTime `db:"completed_at" json:"completed_at"` WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` + Name string `db:"name" json:"name"` BeforeID uuid.NullUUID `db:"before_id" json:"before_id"` AfterID uuid.NullUUID `db:"after_id" json:"after_id"` Transition WorkspaceTransition `db:"transition" json:"transition"` @@ -406,13 +434,13 @@ type WorkspaceHistory struct { ProvisionJobID uuid.UUID `db:"provision_job_id" json:"provision_job_id"` } -type WorkspaceLog struct { - WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` - WorkspaceHistoryID uuid.UUID `db:"workspace_history_id" json:"workspace_history_id"` - Created time.Time `db:"created" json:"created"` - LoggedBy sql.NullString `db:"logged_by" json:"logged_by"` - Level LogLevel `db:"level" json:"level"` - Log json.RawMessage `db:"log" json:"log"` +type WorkspaceHistoryLog struct { + ID uuid.UUID `db:"id" json:"id"` + WorkspaceHistoryID uuid.UUID `db:"workspace_history_id" json:"workspace_history_id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` + Source LogSource `db:"source" json:"source"` + Level LogLevel `db:"level" json:"level"` + Output string `db:"output" json:"output"` } type WorkspaceResource struct { diff --git a/database/querier.go b/database/querier.go index 203bf677655c8..fac25eed8f4da 100644 --- a/database/querier.go +++ b/database/querier.go @@ -34,7 +34,9 @@ type querier interface { GetWorkspaceByUserIDAndName(ctx context.Context, arg GetWorkspaceByUserIDAndNameParams) (Workspace, error) GetWorkspaceHistoryByID(ctx context.Context, id uuid.UUID) (WorkspaceHistory, error) GetWorkspaceHistoryByWorkspaceID(ctx context.Context, workspaceID uuid.UUID) ([]WorkspaceHistory, error) + GetWorkspaceHistoryByWorkspaceIDAndName(ctx context.Context, arg GetWorkspaceHistoryByWorkspaceIDAndNameParams) (WorkspaceHistory, error) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(ctx context.Context, workspaceID uuid.UUID) (WorkspaceHistory, error) + 
GetWorkspaceHistoryLogsByIDBefore(ctx context.Context, arg GetWorkspaceHistoryLogsByIDBeforeParams) ([]WorkspaceHistoryLog, error) GetWorkspaceResourcesByHistoryID(ctx context.Context, workspaceHistoryID uuid.UUID) ([]WorkspaceResource, error) GetWorkspacesByProjectAndUserID(ctx context.Context, arg GetWorkspacesByProjectAndUserIDParams) ([]Workspace, error) GetWorkspacesByUserID(ctx context.Context, ownerID string) ([]Workspace, error) diff --git a/database/query.sql b/database/query.sql index 56314fa12d549..75fea94b2e09d 100644 --- a/database/query.sql +++ b/database/query.sql @@ -261,6 +261,15 @@ WHERE LIMIT 1; +-- name: GetWorkspaceHistoryByWorkspaceIDAndName :one +SELECT + * +FROM + workspace_history +WHERE + workspace_id = $1 + AND name = $2; + -- name: GetWorkspaceHistoryByWorkspaceID :many SELECT * @@ -280,6 +289,17 @@ WHERE LIMIT 1; +-- name: GetWorkspaceHistoryLogsByIDBefore :many +SELECT + * +FROM + workspace_history_log +WHERE + workspace_history_id = $1 + AND created_at <= $2 +ORDER BY + created_at; + -- name: GetWorkspaceResourcesByHistoryID :many SELECT * @@ -523,12 +543,13 @@ INSERT INTO workspace_id, project_history_id, before_id, + name, transition, initiator, provision_job_id ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING *; + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING *; -- name: InsertWorkspaceHistoryLogs :many INSERT INTO diff --git a/database/query.sql.go b/database/query.sql.go index f6e1f5f165dff..3c61628450c90 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -830,7 +830,7 @@ func (q *sqlQuerier) GetWorkspaceByUserIDAndName(ctx context.Context, arg GetWor const getWorkspaceHistoryByID = `-- name: GetWorkspaceHistoryByID :one SELECT - id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id + id, created_at, updated_at, completed_at, workspace_id, project_history_id, name, before_id, after_id, transition, initiator, provisioner_state, provision_job_id FROM workspace_history WHERE @@ -849,6 +849,7 @@ func (q *sqlQuerier) GetWorkspaceHistoryByID(ctx context.Context, id uuid.UUID) &i.CompletedAt, &i.WorkspaceID, &i.ProjectHistoryID, + &i.Name, &i.BeforeID, &i.AfterID, &i.Transition, @@ -861,7 +862,7 @@ func (q *sqlQuerier) GetWorkspaceHistoryByID(ctx context.Context, id uuid.UUID) const getWorkspaceHistoryByWorkspaceID = `-- name: GetWorkspaceHistoryByWorkspaceID :many SELECT - id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id + id, created_at, updated_at, completed_at, workspace_id, project_history_id, name, before_id, after_id, transition, initiator, provisioner_state, provision_job_id FROM workspace_history WHERE @@ -884,6 +885,7 @@ func (q *sqlQuerier) GetWorkspaceHistoryByWorkspaceID(ctx context.Context, works &i.CompletedAt, &i.WorkspaceID, &i.ProjectHistoryID, + &i.Name, &i.BeforeID, &i.AfterID, &i.Transition, @@ -904,9 +906,45 @@ func (q *sqlQuerier) GetWorkspaceHistoryByWorkspaceID(ctx context.Context, works return items, nil } +const getWorkspaceHistoryByWorkspaceIDAndName = `-- name: GetWorkspaceHistoryByWorkspaceIDAndName :one +SELECT + id, created_at, updated_at, completed_at, workspace_id, project_history_id, name, before_id, after_id, transition, initiator, provisioner_state, provision_job_id +FROM + workspace_history +WHERE + workspace_id = $1 + AND name = $2 +` + +type GetWorkspaceHistoryByWorkspaceIDAndNameParams 
struct { + WorkspaceID uuid.UUID `db:"workspace_id" json:"workspace_id"` + Name string `db:"name" json:"name"` +} + +func (q *sqlQuerier) GetWorkspaceHistoryByWorkspaceIDAndName(ctx context.Context, arg GetWorkspaceHistoryByWorkspaceIDAndNameParams) (WorkspaceHistory, error) { + row := q.db.QueryRowContext(ctx, getWorkspaceHistoryByWorkspaceIDAndName, arg.WorkspaceID, arg.Name) + var i WorkspaceHistory + err := row.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.CompletedAt, + &i.WorkspaceID, + &i.ProjectHistoryID, + &i.Name, + &i.BeforeID, + &i.AfterID, + &i.Transition, + &i.Initiator, + &i.ProvisionerState, + &i.ProvisionJobID, + ) + return i, err +} + const getWorkspaceHistoryByWorkspaceIDWithoutAfter = `-- name: GetWorkspaceHistoryByWorkspaceIDWithoutAfter :one SELECT - id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id + id, created_at, updated_at, completed_at, workspace_id, project_history_id, name, before_id, after_id, transition, initiator, provisioner_state, provision_job_id FROM workspace_history WHERE @@ -926,6 +964,7 @@ func (q *sqlQuerier) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(ctx context.Co &i.CompletedAt, &i.WorkspaceID, &i.ProjectHistoryID, + &i.Name, &i.BeforeID, &i.AfterID, &i.Transition, @@ -936,6 +975,53 @@ func (q *sqlQuerier) GetWorkspaceHistoryByWorkspaceIDWithoutAfter(ctx context.Co return i, err } +const getWorkspaceHistoryLogsByIDBefore = `-- name: GetWorkspaceHistoryLogsByIDBefore :many +SELECT + id, workspace_history_id, created_at, source, level, output +FROM + workspace_history_log +WHERE + workspace_history_id = $1 + AND created_at <= $2 +ORDER BY + created_at +` + +type GetWorkspaceHistoryLogsByIDBeforeParams struct { + WorkspaceHistoryID uuid.UUID `db:"workspace_history_id" json:"workspace_history_id"` + CreatedAt time.Time `db:"created_at" json:"created_at"` +} + +func (q *sqlQuerier) GetWorkspaceHistoryLogsByIDBefore(ctx context.Context, arg GetWorkspaceHistoryLogsByIDBeforeParams) ([]WorkspaceHistoryLog, error) { + rows, err := q.db.QueryContext(ctx, getWorkspaceHistoryLogsByIDBefore, arg.WorkspaceHistoryID, arg.CreatedAt) + if err != nil { + return nil, err + } + defer rows.Close() + var items []WorkspaceHistoryLog + for rows.Next() { + var i WorkspaceHistoryLog + if err := rows.Scan( + &i.ID, + &i.WorkspaceHistoryID, + &i.CreatedAt, + &i.Source, + &i.Level, + &i.Output, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getWorkspaceResourcesByHistoryID = `-- name: GetWorkspaceResourcesByHistoryID :many SELECT id, created_at, workspace_history_id, type, name, workspace_agent_token, workspace_agent_id @@ -1810,12 +1896,13 @@ INSERT INTO workspace_id, project_history_id, before_id, + name, transition, initiator, provision_job_id ) VALUES - ($1, $2, $3, $4, $5, $6, $7, $8, $9) RETURNING id, created_at, updated_at, completed_at, workspace_id, project_history_id, before_id, after_id, transition, initiator, provisioner_state, provision_job_id + ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id, created_at, updated_at, completed_at, workspace_id, project_history_id, name, before_id, after_id, transition, initiator, provisioner_state, provision_job_id ` type InsertWorkspaceHistoryParams struct { @@ -1825,6 +1912,7 @@ type InsertWorkspaceHistoryParams struct { WorkspaceID 
uuid.UUID `db:"workspace_id" json:"workspace_id"` ProjectHistoryID uuid.UUID `db:"project_history_id" json:"project_history_id"` BeforeID uuid.NullUUID `db:"before_id" json:"before_id"` + Name string `db:"name" json:"name"` Transition WorkspaceTransition `db:"transition" json:"transition"` Initiator string `db:"initiator" json:"initiator"` ProvisionJobID uuid.UUID `db:"provision_job_id" json:"provision_job_id"` @@ -1838,6 +1926,7 @@ func (q *sqlQuerier) InsertWorkspaceHistory(ctx context.Context, arg InsertWorks arg.WorkspaceID, arg.ProjectHistoryID, arg.BeforeID, + arg.Name, arg.Transition, arg.Initiator, arg.ProvisionJobID, @@ -1850,6 +1939,7 @@ func (q *sqlQuerier) InsertWorkspaceHistory(ctx context.Context, arg InsertWorks &i.CompletedAt, &i.WorkspaceID, &i.ProjectHistoryID, + &i.Name, &i.BeforeID, &i.AfterID, &i.Transition, From ba3ce5792a576a16d4502f9873d748969dab3dd5 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 31 Jan 2022 19:20:50 +0000 Subject: [PATCH 14/17] add workspace and project history parameters --- httpmw/projecthistoryparam.go | 60 ++++++++++ httpmw/projecthistoryparam_test.go | 161 +++++++++++++++++++++++++++ httpmw/workspacehistoryparam.go | 60 ++++++++++ httpmw/workspacehistoryparam_test.go | 145 ++++++++++++++++++++++++ 4 files changed, 426 insertions(+) create mode 100644 httpmw/projecthistoryparam.go create mode 100644 httpmw/projecthistoryparam_test.go create mode 100644 httpmw/workspacehistoryparam.go create mode 100644 httpmw/workspacehistoryparam_test.go diff --git a/httpmw/projecthistoryparam.go b/httpmw/projecthistoryparam.go new file mode 100644 index 0000000000000..48702163bc3dc --- /dev/null +++ b/httpmw/projecthistoryparam.go @@ -0,0 +1,60 @@ +package httpmw + +import ( + "context" + "database/sql" + "errors" + "fmt" + "net/http" + + "github.com/go-chi/chi" + + "github.com/coder/coder/database" + "github.com/coder/coder/httpapi" +) + +type projectHistoryParamContextKey struct{} + +// ProjectHistoryParam returns the project history from the ExtractProjectHistoryParam handler. +func ProjectHistoryParam(r *http.Request) database.ProjectHistory { + projectHistory, ok := r.Context().Value(projectHistoryParamContextKey{}).(database.ProjectHistory) + if !ok { + panic("developer error: project history param middleware not provided") + } + return projectHistory +} + +// ExtractProjectHistoryParam grabs project history from the "projecthistory" URL parameter. 
+func ExtractProjectHistoryParam(db database.Store) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + project := ProjectParam(r) + projectHistoryName := chi.URLParam(r, "projecthistory") + if projectHistoryName == "" { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: "project history name must be provided", + }) + return + } + projectHistory, err := db.GetProjectHistoryByProjectIDAndName(r.Context(), database.GetProjectHistoryByProjectIDAndNameParams{ + ProjectID: project.ID, + Name: projectHistoryName, + }) + if errors.Is(err, sql.ErrNoRows) { + httpapi.Write(rw, http.StatusNotFound, httpapi.Response{ + Message: fmt.Sprintf("project history %q does not exist", projectHistoryName), + }) + return + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get project history: %s", err.Error()), + }) + return + } + + ctx := context.WithValue(r.Context(), projectHistoryParamContextKey{}, projectHistory) + next.ServeHTTP(rw, r.WithContext(ctx)) + }) + } +} diff --git a/httpmw/projecthistoryparam_test.go b/httpmw/projecthistoryparam_test.go new file mode 100644 index 0000000000000..c72b6fe37be66 --- /dev/null +++ b/httpmw/projecthistoryparam_test.go @@ -0,0 +1,161 @@ +package httpmw_test + +import ( + "context" + "crypto/sha256" + "fmt" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/go-chi/chi" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/cryptorand" + "github.com/coder/coder/database" + "github.com/coder/coder/database/databasefake" + "github.com/coder/coder/httpmw" +) + +func TestProjectHistoryParam(t *testing.T) { + t.Parallel() + + setupAuthentication := func(db database.Store) (*http.Request, database.Project) { + var ( + id, secret = randomAPIKeyParts() + hashed = sha256.Sum256([]byte(secret)) + ) + r := httptest.NewRequest("GET", "/", nil) + r.AddCookie(&http.Cookie{ + Name: httpmw.AuthCookie, + Value: fmt.Sprintf("%s-%s", id, secret), + }) + userID, err := cryptorand.String(16) + require.NoError(t, err) + username, err := cryptorand.String(8) + require.NoError(t, err) + user, err := db.InsertUser(r.Context(), database.InsertUserParams{ + ID: userID, + Email: "testaccount@coder.com", + Name: "example", + LoginType: database.LoginTypeBuiltIn, + HashedPassword: hashed[:], + Username: username, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + }) + require.NoError(t, err) + _, err = db.InsertAPIKey(r.Context(), database.InsertAPIKeyParams{ + ID: id, + UserID: user.ID, + HashedSecret: hashed[:], + LastUsed: database.Now(), + ExpiresAt: database.Now().Add(time.Minute), + }) + require.NoError(t, err) + orgID, err := cryptorand.String(16) + require.NoError(t, err) + organization, err := db.InsertOrganization(r.Context(), database.InsertOrganizationParams{ + ID: orgID, + Name: "banana", + Description: "wowie", + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + }) + require.NoError(t, err) + _, err = db.InsertOrganizationMember(r.Context(), database.InsertOrganizationMemberParams{ + OrganizationID: orgID, + UserID: user.ID, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + }) + require.NoError(t, err) + project, err := db.InsertProject(context.Background(), database.InsertProjectParams{ + ID: uuid.New(), + OrganizationID: organization.ID, + Name: "moo", + }) + require.NoError(t, err) + + ctx := 
chi.NewRouteContext() + ctx.URLParams.Add("organization", organization.Name) + ctx.URLParams.Add("project", project.Name) + r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, ctx)) + return r, project + } + + t.Run("None", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractOrganizationParam(db), + httpmw.ExtractProjectParam(db), + httpmw.ExtractProjectHistoryParam(db), + ) + rtr.Get("/", nil) + r, _ := setupAuthentication(db) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusBadRequest, res.StatusCode) + }) + + t.Run("NotFound", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractOrganizationParam(db), + httpmw.ExtractProjectParam(db), + httpmw.ExtractProjectHistoryParam(db), + ) + rtr.Get("/", nil) + + r, _ := setupAuthentication(db) + chi.RouteContext(r.Context()).URLParams.Add("projecthistory", "nothin") + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("ProjectHistory", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractOrganizationParam(db), + httpmw.ExtractProjectParam(db), + httpmw.ExtractProjectHistoryParam(db), + ) + rtr.Get("/", func(rw http.ResponseWriter, r *http.Request) { + _ = httpmw.ProjectHistoryParam(r) + rw.WriteHeader(http.StatusOK) + }) + + r, project := setupAuthentication(db) + projectHistory, err := db.InsertProjectHistory(context.Background(), database.InsertProjectHistoryParams{ + ID: uuid.New(), + ProjectID: project.ID, + Name: "moo", + }) + require.NoError(t, err) + chi.RouteContext(r.Context()).URLParams.Add("projecthistory", projectHistory.Name) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) +} diff --git a/httpmw/workspacehistoryparam.go b/httpmw/workspacehistoryparam.go new file mode 100644 index 0000000000000..e210414290d21 --- /dev/null +++ b/httpmw/workspacehistoryparam.go @@ -0,0 +1,60 @@ +package httpmw + +import ( + "context" + "database/sql" + "errors" + "fmt" + "net/http" + + "github.com/go-chi/chi" + + "github.com/coder/coder/database" + "github.com/coder/coder/httpapi" +) + +type workspaceHistoryParamContextKey struct{} + +// WorkspaceHistoryParam returns the workspace history from the ExtractWorkspaceHistoryParam handler. +func WorkspaceHistoryParam(r *http.Request) database.WorkspaceHistory { + workspaceHistory, ok := r.Context().Value(workspaceHistoryParamContextKey{}).(database.WorkspaceHistory) + if !ok { + panic("developer error: workspace history param middleware not provided") + } + return workspaceHistory +} + +// ExtractWorkspaceHistoryParam grabs workspace history from the "workspacehistory" URL parameter. 
+func ExtractWorkspaceHistoryParam(db database.Store) func(http.Handler) http.Handler { + return func(next http.Handler) http.Handler { + return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { + workspace := WorkspaceParam(r) + workspaceHistoryName := chi.URLParam(r, "workspacehistory") + if workspaceHistoryName == "" { + httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ + Message: "workspace history name must be provided", + }) + return + } + workspaceHistory, err := db.GetWorkspaceHistoryByWorkspaceIDAndName(r.Context(), database.GetWorkspaceHistoryByWorkspaceIDAndNameParams{ + WorkspaceID: workspace.ID, + Name: workspaceHistoryName, + }) + if errors.Is(err, sql.ErrNoRows) { + httpapi.Write(rw, http.StatusNotFound, httpapi.Response{ + Message: fmt.Sprintf("workspace history %q does not exist", workspaceHistoryName), + }) + return + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get workspace history: %s", err.Error()), + }) + return + } + + ctx := context.WithValue(r.Context(), workspaceHistoryParamContextKey{}, workspaceHistory) + next.ServeHTTP(rw, r.WithContext(ctx)) + }) + } +} diff --git a/httpmw/workspacehistoryparam_test.go b/httpmw/workspacehistoryparam_test.go new file mode 100644 index 0000000000000..374a501eeabdd --- /dev/null +++ b/httpmw/workspacehistoryparam_test.go @@ -0,0 +1,145 @@ +package httpmw_test + +import ( + "context" + "crypto/sha256" + "fmt" + "net/http" + "net/http/httptest" + "testing" + "time" + + "github.com/go-chi/chi" + "github.com/google/uuid" + "github.com/stretchr/testify/require" + + "github.com/coder/coder/cryptorand" + "github.com/coder/coder/database" + "github.com/coder/coder/database/databasefake" + "github.com/coder/coder/httpmw" +) + +func TestWorkspaceHistoryParam(t *testing.T) { + t.Parallel() + + setupAuthentication := func(db database.Store) (*http.Request, database.Workspace) { + var ( + id, secret = randomAPIKeyParts() + hashed = sha256.Sum256([]byte(secret)) + ) + r := httptest.NewRequest("GET", "/", nil) + r.AddCookie(&http.Cookie{ + Name: httpmw.AuthCookie, + Value: fmt.Sprintf("%s-%s", id, secret), + }) + userID, err := cryptorand.String(16) + require.NoError(t, err) + username, err := cryptorand.String(8) + require.NoError(t, err) + user, err := db.InsertUser(r.Context(), database.InsertUserParams{ + ID: userID, + Email: "testaccount@coder.com", + Name: "example", + LoginType: database.LoginTypeBuiltIn, + HashedPassword: hashed[:], + Username: username, + CreatedAt: database.Now(), + UpdatedAt: database.Now(), + }) + require.NoError(t, err) + _, err = db.InsertAPIKey(r.Context(), database.InsertAPIKeyParams{ + ID: id, + UserID: user.ID, + HashedSecret: hashed[:], + LastUsed: database.Now(), + ExpiresAt: database.Now().Add(time.Minute), + }) + require.NoError(t, err) + workspace, err := db.InsertWorkspace(context.Background(), database.InsertWorkspaceParams{ + ID: uuid.New(), + ProjectID: uuid.New(), + OwnerID: user.ID, + Name: "potato", + }) + require.NoError(t, err) + + ctx := chi.NewRouteContext() + ctx.URLParams.Add("user", userID) + ctx.URLParams.Add("workspace", workspace.Name) + r = r.WithContext(context.WithValue(r.Context(), chi.RouteCtxKey, ctx)) + return r, workspace + } + + t.Run("None", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractUserParam(db), + httpmw.ExtractWorkspaceParam(db), + httpmw.ExtractWorkspaceHistoryParam(db), + ) + 
rtr.Get("/", nil) + r, _ := setupAuthentication(db) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusBadRequest, res.StatusCode) + }) + + t.Run("NotFound", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractUserParam(db), + httpmw.ExtractWorkspaceParam(db), + httpmw.ExtractWorkspaceHistoryParam(db), + ) + rtr.Get("/", nil) + + r, _ := setupAuthentication(db) + chi.RouteContext(r.Context()).URLParams.Add("workspacehistory", "nothin") + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusNotFound, res.StatusCode) + }) + + t.Run("WorkspaceHistory", func(t *testing.T) { + t.Parallel() + db := databasefake.New() + rtr := chi.NewRouter() + rtr.Use( + httpmw.ExtractAPIKey(db, nil), + httpmw.ExtractUserParam(db), + httpmw.ExtractWorkspaceParam(db), + httpmw.ExtractWorkspaceHistoryParam(db), + ) + rtr.Get("/", func(rw http.ResponseWriter, r *http.Request) { + _ = httpmw.WorkspaceHistoryParam(r) + rw.WriteHeader(http.StatusOK) + }) + + r, workspace := setupAuthentication(db) + workspaceHistory, err := db.InsertWorkspaceHistory(context.Background(), database.InsertWorkspaceHistoryParams{ + ID: uuid.New(), + WorkspaceID: workspace.ID, + Name: "moo", + }) + require.NoError(t, err) + chi.RouteContext(r.Context()).URLParams.Add("workspacehistory", workspaceHistory.Name) + rw := httptest.NewRecorder() + rtr.ServeHTTP(rw, r) + + res := rw.Result() + defer res.Body.Close() + require.Equal(t, http.StatusOK, res.StatusCode) + }) +} From c01860d1ab322fbce72e431fa60f88d63ebabf5d Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 31 Jan 2022 19:45:36 +0000 Subject: [PATCH 15/17] Update protobufs for logs --- provisioner/terraform/provision.go | 12 +- provisionerd/proto/provisionerd.pb.go | 339 +++++++------------------ provisionerd/proto/provisionerd.proto | 19 +- provisionerd/provisionerd.go | 50 ++-- provisionersdk/proto/provisioner.pb.go | 138 +++++----- provisionersdk/proto/provisioner.proto | 3 +- 6 files changed, 191 insertions(+), 370 deletions(-) diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 8e8952c3f3615..5f52139b35ccb 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -51,8 +51,8 @@ func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRP _ = stream.Send(&proto.Provision_Response{ Type: &proto.Provision_Response_Log{ Log: &proto.Log{ - Level: proto.LogLevel_INFO, - Text: scanner.Text(), + Level: proto.LogLevel_INFO, + Output: scanner.Text(), }, }, }) @@ -101,8 +101,8 @@ func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRP _ = stream.Send(&proto.Provision_Response{ Type: &proto.Provision_Response_Log{ Log: &proto.Log{ - Level: logLevel, - Text: log.Message, + Level: logLevel, + Output: log.Message, }, }, }) @@ -119,8 +119,8 @@ func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRP _ = stream.Send(&proto.Provision_Response{ Type: &proto.Provision_Response_Log{ Log: &proto.Log{ - Level: logLevel, - Text: log.Diagnostic.Detail, + Level: logLevel, + Output: log.Diagnostic.Detail, }, }, }) diff --git a/provisionerd/proto/provisionerd.pb.go b/provisionerd/proto/provisionerd.pb.go index 86218b19d237b..0f1723b09b4e4 100644 --- a/provisionerd/proto/provisionerd.pb.go +++ 
b/provisionerd/proto/provisionerd.pb.go @@ -25,19 +25,19 @@ const ( type LogSource int32 const ( - LogSource_PROVISIONER LogSource = 0 - LogSource_DAEMON LogSource = 1 + LogSource_PROVISIONER_DAEMON LogSource = 0 + LogSource_PROVISIONER LogSource = 1 ) // Enum value maps for LogSource. var ( LogSource_name = map[int32]string{ - 0: "PROVISIONER", - 1: "DAEMON", + 0: "PROVISIONER_DAEMON", + 1: "PROVISIONER", } LogSource_value = map[string]int32{ - "PROVISIONER": 0, - "DAEMON": 1, + "PROVISIONER_DAEMON": 0, + "PROVISIONER": 1, } ) @@ -397,11 +397,7 @@ type Log struct { Source LogSource `protobuf:"varint,1,opt,name=source,proto3,enum=provisionerd.LogSource" json:"source,omitempty"` Level proto.LogLevel `protobuf:"varint,2,opt,name=level,proto3,enum=provisioner.LogLevel" json:"level,omitempty"` CreatedAt int64 `protobuf:"varint,3,opt,name=created_at,json=createdAt,proto3" json:"created_at,omitempty"` - Text string `protobuf:"bytes,4,opt,name=text,proto3" json:"text,omitempty"` - // Types that are assignable to Type: - // *Log_WorkspaceProvision_ - // *Log_ProjectImport_ - Type isLog_Type `protobuf_oneof:"type"` + Output string `protobuf:"bytes,4,opt,name=output,proto3" json:"output,omitempty"` } func (x *Log) Reset() { @@ -440,7 +436,7 @@ func (x *Log) GetSource() LogSource { if x != nil { return x.Source } - return LogSource_PROVISIONER + return LogSource_PROVISIONER_DAEMON } func (x *Log) GetLevel() proto.LogLevel { @@ -457,50 +453,13 @@ func (x *Log) GetCreatedAt() int64 { return 0 } -func (x *Log) GetText() string { +func (x *Log) GetOutput() string { if x != nil { - return x.Text + return x.Output } return "" } -func (m *Log) GetType() isLog_Type { - if m != nil { - return m.Type - } - return nil -} - -func (x *Log) GetWorkspaceProvision() *Log_WorkspaceProvision { - if x, ok := x.GetType().(*Log_WorkspaceProvision_); ok { - return x.WorkspaceProvision - } - return nil -} - -func (x *Log) GetProjectImport() *Log_ProjectImport { - if x, ok := x.GetType().(*Log_ProjectImport_); ok { - return x.ProjectImport - } - return nil -} - -type isLog_Type interface { - isLog_Type() -} - -type Log_WorkspaceProvision_ struct { - WorkspaceProvision *Log_WorkspaceProvision `protobuf:"bytes,5,opt,name=workspace_provision,json=workspaceProvision,proto3,oneof"` -} - -type Log_ProjectImport_ struct { - ProjectImport *Log_ProjectImport `protobuf:"bytes,6,opt,name=project_import,json=projectImport,proto3,oneof"` -} - -func (*Log_WorkspaceProvision_) isLog_Type() {} - -func (*Log_ProjectImport_) isLog_Type() {} - // JobUpdate represents an update to a job. // There may be no log output, but this message // should still be sent periodically as a heartbeat. 
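The generated code above flattens the daemon's Log message: the oneof tying a log to a workspace provision or project import is removed, text is renamed to output to match the *_history_log columns, and the log source enum now distinguishes the daemon itself from the provisioner it runs. A minimal sketch of building such a log follows; it is not from the patch, the import aliases are assumptions, and the patch does not pin down the unit of created_at (milliseconds are assumed here):

package example

import (
	"time"

	provisionerd "github.com/coder/coder/provisionerd/proto"
	sdkproto "github.com/coder/coder/provisionersdk/proto"
)

// terraformOutputLog wraps one line of provisioner output in the flattened
// Log message: source, level, timestamp, and the renamed output field.
func terraformOutputLog(line string) *provisionerd.Log {
	return &provisionerd.Log{
		Source:    provisionerd.LogSource_PROVISIONER,
		Level:     sdkproto.LogLevel_INFO,
		CreatedAt: time.Now().UTC().UnixMilli(), // assumed: millisecond precision
		Output:    line,
	}
}
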
@@ -509,8 +468,9 @@ type JobUpdate struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` - Logs []*Log `protobuf:"bytes,2,rep,name=logs,proto3" json:"logs,omitempty"` + JobId string `protobuf:"bytes,1,opt,name=job_id,json=jobId,proto3" json:"job_id,omitempty"` + WorkspaceProvisionLogs []*Log `protobuf:"bytes,2,rep,name=workspace_provision_logs,json=workspaceProvisionLogs,proto3" json:"workspace_provision_logs,omitempty"` + ProjectImportLogs []*Log `protobuf:"bytes,3,rep,name=project_import_logs,json=projectImportLogs,proto3" json:"project_import_logs,omitempty"` } func (x *JobUpdate) Reset() { @@ -552,9 +512,16 @@ func (x *JobUpdate) GetJobId() string { return "" } -func (x *JobUpdate) GetLogs() []*Log { +func (x *JobUpdate) GetWorkspaceProvisionLogs() []*Log { + if x != nil { + return x.WorkspaceProvisionLogs + } + return nil +} + +func (x *JobUpdate) GetProjectImportLogs() []*Log { if x != nil { - return x.Logs + return x.ProjectImportLogs } return nil } @@ -787,100 +754,6 @@ func (x *CompletedJob_ProjectImport) GetParameterSchemas() []*proto.ParameterSch return nil } -type Log_WorkspaceProvision struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - WorkspaceHistoryId string `protobuf:"bytes,1,opt,name=workspace_history_id,json=workspaceHistoryId,proto3" json:"workspace_history_id,omitempty"` -} - -func (x *Log_WorkspaceProvision) Reset() { - *x = Log_WorkspaceProvision{} - if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[10] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Log_WorkspaceProvision) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Log_WorkspaceProvision) ProtoMessage() {} - -func (x *Log_WorkspaceProvision) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[10] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Log_WorkspaceProvision.ProtoReflect.Descriptor instead. 
-func (*Log_WorkspaceProvision) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 0} -} - -func (x *Log_WorkspaceProvision) GetWorkspaceHistoryId() string { - if x != nil { - return x.WorkspaceHistoryId - } - return "" -} - -type Log_ProjectImport struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - ProjectHistoryId string `protobuf:"bytes,1,opt,name=project_history_id,json=projectHistoryId,proto3" json:"project_history_id,omitempty"` -} - -func (x *Log_ProjectImport) Reset() { - *x = Log_ProjectImport{} - if protoimpl.UnsafeEnabled { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[11] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Log_ProjectImport) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Log_ProjectImport) ProtoMessage() {} - -func (x *Log_ProjectImport) ProtoReflect() protoreflect.Message { - mi := &file_provisionerd_proto_provisionerd_proto_msgTypes[11] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Log_ProjectImport.ProtoReflect.Descriptor instead. -func (*Log_ProjectImport) Descriptor() ([]byte, []int) { - return file_provisionerd_proto_provisionerd_proto_rawDescGZIP(), []int{4, 1} -} - -func (x *Log_ProjectImport) GetProjectHistoryId() string { - if x != nil { - return x.ProjectHistoryId - } - return "" -} - var File_provisionerd_proto_provisionerd_proto protoreflect.FileDescriptor var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ @@ -968,7 +841,7 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, - 0x61, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xc8, 0x03, 0x0a, 0x03, 0x4c, + 0x61, 0x73, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x9a, 0x01, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2f, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x06, 0x73, 0x6f, 0x75, @@ -977,54 +850,43 @@ var file_provisionerd_proto_provisionerd_proto_rawDesc = []byte{ 0x2e, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x61, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, - 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, - 0x65, 0x78, 0x74, 0x12, 0x57, 0x0a, 0x13, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, - 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x24, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, - 0x4c, 0x6f, 0x67, 0x2e, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x48, 0x00, 
0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x48, 0x0a, 0x0e, - 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x18, 0x06, - 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x2e, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, - 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x48, 0x00, 0x52, 0x0d, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, - 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x1a, 0x46, 0x0a, 0x12, 0x57, 0x6f, 0x72, 0x6b, 0x73, 0x70, - 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x14, - 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, 0x72, - 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x77, 0x6f, 0x72, 0x6b, - 0x73, 0x70, 0x61, 0x63, 0x65, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x1a, 0x3d, - 0x0a, 0x0d, 0x50, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x12, - 0x2c, 0x0a, 0x12, 0x70, 0x72, 0x6f, 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x68, 0x69, 0x73, 0x74, 0x6f, - 0x72, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x70, 0x72, 0x6f, - 0x6a, 0x65, 0x63, 0x74, 0x48, 0x69, 0x73, 0x74, 0x6f, 0x72, 0x79, 0x49, 0x64, 0x42, 0x06, 0x0a, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x49, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x55, 0x70, 0x64, 0x61, - 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x25, 0x0a, 0x04, 0x6c, 0x6f, 0x67, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x04, 0x6c, 0x6f, 0x67, 0x73, - 0x2a, 0x28, 0x0a, 0x09, 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x0f, 0x0a, - 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x10, 0x00, 0x12, 0x0a, - 0x0a, 0x06, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, 0x01, 0x32, 0x8c, 0x02, 0x0a, 0x11, 0x50, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, - 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, - 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, - 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x3b, - 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4a, 0x6f, 0x62, 0x55, 0x70, - 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x28, 0x01, 0x12, 0x3c, 0x0a, 0x09, 0x43, - 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, - 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, - 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x1a, 0x2e, 0x70, 
0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, - 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, - 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, - 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, - 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x16, 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0xb2, 0x01, 0x0a, 0x09, 0x4a, 0x6f, 0x62, 0x55, + 0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x15, 0x0a, 0x06, 0x6a, 0x6f, 0x62, 0x5f, 0x69, 0x64, 0x18, + 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6a, 0x6f, 0x62, 0x49, 0x64, 0x12, 0x4b, 0x0a, 0x18, + 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, + 0x69, 0x6f, 0x6e, 0x5f, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, + 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, + 0x67, 0x52, 0x16, 0x77, 0x6f, 0x72, 0x6b, 0x73, 0x70, 0x61, 0x63, 0x65, 0x50, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x4c, 0x6f, 0x67, 0x73, 0x12, 0x41, 0x0a, 0x13, 0x70, 0x72, 0x6f, + 0x6a, 0x65, 0x63, 0x74, 0x5f, 0x69, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x5f, 0x6c, 0x6f, 0x67, 0x73, + 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x4c, 0x6f, 0x67, 0x52, 0x11, 0x70, 0x72, 0x6f, 0x6a, 0x65, + 0x63, 0x74, 0x49, 0x6d, 0x70, 0x6f, 0x72, 0x74, 0x4c, 0x6f, 0x67, 0x73, 0x2a, 0x34, 0x0a, 0x09, + 0x4c, 0x6f, 0x67, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x16, 0x0a, 0x12, 0x50, 0x52, 0x4f, + 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, 0x5f, 0x44, 0x41, 0x45, 0x4d, 0x4f, 0x4e, 0x10, + 0x00, 0x12, 0x0f, 0x0a, 0x0b, 0x50, 0x52, 0x4f, 0x56, 0x49, 0x53, 0x49, 0x4f, 0x4e, 0x45, 0x52, + 0x10, 0x01, 0x32, 0x8c, 0x02, 0x0a, 0x11, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x44, 0x61, 0x65, 0x6d, 0x6f, 0x6e, 0x12, 0x3c, 0x0a, 0x0a, 0x41, 0x63, 0x71, 0x75, + 0x69, 0x72, 0x65, 0x4a, 0x6f, 0x62, 0x12, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x19, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x41, 0x63, 0x71, 0x75, 0x69, + 0x72, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x12, 0x3b, 0x0a, 0x09, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, + 0x4a, 0x6f, 0x62, 0x12, 0x17, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, + 0x72, 0x64, 0x2e, 0x4a, 0x6f, 0x62, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x1a, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x28, 0x01, 0x12, 0x3c, 0x0a, 0x09, 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x4a, 0x6f, 0x62, + 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x43, 0x61, 0x6e, 0x63, 0x65, 0x6c, 0x6c, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x12, 0x3e, 0x0a, 0x0b, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x4a, 0x6f, 0x62, + 0x12, 0x1a, 
0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, + 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x64, 0x4a, 0x6f, 0x62, 0x1a, 0x13, 0x2e, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2e, 0x45, 0x6d, 0x70, 0x74, + 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, + 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x64, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1040,7 +902,7 @@ func file_provisionerd_proto_provisionerd_proto_rawDescGZIP() []byte { } var file_provisionerd_proto_provisionerd_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 12) +var file_provisionerd_proto_provisionerd_proto_msgTypes = make([]protoimpl.MessageInfo, 10) var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (LogSource)(0), // 0: provisionerd.LogSource (*Empty)(nil), // 1: provisionerd.Empty @@ -1053,12 +915,10 @@ var file_provisionerd_proto_provisionerd_proto_goTypes = []interface{}{ (*AcquiredJob_ProjectImport)(nil), // 8: provisionerd.AcquiredJob.ProjectImport (*CompletedJob_WorkspaceProvision)(nil), // 9: provisionerd.CompletedJob.WorkspaceProvision (*CompletedJob_ProjectImport)(nil), // 10: provisionerd.CompletedJob.ProjectImport - (*Log_WorkspaceProvision)(nil), // 11: provisionerd.Log.WorkspaceProvision - (*Log_ProjectImport)(nil), // 12: provisionerd.Log.ProjectImport - (proto.LogLevel)(0), // 13: provisioner.LogLevel - (*proto.ParameterValue)(nil), // 14: provisioner.ParameterValue - (*proto.Resource)(nil), // 15: provisioner.Resource - (*proto.ParameterSchema)(nil), // 16: provisioner.ParameterSchema + (proto.LogLevel)(0), // 11: provisioner.LogLevel + (*proto.ParameterValue)(nil), // 12: provisioner.ParameterValue + (*proto.Resource)(nil), // 13: provisioner.Resource + (*proto.ParameterSchema)(nil), // 14: provisioner.ParameterSchema } var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ 7, // 0: provisionerd.AcquiredJob.workspace_provision:type_name -> provisionerd.AcquiredJob.WorkspaceProvision @@ -1066,26 +926,25 @@ var file_provisionerd_proto_provisionerd_proto_depIdxs = []int32{ 9, // 2: provisionerd.CompletedJob.workspace_provision:type_name -> provisionerd.CompletedJob.WorkspaceProvision 10, // 3: provisionerd.CompletedJob.project_import:type_name -> provisionerd.CompletedJob.ProjectImport 0, // 4: provisionerd.Log.source:type_name -> provisionerd.LogSource - 13, // 5: provisionerd.Log.level:type_name -> provisioner.LogLevel - 11, // 6: provisionerd.Log.workspace_provision:type_name -> provisionerd.Log.WorkspaceProvision - 12, // 7: provisionerd.Log.project_import:type_name -> provisionerd.Log.ProjectImport - 5, // 8: provisionerd.JobUpdate.logs:type_name -> provisionerd.Log - 14, // 9: provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue - 15, // 10: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource - 16, // 11: provisionerd.CompletedJob.ProjectImport.parameter_schemas:type_name -> provisioner.ParameterSchema - 1, // 12: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty - 6, // 13: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate - 3, // 14: 
provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob - 4, // 15: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob - 2, // 16: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob - 1, // 17: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty - 1, // 18: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty - 1, // 19: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty - 16, // [16:20] is the sub-list for method output_type - 12, // [12:16] is the sub-list for method input_type - 12, // [12:12] is the sub-list for extension type_name - 12, // [12:12] is the sub-list for extension extendee - 0, // [0:12] is the sub-list for field type_name + 11, // 5: provisionerd.Log.level:type_name -> provisioner.LogLevel + 5, // 6: provisionerd.JobUpdate.workspace_provision_logs:type_name -> provisionerd.Log + 5, // 7: provisionerd.JobUpdate.project_import_logs:type_name -> provisionerd.Log + 12, // 8: provisionerd.AcquiredJob.WorkspaceProvision.parameter_values:type_name -> provisioner.ParameterValue + 13, // 9: provisionerd.CompletedJob.WorkspaceProvision.resources:type_name -> provisioner.Resource + 14, // 10: provisionerd.CompletedJob.ProjectImport.parameter_schemas:type_name -> provisioner.ParameterSchema + 1, // 11: provisionerd.ProvisionerDaemon.AcquireJob:input_type -> provisionerd.Empty + 6, // 12: provisionerd.ProvisionerDaemon.UpdateJob:input_type -> provisionerd.JobUpdate + 3, // 13: provisionerd.ProvisionerDaemon.CancelJob:input_type -> provisionerd.CancelledJob + 4, // 14: provisionerd.ProvisionerDaemon.CompleteJob:input_type -> provisionerd.CompletedJob + 2, // 15: provisionerd.ProvisionerDaemon.AcquireJob:output_type -> provisionerd.AcquiredJob + 1, // 16: provisionerd.ProvisionerDaemon.UpdateJob:output_type -> provisionerd.Empty + 1, // 17: provisionerd.ProvisionerDaemon.CancelJob:output_type -> provisionerd.Empty + 1, // 18: provisionerd.ProvisionerDaemon.CompleteJob:output_type -> provisionerd.Empty + 15, // [15:19] is the sub-list for method output_type + 11, // [11:15] is the sub-list for method input_type + 11, // [11:11] is the sub-list for extension type_name + 11, // [11:11] is the sub-list for extension extendee + 0, // [0:11] is the sub-list for field type_name } func init() { file_provisionerd_proto_provisionerd_proto_init() } @@ -1214,30 +1073,6 @@ func file_provisionerd_proto_provisionerd_proto_init() { return nil } } - file_provisionerd_proto_provisionerd_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Log_WorkspaceProvision); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_provisionerd_proto_provisionerd_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Log_ProjectImport); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } } file_provisionerd_proto_provisionerd_proto_msgTypes[1].OneofWrappers = []interface{}{ (*AcquiredJob_WorkspaceProvision_)(nil), @@ -1247,17 +1082,13 @@ func file_provisionerd_proto_provisionerd_proto_init() { (*CompletedJob_WorkspaceProvision_)(nil), (*CompletedJob_ProjectImport_)(nil), } - file_provisionerd_proto_provisionerd_proto_msgTypes[4].OneofWrappers = []interface{}{ - (*Log_WorkspaceProvision_)(nil), - (*Log_ProjectImport_)(nil), - } type 
x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_provisionerd_proto_provisionerd_proto_rawDesc, NumEnums: 1, - NumMessages: 12, + NumMessages: 10, NumExtensions: 0, NumServices: 1, }, diff --git a/provisionerd/proto/provisionerd.proto b/provisionerd/proto/provisionerd.proto index 7ea7472b0150b..836d2d5f49a3a 100644 --- a/provisionerd/proto/provisionerd.proto +++ b/provisionerd/proto/provisionerd.proto @@ -57,26 +57,16 @@ message CompletedJob { // LogSource represents the sender of the log. enum LogSource { - PROVISIONER = 0; - DAEMON = 1; + PROVISIONER_DAEMON = 0; + PROVISIONER = 1; } // Log represents output from a job. message Log { - message WorkspaceProvision { - string workspace_history_id = 1; - } - message ProjectImport { - string project_history_id = 1; - } LogSource source = 1; provisioner.LogLevel level = 2; int64 created_at = 3; - string text = 4; - oneof type { - WorkspaceProvision workspace_provision = 5; - ProjectImport project_import = 6; - } + string output = 4; } // JobUpdate represents an update to a job. @@ -84,7 +74,8 @@ message Log { // should still be sent periodically as a heartbeat. message JobUpdate { string job_id = 1; - repeated Log logs = 2; + repeated Log workspace_provision_logs = 2; + repeated Log project_import_logs = 3; } service ProvisionerDaemon { diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index e8c62074c9c9f..ec3526a4e1737 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -277,21 +277,23 @@ func (p *provisionerDaemon) runProjectImport(provisioner sdkproto.DRPCProvisione case *sdkproto.Parse_Response_Log: p.opts.Logger.Debug(context.Background(), "parse job logged", slog.F("level", msgType.Log.Level), - slog.F("text", msgType.Log.Text), + slog.F("output", msgType.Log.Output), slog.F("project_history_id", job.ProjectImport.ProjectHistoryId), ) - p.logQueue = append(p.logQueue, proto.Log{ - Source: proto.LogSource_PROVISIONER, - Level: msgType.Log.Level, - CreatedAt: time.Now().UTC().UnixMilli(), - Text: msgType.Log.Text, - Type: &proto.Log_ProjectImport_{ - ProjectImport: &proto.Log_ProjectImport{ - ProjectHistoryId: job.ProjectImport.ProjectHistoryId, - }, - }, + err = p.updateStream.Send(&proto.JobUpdate{ + JobId: p.activeJob.JobId, + ProjectImportLogs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: msgType.Log.Level, + CreatedAt: time.Now().UTC().UnixMilli(), + Output: msgType.Log.Output, + }}, }) + if err != nil { + p.cancelActiveJob(fmt.Sprintf("update job: %s", err)) + return + } case *sdkproto.Parse_Response_Complete: _, err = p.client.CompleteJob(p.closeContext, &proto.CompletedJob{ JobId: p.activeJob.JobId, @@ -335,23 +337,25 @@ func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvi } switch msgType := msg.Type.(type) { case *sdkproto.Provision_Response_Log: - p.opts.Logger.Debug(context.Background(), "provision job logged", + p.opts.Logger.Debug(context.Background(), "workspace provision job logged", slog.F("level", msgType.Log.Level), - slog.F("text", msgType.Log.Text), + slog.F("output", msgType.Log.Output), slog.F("workspace_history_id", job.WorkspaceProvision.WorkspaceHistoryId), ) - p.logQueue = append(p.logQueue, proto.Log{ - Source: proto.LogSource_PROVISIONER, - Level: msgType.Log.Level, - CreatedAt: time.Now().UTC().UnixMilli(), - Text: msgType.Log.Text, - Type: &proto.Log_WorkspaceProvision_{ - WorkspaceProvision: &proto.Log_WorkspaceProvision{ - 
WorkspaceHistoryId: job.WorkspaceProvision.WorkspaceHistoryId, - }, - }, + err = p.updateStream.Send(&proto.JobUpdate{ + JobId: p.activeJob.JobId, + WorkspaceProvisionLogs: []*proto.Log{{ + Source: proto.LogSource_PROVISIONER, + Level: msgType.Log.Level, + CreatedAt: time.Now().UTC().UnixMilli(), + Output: msgType.Log.Output, + }}, }) + if err != nil { + p.cancelActiveJob(fmt.Sprintf("send job update: %s", err)) + return + } case *sdkproto.Provision_Response_Complete: p.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", slog.F("resource_count", len(msgType.Complete.Resources)), diff --git a/provisionersdk/proto/provisioner.pb.go b/provisionersdk/proto/provisioner.pb.go index 077058e5418bd..e3b16c832ade7 100644 --- a/provisionersdk/proto/provisioner.pb.go +++ b/provisionersdk/proto/provisioner.pb.go @@ -29,7 +29,6 @@ const ( LogLevel_INFO LogLevel = 2 LogLevel_WARN LogLevel = 3 LogLevel_ERROR LogLevel = 4 - LogLevel_FATAL LogLevel = 5 ) // Enum value maps for LogLevel. @@ -40,7 +39,6 @@ var ( 2: "INFO", 3: "WARN", 4: "ERROR", - 5: "FATAL", } LogLevel_value = map[string]int32{ "TRACE": 0, @@ -48,7 +46,6 @@ var ( "INFO": 2, "WARN": 3, "ERROR": 4, - "FATAL": 5, } ) @@ -521,8 +518,8 @@ type Log struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Level LogLevel `protobuf:"varint,1,opt,name=level,proto3,enum=provisioner.LogLevel" json:"level,omitempty"` - Text string `protobuf:"bytes,2,opt,name=text,proto3" json:"text,omitempty"` + Level LogLevel `protobuf:"varint,1,opt,name=level,proto3,enum=provisioner.LogLevel" json:"level,omitempty"` + Output string `protobuf:"bytes,2,opt,name=output,proto3" json:"output,omitempty"` } func (x *Log) Reset() { @@ -564,9 +561,9 @@ func (x *Log) GetLevel() LogLevel { return LogLevel_TRACE } -func (x *Log) GetText() string { +func (x *Log) GetOutput() string { if x != nil { - return x.Text + return x.Output } return "" } @@ -1151,72 +1148,71 @@ var file_provisionersdk_proto_provisioner_proto_rawDesc = []byte{ 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x09, 0x52, 0x13, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x43, 0x6f, 0x6e, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x22, 0x15, 0x0a, 0x0a, 0x54, 0x79, 0x70, 0x65, 0x53, 0x79, 0x73, 0x74, 0x65, - 0x6d, 0x12, 0x07, 0x0a, 0x03, 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x46, 0x0a, 0x03, 0x4c, 0x6f, + 0x6d, 0x12, 0x07, 0x0a, 0x03, 0x48, 0x43, 0x4c, 0x10, 0x00, 0x22, 0x4a, 0x0a, 0x03, 0x4c, 0x6f, 0x67, 0x12, 0x2b, 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, - 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x12, - 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, - 0x78, 0x74, 0x22, 0xfc, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x1a, 0x27, 0x0a, 0x07, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, - 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, - 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, - 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x73, - 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 
0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, - 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, 0x70, 0x61, 0x72, 0x61, - 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x1a, 0x73, 0x0a, 0x08, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, - 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x39, - 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, - 0x61, 0x72, 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, - 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x12, 0x0a, - 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, - 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xe3, 0x02, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x46, 0x0a, - 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, - 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x18, 0x03, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, 0x55, 0x0a, 0x08, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x12, 0x33, 0x0a, - 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, - 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, - 0x65, 0x73, 0x1a, 0x77, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, - 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, - 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, - 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x3d, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, - 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x43, - 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, - 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, 0x4a, 0x0a, 0x08, 0x4c, - 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, 0x52, 
0x41, 0x43, 0x45, - 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, 0x01, 0x12, 0x08, 0x0a, - 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, 0x41, 0x52, 0x4e, 0x10, - 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, - 0x46, 0x41, 0x54, 0x41, 0x4c, 0x10, 0x05, 0x32, 0xa1, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, - 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x12, 0x42, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, - 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, - 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, - 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x12, 0x4e, 0x0a, 0x09, 0x50, - 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, - 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, - 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x42, 0x2d, 0x5a, 0x2b, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, - 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, - 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x33, + 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x16, + 0x0a, 0x06, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x22, 0xfc, 0x01, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, + 0x1a, 0x27, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, + 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x1a, 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, + 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x49, 0x0a, 0x11, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, + 0x65, 0x72, 0x5f, 0x73, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, + 0x32, 0x1c, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, + 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x52, 0x10, + 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x73, + 0x1a, 0x73, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, + 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, + 0x6f, 0x67, 0x12, 0x39, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, + 0x65, 0x48, 0x00, 0x52, 0x08, 0x63, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, + 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0x32, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 
0x63, + 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, + 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x22, 0xe3, 0x02, 0x0a, 0x09, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x1a, 0x85, 0x01, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, + 0x79, 0x12, 0x46, 0x0a, 0x10, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, 0x74, 0x65, 0x72, 0x5f, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, + 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x0f, 0x70, 0x61, 0x72, 0x61, 0x6d, 0x65, + 0x74, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, + 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, 0x65, 0x1a, + 0x55, 0x0a, 0x08, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, + 0x74, 0x61, 0x74, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x73, 0x74, 0x61, 0x74, + 0x65, 0x12, 0x33, 0x0a, 0x09, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x18, 0x02, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, + 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x52, 0x09, 0x72, 0x65, 0x73, + 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x77, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x24, 0x0a, 0x03, 0x6c, 0x6f, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x10, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x4c, 0x6f, + 0x67, 0x48, 0x00, 0x52, 0x03, 0x6c, 0x6f, 0x67, 0x12, 0x3d, 0x0a, 0x08, 0x63, 0x6f, 0x6d, 0x70, + 0x6c, 0x65, 0x74, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x70, 0x72, 0x6f, + 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, + 0x6f, 0x6e, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x48, 0x00, 0x52, 0x08, 0x63, + 0x6f, 0x6d, 0x70, 0x6c, 0x65, 0x74, 0x65, 0x42, 0x06, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x2a, + 0x3f, 0x0a, 0x08, 0x4c, 0x6f, 0x67, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x09, 0x0a, 0x05, 0x54, + 0x52, 0x41, 0x43, 0x45, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x44, 0x45, 0x42, 0x55, 0x47, 0x10, + 0x01, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x46, 0x4f, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x57, + 0x41, 0x52, 0x4e, 0x10, 0x03, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x04, + 0x32, 0xa1, 0x01, 0x0a, 0x0b, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, + 0x12, 0x42, 0x0a, 0x05, 0x50, 0x61, 0x72, 0x73, 0x65, 0x12, 0x1a, 0x2e, 0x70, 0x72, 0x6f, 0x76, + 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, + 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x65, 0x72, 0x2e, 0x50, 0x61, 0x72, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x30, 0x01, 0x12, 0x4e, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, + 0x6e, 0x12, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x50, 0x72, 0x6f, 
0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x1f, 0x2e, 0x70, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x2e, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x30, 0x01, 0x42, 0x2d, 0x5a, 0x2b, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, + 0x6f, 0x6d, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x63, 0x6f, 0x64, 0x65, 0x72, 0x2f, 0x70, + 0x72, 0x6f, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x72, 0x73, 0x64, 0x6b, 0x2f, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/provisionersdk/proto/provisioner.proto b/provisionersdk/proto/provisioner.proto index e59c3e5de7acb..9150c7d9df527 100644 --- a/provisionersdk/proto/provisioner.proto +++ b/provisionersdk/proto/provisioner.proto @@ -56,13 +56,12 @@ enum LogLevel { INFO = 2; WARN = 3; ERROR = 4; - FATAL = 5; } // Log represents output from a request. message Log { LogLevel level = 1; - string text = 2; + string output = 2; } // Parse consumes source-code from a directory to produce inputs. From c440af48a218218be6ce082441e55b92801d7ac1 Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Mon, 31 Jan 2022 19:58:49 +0000 Subject: [PATCH 16/17] Log streaming --- coderd/coderd.go | 12 ++ coderd/coderdtest/coderdtest.go | 5 + coderd/projects.go | 134 +++++++++++++++++ coderd/provisionerd.go | 127 +++++++++++++++- coderd/provisionerd_test.go | 21 +++ coderd/workspaces.go | 137 ++++++++++++++++++ codersdk/projects.go | 38 +++++ codersdk/workspaces.go | 30 ++++ go.sum | 3 + provisionerd/provisionerd.go | 12 +- provisionerd/provisionerd_test.go | 23 ++- .../provisionerdtest/provisionerdtest.go | 48 ++++++ 12 files changed, 578 insertions(+), 12 deletions(-) create mode 100644 coderd/provisionerd_test.go create mode 100644 provisionerd/provisionerdtest/provisionerdtest.go diff --git a/coderd/coderd.go b/coderd/coderd.go index 0f1839668de3d..70e8ce7988ced 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -16,21 +16,25 @@ import ( type Options struct { Logger slog.Logger Database database.Store + Pubsub database.Pubsub } // New constructs the Coder API into an HTTP handler. 
func New(options *Options) http.Handler { projects := &projects{ Database: options.Database, + Pubsub: options.Pubsub, } provisionerd := &provisionerd{ Database: options.Database, + Pubsub: options.Pubsub, } users := &users{ Database: options.Database, } workspaces := &workspaces{ Database: options.Database, + Pubsub: options.Pubsub, } r := chi.NewRouter() @@ -71,6 +75,10 @@ func New(options *Options) http.Handler { r.Route("/history", func(r chi.Router) { r.Get("/", projects.allProjectHistory) r.Post("/", projects.createProjectHistory) + r.Route("/{projecthistory}", func(r chi.Router) { + r.Use(httpmw.ExtractProjectHistoryParam(options.Database)) + r.Get("/logs", projects.projectHistoryLogs) + }) }) r.Get("/workspaces", workspaces.allWorkspacesForProject) }) @@ -93,6 +101,10 @@ func New(options *Options) http.Handler { r.Post("/", workspaces.createWorkspaceHistory) r.Get("/", workspaces.listAllWorkspaceHistory) r.Get("/latest", workspaces.latestWorkspaceHistory) + r.Route("/{workspacehistory}", func(r chi.Router) { + r.Use(httpmw.ExtractWorkspaceHistoryParam(options.Database)) + r.Get("/logs", workspaces.workspaceHistoryLogs) + }) }) }) }) diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index 1ecf069bce864..ab11a9d5d102f 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -62,6 +62,7 @@ func (s *Server) RandomInitialUser(t *testing.T) coderd.CreateInitialUserRequest func New(t *testing.T) Server { // This can be hotswapped for a live database instance. db := databasefake.New() + pubsub := database.NewPubsubInMemory() if os.Getenv("DB") != "" { connectionURL, close, err := postgres.Open() require.NoError(t, err) @@ -74,11 +75,15 @@ func New(t *testing.T) Server { err = database.Migrate(sqlDB) require.NoError(t, err) db = database.New(sqlDB) + + pubsub, err = database.NewPubsub(context.Background(), sqlDB, connectionURL) + require.NoError(t, err) } handler := coderd.New(&coderd.Options{ Logger: slogtest.Make(t, nil), Database: db, + Pubsub: pubsub, }) srv := httptest.NewServer(handler) serverURL, err := url.Parse(srv.URL) diff --git a/coderd/projects.go b/coderd/projects.go index 5ef2ea5067b6a..157da1ccd9651 100644 --- a/coderd/projects.go +++ b/coderd/projects.go @@ -3,7 +3,9 @@ package coderd import ( "archive/tar" "bytes" + "context" "database/sql" + "encoding/json" "errors" "fmt" "net/http" @@ -34,6 +36,14 @@ type ProjectHistory struct { StorageMethod database.ProjectStorageMethod `json:"storage_method"` } +type ProjectHistoryLog struct { + ID uuid.UUID + CreatedAt time.Time `json:"created_at"` + Source database.LogSource `json:"log_source"` + Level database.LogLevel `json:"log_level"` + Output string `json:"output"` +} + // CreateProjectRequest enables callers to create a new Project. type CreateProjectRequest struct { Name string `json:"name" validate:"username,required"` @@ -48,6 +58,7 @@ type CreateProjectVersionRequest struct { type projects struct { Database database.Store + Pubsub database.Pubsub } // Lists all projects the authenticated user has access to. @@ -222,6 +233,115 @@ func (p *projects) createProjectHistory(rw http.ResponseWriter, r *http.Request) render.JSON(rw, r, convertProjectHistory(history)) } +func (p *projects) projectHistoryLogs(rw http.ResponseWriter, r *http.Request) { + projectHistory := httpmw.ProjectHistoryParam(r) + follow := r.URL.Query().Has("follow") + + if !follow { + // If we're not attempting to follow logs, + // we can exit immediately! 
+ logs, err := p.Database.GetProjectHistoryLogsByIDBefore(r.Context(), database.GetProjectHistoryLogsByIDBeforeParams{ + ProjectHistoryID: projectHistory.ID, + CreatedAt: time.Now(), + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get project history logs: %s", err), + }) + return + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, logs) + return + } + + // We only want to fetch messages before subscribe, so that + // there aren't any duplicates. + timeBeforeSubscribe := database.Now() + // Start subscribing immediately, otherwise we could miss messages + // that occur during the database read. + newLogNotify := make(chan ProjectHistoryLog, 128) + cancelNewLogNotify, err := p.Pubsub.Subscribe(projectHistoryLogsChannel(projectHistory.ID), func(ctx context.Context, message []byte) { + var logs []database.ProjectHistoryLog + err := json.Unmarshal(message, &logs) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("parse logs from publish: %s", err), + }) + return + } + for _, log := range logs { + // If many logs are sent during our database query, this channel + // could overflow. The Go scheduler would decide the order to send + // logs in at that point, which is an unfortunate (but not fatal) + // flaw of this approach. + // + // This is an extremely unlikely outcome given reasonable database + // query times. + newLogNotify <- convertProjectHistoryLog(log) + } + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("listen for new logs: %s", err), + }) + return + } + defer cancelNewLogNotify() + + // In-between here logs could be missed! + projectHistoryLogs, err := p.Database.GetProjectHistoryLogsByIDBefore(r.Context(), database.GetProjectHistoryLogsByIDBeforeParams{ + ProjectHistoryID: projectHistory.ID, + CreatedAt: timeBeforeSubscribe, + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get project history logs: %s", err), + }) + return + } + + // "follow" uses the ndjson format to stream data. + // See: https://canjs.com/doc/can-ndjson-stream.html + rw.Header().Set("Content-Type", "application/stream+json") + rw.WriteHeader(http.StatusOK) + rw.(http.Flusher).Flush() + + // The Go stdlib JSON encoder appends a newline character after message write. 
+ encoder := json.NewEncoder(rw) + for _, projectHistoryLog := range projectHistoryLogs { + // JSON separated by a newline + err = encoder.Encode(convertProjectHistoryLog(projectHistoryLog)) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("marshal: %s", err), + }) + return + } + } + + for { + select { + case <-r.Context().Done(): + return + case log := <-newLogNotify: + err = encoder.Encode(log) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("marshal follow: %s", err), + }) + return + } + } + } +} + func convertProjectHistory(history database.ProjectHistory) ProjectHistory { return ProjectHistory{ ID: history.ID, @@ -231,3 +351,17 @@ func convertProjectHistory(history database.ProjectHistory) ProjectHistory { Name: history.Name, } } + +func convertProjectHistoryLog(log database.ProjectHistoryLog) ProjectHistoryLog { + return ProjectHistoryLog{ + ID: log.ID, + CreatedAt: log.CreatedAt, + Source: log.Source, + Level: log.Level, + Output: log.Output, + } +} + +func projectHistoryLogsChannel(projectHistoryID uuid.UUID) string { + return fmt.Sprintf("project-history-logs:%s", projectHistoryID) +} diff --git a/coderd/provisionerd.go b/coderd/provisionerd.go index c5f186151a50d..e00c70af7d6c7 100644 --- a/coderd/provisionerd.go +++ b/coderd/provisionerd.go @@ -8,6 +8,7 @@ import ( "fmt" "net/http" "reflect" + "time" "golang.org/x/xerrors" "storj.io/drpc/drpcmux" @@ -28,6 +29,7 @@ import ( type provisionerd struct { Database database.Store + Pubsub database.Pubsub } func (p *provisionerd) listen(rw http.ResponseWriter, r *http.Request) { @@ -59,6 +61,7 @@ func (p *provisionerd) listen(rw http.ResponseWriter, r *http.Request) { err = proto.DRPCRegisterProvisionerDaemon(mux, &provisionerdServer{ ID: daemon.ID, Database: p.Database, + Pubsub: p.Pubsub, }) if err != nil { _ = conn.Close(websocket.StatusInternalError, fmt.Sprintf("drpc register provisioner daemon: %s", err)) @@ -73,7 +76,7 @@ func (p *provisionerd) listen(rw http.ResponseWriter, r *http.Request) { // The input for a "workspace_provision" job. type workspaceProvisionJob struct { - WorkspaceHistoryID uuid.UUID `json:"workspace_id"` + WorkspaceHistoryID uuid.UUID `json:"workspace_history_id"` } // The input for a "project_import" job. 
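When ?follow is set, the handler above responds with application/stream+json: first a snapshot of rows written before the subscription, then live entries as they are published. The sketch below is a hedged client-side consumer of that stream; the base URL, organization, project, and history names are placeholders, authentication is omitted, and it assumes the level and source enums serialize as strings.

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
)

// Subset of the ProjectHistoryLog fields defined above; ID and created_at are
// omitted for brevity.
type projectHistoryLog struct {
	Source string `json:"log_source"`
	Level  string `json:"log_level"`
	Output string `json:"output"`
}

func followLogs(ctx context.Context, url string) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status: %d", res.StatusCode)
	}

	// The handler writes one JSON object per line, so a streaming decoder can
	// consume entries as they arrive until the server closes the connection.
	decoder := json.NewDecoder(res.Body)
	for {
		var entry projectHistoryLog
		if err := decoder.Decode(&entry); err != nil {
			return err // io.EOF once the stream ends
		}
		fmt.Printf("[%s/%s] %s\n", entry.Source, entry.Level, entry.Output)
	}
}

func main() {
	// Placeholder address and names; the path shape matches the route added
	// in coderd.go and the codersdk helper later in this patch.
	_ = followLogs(context.Background(),
		"http://localhost:3000/api/v2/projects/my-org/my-project/history/my-history/logs?follow")
}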
@@ -85,6 +88,7 @@ type projectImportJob struct { type provisionerdServer struct { ID uuid.UUID Database database.Store + Pubsub database.Pubsub } func (s *provisionerdServer) AcquireJob(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { @@ -235,13 +239,104 @@ func (s *provisionerdServer) UpdateJob(stream proto.DRPCProvisionerDaemon_Update if err != nil { return xerrors.Errorf("parse job id: %w", err) } - err = s.Database.UpdateProvisionerJobByID(context.Background(), database.UpdateProvisionerJobByIDParams{ + job, err := s.Database.GetProvisionerJobByID(stream.Context(), parsedID) + if err != nil { + return xerrors.Errorf("get job: %w", err) + } + if !job.WorkerID.Valid { + return errors.New("job isn't running yet") + } + if job.WorkerID.UUID.String() != s.ID.String() { + return errors.New("you don't own this job") + } + + err = s.Database.UpdateProvisionerJobByID(stream.Context(), database.UpdateProvisionerJobByIDParams{ ID: parsedID, UpdatedAt: database.Now(), }) if err != nil { return xerrors.Errorf("update job: %w", err) } + switch job.Type { + case database.ProvisionerJobTypeProjectImport: + if len(update.ProjectImportLogs) == 0 { + continue + } + var input projectImportJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return xerrors.Errorf("unmarshal job input %q: %s", job.Input, err) + } + insertParams := database.InsertProjectHistoryLogsParams{ + ProjectHistoryID: input.ProjectHistoryID, + } + for _, log := range update.ProjectImportLogs { + logLevel, err := convertLogLevel(log.Level) + if err != nil { + return xerrors.Errorf("convert log level: %w", err) + } + logSource, err := convertLogSource(log.Source) + if err != nil { + return xerrors.Errorf("convert log source: %w", err) + } + insertParams.ID = append(insertParams.ID, uuid.New()) + insertParams.CreatedAt = append(insertParams.CreatedAt, time.UnixMilli(log.CreatedAt)) + insertParams.Level = append(insertParams.Level, logLevel) + insertParams.Source = append(insertParams.Source, logSource) + insertParams.Output = append(insertParams.Output, log.Output) + } + logs, err := s.Database.InsertProjectHistoryLogs(stream.Context(), insertParams) + if err != nil { + return xerrors.Errorf("insert project logs: %w", err) + } + data, err := json.Marshal(logs) + if err != nil { + return xerrors.Errorf("marshal project log: %w", err) + } + err = s.Pubsub.Publish(projectHistoryLogsChannel(input.ProjectHistoryID), data) + if err != nil { + return xerrors.Errorf("publish history log: %w", err) + } + case database.ProvisionerJobTypeWorkspaceProvision: + if len(update.WorkspaceProvisionLogs) == 0 { + continue + } + var input workspaceProvisionJob + err = json.Unmarshal(job.Input, &input) + if err != nil { + return xerrors.Errorf("unmarshal job input %q: %s", job.Input, err) + } + insertParams := database.InsertWorkspaceHistoryLogsParams{ + WorkspaceHistoryID: input.WorkspaceHistoryID, + } + for _, log := range update.WorkspaceProvisionLogs { + logLevel, err := convertLogLevel(log.Level) + if err != nil { + return xerrors.Errorf("convert log level: %w", err) + } + logSource, err := convertLogSource(log.Source) + if err != nil { + return xerrors.Errorf("convert log source: %w", err) + } + insertParams.ID = append(insertParams.ID, uuid.New()) + insertParams.CreatedAt = append(insertParams.CreatedAt, time.UnixMilli(log.CreatedAt)) + insertParams.Level = append(insertParams.Level, logLevel) + insertParams.Source = append(insertParams.Source, logSource) + insertParams.Output = append(insertParams.Output, log.Output) + 
} + logs, err := s.Database.InsertWorkspaceHistoryLogs(stream.Context(), insertParams) + if err != nil { + return xerrors.Errorf("insert workspace logs: %w", err) + } + data, err := json.Marshal(logs) + if err != nil { + return xerrors.Errorf("marshal project log: %w", err) + } + err = s.Pubsub.Publish(workspaceHistoryLogsChannel(input.WorkspaceHistoryID), data) + if err != nil { + return xerrors.Errorf("publish history log: %w", err) + } + } } } @@ -459,3 +554,31 @@ func convertParameterDestinationScheme(destinationScheme sdkproto.ParameterDesti return database.ParameterDestinationScheme(""), xerrors.Errorf("unknown parameter destination scheme: %d", destinationScheme) } } + +func convertLogLevel(logLevel sdkproto.LogLevel) (database.LogLevel, error) { + switch logLevel { + case sdkproto.LogLevel_TRACE: + return database.LogLevelTrace, nil + case sdkproto.LogLevel_DEBUG: + return database.LogLevelDebug, nil + case sdkproto.LogLevel_INFO: + return database.LogLevelInfo, nil + case sdkproto.LogLevel_WARN: + return database.LogLevelWarn, nil + case sdkproto.LogLevel_ERROR: + return database.LogLevelError, nil + default: + return database.LogLevel(""), xerrors.Errorf("unknown log level: %d", logLevel) + } +} + +func convertLogSource(logSource proto.LogSource) (database.LogSource, error) { + switch logSource { + case proto.LogSource_PROVISIONER_DAEMON: + return database.LogSourceProvisionerDaemon, nil + case proto.LogSource_PROVISIONER: + return database.LogSourceProvisioner, nil + default: + return database.LogSource(""), xerrors.Errorf("unknown log source: %d", logSource) + } +} diff --git a/coderd/provisionerd_test.go b/coderd/provisionerd_test.go new file mode 100644 index 0000000000000..cdfa23701a9c6 --- /dev/null +++ b/coderd/provisionerd_test.go @@ -0,0 +1,21 @@ +package coderd_test + +import ( + "testing" + "time" + + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/provisionerd/provisionerdtest" +) + +func TestProvisionerd(t *testing.T) { + t.Parallel() + t.Run("Listen", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + _ = server.RandomInitialUser(t) + _ = provisionerdtest.New(t, server.Client) + + time.Sleep(time.Second) + }) +} diff --git a/coderd/workspaces.go b/coderd/workspaces.go index 2a724daf9eab0..47961ff1dbdb8 100644 --- a/coderd/workspaces.go +++ b/coderd/workspaces.go @@ -11,6 +11,7 @@ import ( "github.com/go-chi/render" "github.com/google/uuid" + "github.com/moby/moby/pkg/namesgenerator" "golang.org/x/xerrors" "github.com/coder/coder/database" @@ -26,6 +27,7 @@ type Workspace database.Workspace // Iterate on before/after to determine a chronological history. type WorkspaceHistory struct { ID uuid.UUID `json:"id"` + Name string `json:"name"` CreatedAt time.Time `json:"created_at"` UpdatedAt time.Time `json:"updated_at"` CompletedAt time.Time `json:"completed_at"` @@ -37,6 +39,14 @@ type WorkspaceHistory struct { Initiator string `json:"initiator"` } +type WorkspaceHistoryLog struct { + ID uuid.UUID + CreatedAt time.Time `json:"created_at"` + Source database.LogSource `json:"log_source"` + Level database.LogLevel `json:"log_level"` + Output string `json:"output"` +} + // CreateWorkspaceRequest provides options for creating a new workspace. type CreateWorkspaceRequest struct { ProjectID uuid.UUID `json:"project_id" validate:"required"` @@ -51,6 +61,7 @@ type CreateWorkspaceHistoryRequest struct { type workspaces struct { Database database.Store + Pubsub database.Pubsub } // Returns all workspaces across all projects and organizations. 
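The UpdateJob branch above and the /logs?follow handlers only meet at the pubsub layer: both sides derive the channel name from the history ID, and the payload is the JSON-encoded slice of rows that were just inserted. Below is a hedged sketch of that contract using the in-memory pubsub from the test helper; the row type is trimmed to a single field and the output text is a placeholder.

package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/coder/coder/database"
	"github.com/google/uuid"
)

// Trimmed stand-in for database.WorkspaceHistoryLog; the real row also carries
// ID, CreatedAt, Source, and Level.
type logRow struct {
	Output string `json:"output"`
}

// Mirrors the helper added in workspaces.go: publisher and subscriber must
// agree on this name exactly.
func workspaceHistoryLogsChannel(workspaceHistoryID uuid.UUID) string {
	return fmt.Sprintf("workspace-history-logs:%s", workspaceHistoryID)
}

func main() {
	pubsub := database.NewPubsubInMemory()
	historyID := uuid.New()
	got := make(chan string, 1)

	// Subscriber side: what each follower of the ndjson endpoint registers.
	cancel, err := pubsub.Subscribe(workspaceHistoryLogsChannel(historyID), func(_ context.Context, message []byte) {
		var rows []logRow
		if err := json.Unmarshal(message, &rows); err != nil {
			return
		}
		for _, row := range rows {
			got <- row.Output
		}
	})
	if err != nil {
		panic(err)
	}
	defer cancel()

	// Publisher side: what UpdateJob does after batch-inserting the rows.
	payload, _ := json.Marshal([]logRow{{Output: "applying resources..."}})
	if err := pubsub.Publish(workspaceHistoryLogsChannel(historyID), payload); err != nil {
		panic(err)
	}

	fmt.Println("follower saw:", <-got)
}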
@@ -335,6 +346,7 @@ func (w *workspaces) createWorkspaceHistory(rw http.ResponseWriter, r *http.Requ CreatedAt: database.Now(), UpdatedAt: database.Now(), WorkspaceID: workspace.ID, + Name: namesgenerator.GetRandomName(1), ProjectHistoryID: projectHistory.ID, BeforeID: priorHistoryID, Initiator: user.ID, @@ -373,6 +385,116 @@ func (w *workspaces) createWorkspaceHistory(rw http.ResponseWriter, r *http.Requ render.JSON(rw, r, convertWorkspaceHistory(workspaceHistory)) } +func (w *workspaces) workspaceHistoryLogs(rw http.ResponseWriter, r *http.Request) { + workspaceHistory := httpmw.WorkspaceHistoryParam(r) + follow := r.URL.Query().Has("follow") + + if !follow { + // If we're not attempting to follow logs, + // we can exit immediately! + logs, err := w.Database.GetWorkspaceHistoryLogsByIDBefore(r.Context(), database.GetWorkspaceHistoryLogsByIDBeforeParams{ + WorkspaceHistoryID: workspaceHistory.ID, + CreatedAt: time.Now(), + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get workspace history logs: %s", err), + }) + return + } + render.Status(r, http.StatusOK) + render.JSON(rw, r, logs) + return + } + + // We only want to fetch messages before subscribe, so that + // there aren't any duplicates. + timeBeforeSubscribe := database.Now() + // Start subscribing immediately, otherwise we could miss messages + // that occur during the database read. + newLogNotify := make(chan WorkspaceHistoryLog, 128) + cancelNewLogNotify, err := w.Pubsub.Subscribe(workspaceHistoryLogsChannel(workspaceHistory.ID), func(ctx context.Context, message []byte) { + var logs []database.WorkspaceHistoryLog + err := json.Unmarshal(message, &logs) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("parse logs from publish: %s", err), + }) + return + } + for _, log := range logs { + // If many logs are sent during our database query, this channel + // could overflow. The Go scheduler would decide the order to send + // logs in at that point, which is an unfortunate (but not fatal) + // flaw of this approach. + // + // This is an extremely unlikely outcome given reasonable database + // query times. + newLogNotify <- convertWorkspaceHistoryLog(log) + } + }) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("listen for new logs: %s", err), + }) + return + } + defer cancelNewLogNotify() + + workspaceHistoryLogs, err := w.Database.GetWorkspaceHistoryLogsByIDBefore(r.Context(), database.GetWorkspaceHistoryLogsByIDBeforeParams{ + WorkspaceHistoryID: workspaceHistory.ID, + CreatedAt: timeBeforeSubscribe, + }) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get workspace history logs: %s", err), + }) + return + } + + // "follow" uses the ndjson format to stream data. + // See: https://canjs.com/doc/can-ndjson-stream.html + rw.Header().Set("Content-Type", "application/stream+json") + rw.WriteHeader(http.StatusOK) + rw.(http.Flusher).Flush() + + // The Go stdlib JSON encoder appends a newline character after message write. 
+ encoder := json.NewEncoder(rw) + for _, workspaceHistoryLog := range workspaceHistoryLogs { + // JSON separated by a newline + err = encoder.Encode(convertWorkspaceHistoryLog(workspaceHistoryLog)) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("marshal: %s", err), + }) + return + } + rw.(http.Flusher).Flush() + } + + for { + select { + case <-r.Context().Done(): + return + case log := <-newLogNotify: + err = encoder.Encode(log) + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("marshal follow: %s", err), + }) + return + } + rw.(http.Flusher).Flush() + } + } +} + // Converts the internal workspace representation to a public external-facing model. func convertWorkspace(workspace database.Workspace) Workspace { return Workspace(workspace) @@ -383,6 +505,7 @@ func convertWorkspaceHistory(workspaceHistory database.WorkspaceHistory) Workspa //nolint:unconvert return WorkspaceHistory(WorkspaceHistory{ ID: workspaceHistory.ID, + Name: workspaceHistory.Name, CreatedAt: workspaceHistory.CreatedAt, UpdatedAt: workspaceHistory.UpdatedAt, CompletedAt: workspaceHistory.CompletedAt.Time, @@ -394,3 +517,17 @@ func convertWorkspaceHistory(workspaceHistory database.WorkspaceHistory) Workspa Initiator: workspaceHistory.Initiator, }) } + +func convertWorkspaceHistoryLog(workspaceHistoryLog database.WorkspaceHistoryLog) WorkspaceHistoryLog { + return WorkspaceHistoryLog{ + ID: workspaceHistoryLog.ID, + CreatedAt: workspaceHistoryLog.CreatedAt, + Source: workspaceHistoryLog.Source, + Level: workspaceHistoryLog.Level, + Output: workspaceHistoryLog.Output, + } +} + +func workspaceHistoryLogsChannel(workspaceHistoryID uuid.UUID) string { + return fmt.Sprintf("workspace-history-logs:%s", workspaceHistoryID) +} diff --git a/codersdk/projects.go b/codersdk/projects.go index a075ebee084db..56a50228fdada 100644 --- a/codersdk/projects.go +++ b/codersdk/projects.go @@ -84,3 +84,41 @@ func (c *Client) CreateProjectHistory(ctx context.Context, organization, project var projectVersion coderd.ProjectHistory return projectVersion, json.NewDecoder(res.Body).Decode(&projectVersion) } + +func (c *Client) ProjectHistoryLogs(ctx context.Context, organization, project, history string) ([]coderd.ProjectHistoryLog, error) { + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projects/%s/%s/history/%s/logs", organization, project, history), nil) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, readBodyAsError(res) + } + var logs []coderd.ProjectHistoryLog + return logs, json.NewDecoder(res.Body).Decode(&logs) +} + +func (c *Client) FollowProjectHistoryLogs(ctx context.Context, organization, project, history string) (<-chan coderd.ProjectHistoryLog, error) { + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/projects/%s/%s/history/%s/logs?follow", organization, project, history), nil) + if err != nil { + return nil, err + } + if res.StatusCode != http.StatusOK { + defer res.Body.Close() + return nil, readBodyAsError(res) + } + + logs := make(chan coderd.ProjectHistoryLog) + decoder := json.NewDecoder(res.Body) + go func() { + defer close(logs) + var log coderd.ProjectHistoryLog + for { + err = decoder.Decode(&log) + if err != nil { + return + } + } + }() + return logs, nil +} diff --git a/codersdk/workspaces.go b/codersdk/workspaces.go index 937f58e861b11..256aa244503e6 100644 --- a/codersdk/workspaces.go +++ 
b/codersdk/workspaces.go @@ -127,3 +127,33 @@ func (c *Client) CreateWorkspaceHistory(ctx context.Context, owner, workspace st var workspaceHistory coderd.WorkspaceHistory return workspaceHistory, json.NewDecoder(res.Body).Decode(&workspaceHistory) } + +func (c *Client) FollowWorkspaceHistoryLogs(ctx context.Context, owner, workspace, history string) (<-chan coderd.WorkspaceHistoryLog, error) { + res, err := c.request(ctx, http.MethodGet, fmt.Sprintf("/api/v2/workspaces/%s/%s/history/%s/logs?follow", owner, workspace, history), nil) + if err != nil { + return nil, err + } + if res.StatusCode != http.StatusOK { + defer res.Body.Close() + return nil, readBodyAsError(res) + } + + logs := make(chan coderd.WorkspaceHistoryLog) + decoder := json.NewDecoder(res.Body) + go func() { + defer close(logs) + var log coderd.WorkspaceHistoryLog + for { + err = decoder.Decode(&log) + if err != nil { + return + } + select { + case <-ctx.Done(): + return + case logs <- log: + } + } + }() + return logs, nil +} diff --git a/go.sum b/go.sum index 04b6656f39b06..c1e27f32c631a 100644 --- a/go.sum +++ b/go.sum @@ -688,6 +688,8 @@ github.com/hashicorp/go-version v1.4.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09 github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hc-install v0.3.1 h1:VIjllE6KyAI1A244G8kTaHXy+TL5/XYzvrtFi8po/Yk= +github.com/hashicorp/hc-install v0.3.1/go.mod h1:3LCdWcCDS1gaHC9mhHCGbkYfoY6vdsKohGjugbZdZak= github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= @@ -1301,6 +1303,7 @@ golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index ec3526a4e1737..d8fbb70892a20 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -35,15 +35,14 @@ type Options struct { } // New creates and starts a provisioner daemon. 
-func New(apiClient *codersdk.Client, provisioners Provisioners, opts *Options) io.Closer { +func New(apiClient *codersdk.Client, opts *Options) io.Closer { if opts.PollInterval == 0 { opts.PollInterval = 5 * time.Second } ctx, ctxCancel := context.WithCancel(context.Background()) daemon := &provisionerDaemon{ - apiClient: apiClient, - provisioners: provisioners, - opts: opts, + apiClient: apiClient, + opts: opts, closeContext: ctx, closeContextCancel: ctxCancel, @@ -54,8 +53,7 @@ func New(apiClient *codersdk.Client, provisioners Provisioners, opts *Options) i } type provisionerDaemon struct { - provisioners Provisioners - opts *Options + opts *Options apiClient *codersdk.Client connectMutex sync.Mutex @@ -157,7 +155,7 @@ func (p *provisionerDaemon) acquireJob() { ) // It's safe to cast this ProvisionerType. This data is coming directly from coderd. - provisioner, hasProvisioner := p.provisioners[database.ProvisionerType(p.activeJob.Provisioner)] + provisioner, hasProvisioner := p.opts.Provisioners[database.ProvisionerType(p.activeJob.Provisioner)] if !hasProvisioner { p.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", p.activeJob.Provisioner)) return diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 8f725b8cbf3a4..7372279b02281 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -4,6 +4,7 @@ import ( "archive/tar" "bytes" "context" + "fmt" "testing" "time" @@ -74,7 +75,7 @@ resource "null_resource" "dev" {}` user := server.RandomInitialUser(t) project, workspace := setupProjectAndWorkspace(t, server.Client, user) projectVersion := setupProjectVersion(t, server.Client, user, project) - _, err := server.Client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + workspaceHistory, err := server.Client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ ProjectHistoryID: projectVersion.ID, Transition: database.WorkspaceTransitionCreate, }) @@ -97,13 +98,29 @@ resource "null_resource" "dev" {}` }() api := provisionerd.New(server.Client, provisionerd.Provisioners{ - string(database.ProvisionerTypeTerraform): proto.NewDRPCProvisionerClient(drpcconn.New(clientPipe)), + database.ProvisionerTypeTerraform: proto.NewDRPCProvisionerClient(drpcconn.New(clientPipe)), }, &provisionerd.Options{ Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), PollInterval: 50 * time.Millisecond, WorkDirectory: t.TempDir(), }) defer api.Close() - time.Sleep(time.Millisecond * 2000) + + time.Sleep(time.Millisecond * 400) + + logs, err := server.Client.FollowWorkspaceHistoryLogs(context.Background(), "me", workspace.Name, workspaceHistory.Name) + require.NoError(t, err) + go func() { + for { + select { + case <-ctx.Done(): + return + case log := <-logs: + fmt.Printf("Got a log! 
%+v\n", log.Output) + } + } + }() + + time.Sleep(time.Millisecond * 1000) }) } diff --git a/provisionerd/provisionerdtest/provisionerdtest.go b/provisionerd/provisionerdtest/provisionerdtest.go new file mode 100644 index 0000000000000..dda556f263123 --- /dev/null +++ b/provisionerd/provisionerdtest/provisionerdtest.go @@ -0,0 +1,48 @@ +package provisionerdtest + +import ( + "context" + "io" + "testing" + "time" + + "github.com/stretchr/testify/require" + "storj.io/drpc/drpcconn" + + "cdr.dev/slog" + "cdr.dev/slog/sloggers/slogtest" + "github.com/coder/coder/codersdk" + "github.com/coder/coder/database" + "github.com/coder/coder/provisioner/terraform" + "github.com/coder/coder/provisionerd" + "github.com/coder/coder/provisionersdk" + "github.com/coder/coder/provisionersdk/proto" +) + +// New creates a provisionerd instance with provisioners registered. +func New(t *testing.T, client *codersdk.Client) io.Closer { + tfClient, tfServer := provisionersdk.TransportPipe() + ctx, cancelFunc := context.WithCancel(context.Background()) + t.Cleanup(func() { + _ = tfClient.Close() + _ = tfServer.Close() + cancelFunc() + }) + go func() { + err := terraform.Serve(ctx, &terraform.ServeOptions{ + ServeOptions: &provisionersdk.ServeOptions{ + Transport: tfServer, + }, + }) + require.NoError(t, err) + }() + + return provisionerd.New(client, &provisionerd.Options{ + Logger: slogtest.Make(t, nil).Named("provisionerd").Leveled(slog.LevelDebug), + PollInterval: 50 * time.Millisecond, + Provisioners: provisionerd.Provisioners{ + database.ProvisionerTypeTerraform: proto.NewDRPCProvisionerClient(drpcconn.New(tfClient)), + }, + WorkDirectory: t.TempDir(), + }) +} From b495991e0941c14a82ea9878961f0fb1e915976c Mon Sep 17 00:00:00 2001 From: Kyle Carberry Date: Tue, 1 Feb 2022 04:39:29 +0000 Subject: [PATCH 17/17] Refactor provisionerd tests to disconnect from coderd --- coderd/coderd.go | 9 +- coderd/coderdtest/coderdtest.go | 38 ++ coderd/coderdtest/coderdtest_test.go | 1 + coderd/provisionerd_test.go | 21 - coderd/{provisionerd.go => provisioners.go} | 23 +- coderd/provisioners_test.go | 81 +++ codersdk/provisionerd.go | 71 --- codersdk/provisioners.go | 51 ++ codersdk/provisioners_test.go | 20 + database/databasefake/databasefake.go | 7 + database/querier.go | 1 + database/query.sql | 6 + database/query.sql.go | 36 ++ peerbroker/dial_test.go | 3 +- peerbroker/listen.go | 6 +- peerbroker/listen_test.go | 3 +- provisioner/terraform/parse_test.go | 5 +- provisioner/terraform/provision.go | 4 - provisioner/terraform/provision_test.go | 5 +- provisionerd/provisionerd.go | 122 ++-- provisionerd/provisionerd_test.go | 534 +++++++++++++++--- .../provisionerdtest/provisionerdtest.go | 48 -- provisionersdk/serve.go | 30 +- provisionersdk/serve_test.go | 7 +- provisionersdk/transport.go | 76 ++- 25 files changed, 866 insertions(+), 342 deletions(-) delete mode 100644 coderd/provisionerd_test.go rename coderd/{provisionerd.go => provisioners.go} (97%) create mode 100644 coderd/provisioners_test.go delete mode 100644 codersdk/provisionerd.go create mode 100644 codersdk/provisioners.go create mode 100644 codersdk/provisioners_test.go delete mode 100644 provisionerd/provisionerdtest/provisionerdtest.go diff --git a/coderd/coderd.go b/coderd/coderd.go index 70e8ce7988ced..76d56fa7f67fd 100644 --- a/coderd/coderd.go +++ b/coderd/coderd.go @@ -25,7 +25,7 @@ func New(options *Options) http.Handler { Database: options.Database, Pubsub: options.Pubsub, } - provisionerd := &provisionerd{ + provisioners := &provisioners{ Database: 
options.Database, Pubsub: options.Pubsub, } @@ -46,7 +46,8 @@ func New(options *Options) http.Handler { }) r.Post("/login", users.loginWithPassword) r.Post("/logout", users.logout) - r.Get("/provisionerd", provisionerd.listen) + r.Get("/provisionerd", provisioners.listen) + // Used for setup. r.Post("/user", users.createInitialUser) r.Route("/users", func(r chi.Router) { @@ -109,6 +110,10 @@ func New(options *Options) http.Handler { }) }) }) + + r.Route("/provisioners", func(r chi.Router) { + r.Get("/daemons", provisioners.listDaemons) + }) }) r.NotFound(site.Handler().ServeHTTP) return r diff --git a/coderd/coderdtest/coderdtest.go b/coderd/coderdtest/coderdtest.go index ab11a9d5d102f..554882b6359b6 100644 --- a/coderd/coderdtest/coderdtest.go +++ b/coderd/coderdtest/coderdtest.go @@ -3,13 +3,16 @@ package coderdtest import ( "context" "database/sql" + "io" "net/http/httptest" "net/url" "os" "testing" + "time" "github.com/stretchr/testify/require" + "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" "github.com/coder/coder/coderd" "github.com/coder/coder/codersdk" @@ -17,6 +20,10 @@ import ( "github.com/coder/coder/database" "github.com/coder/coder/database/databasefake" "github.com/coder/coder/database/postgres" + "github.com/coder/coder/provisioner/terraform" + "github.com/coder/coder/provisionerd" + "github.com/coder/coder/provisionersdk" + "github.com/coder/coder/provisionersdk/proto" ) // Server represents a test instance of coderd. @@ -57,6 +64,37 @@ func (s *Server) RandomInitialUser(t *testing.T) coderd.CreateInitialUserRequest return req } +func (s *Server) AddProvisionerd(t *testing.T) io.Closer { + tfClient, tfServer := provisionersdk.TransportPipe() + ctx, cancelFunc := context.WithCancel(context.Background()) + t.Cleanup(func() { + _ = tfClient.Close() + _ = tfServer.Close() + cancelFunc() + }) + go func() { + err := terraform.Serve(ctx, &terraform.ServeOptions{ + ServeOptions: &provisionersdk.ServeOptions{ + Listener: tfServer, + }, + }) + require.NoError(t, err) + }() + + closer := provisionerd.New(s.Client.ProvisionerDaemonClient, &provisionerd.Options{ + Logger: slogtest.Make(t, nil).Named("provisionerd").Leveled(slog.LevelInfo), + PollInterval: 50 * time.Millisecond, + Provisioners: provisionerd.Provisioners{ + string(database.ProvisionerTypeTerraform): proto.NewDRPCProvisionerClient(provisionersdk.Conn(tfClient)), + }, + WorkDirectory: t.TempDir(), + }) + t.Cleanup(func() { + _ = closer.Close() + }) + return closer +} + // New constructs a new coderd test instance. This returned Server // should contain no side-effects. 
func New(t *testing.T) Server { diff --git a/coderd/coderdtest/coderdtest_test.go b/coderd/coderdtest/coderdtest_test.go index e36d1c1408cd1..b7312f96864fc 100644 --- a/coderd/coderdtest/coderdtest_test.go +++ b/coderd/coderdtest/coderdtest_test.go @@ -16,4 +16,5 @@ func TestNew(t *testing.T) { t.Parallel() server := coderdtest.New(t) _ = server.RandomInitialUser(t) + _ = server.AddProvisionerd(t) } diff --git a/coderd/provisionerd_test.go b/coderd/provisionerd_test.go deleted file mode 100644 index cdfa23701a9c6..0000000000000 --- a/coderd/provisionerd_test.go +++ /dev/null @@ -1,21 +0,0 @@ -package coderd_test - -import ( - "testing" - "time" - - "github.com/coder/coder/coderd/coderdtest" - "github.com/coder/coder/provisionerd/provisionerdtest" -) - -func TestProvisionerd(t *testing.T) { - t.Parallel() - t.Run("Listen", func(t *testing.T) { - t.Parallel() - server := coderdtest.New(t) - _ = server.RandomInitialUser(t) - _ = provisionerdtest.New(t, server.Client) - - time.Sleep(time.Second) - }) -} diff --git a/coderd/provisionerd.go b/coderd/provisioners.go similarity index 97% rename from coderd/provisionerd.go rename to coderd/provisioners.go index e00c70af7d6c7..3da0efb64758f 100644 --- a/coderd/provisionerd.go +++ b/coderd/provisioners.go @@ -14,6 +14,7 @@ import ( "storj.io/drpc/drpcmux" "storj.io/drpc/drpcserver" + "github.com/go-chi/render" "github.com/google/uuid" "github.com/hashicorp/yamux" "github.com/moby/moby/pkg/namesgenerator" @@ -27,12 +28,30 @@ import ( "nhooyr.io/websocket" ) -type provisionerd struct { +type ProvisionerDaemon database.ProvisionerDaemon + +type provisioners struct { Database database.Store Pubsub database.Pubsub } -func (p *provisionerd) listen(rw http.ResponseWriter, r *http.Request) { +func (p *provisioners) listDaemons(rw http.ResponseWriter, r *http.Request) { + daemons, err := p.Database.GetProvisionerDaemons(r.Context()) + if errors.Is(err, sql.ErrNoRows) { + err = nil + } + if err != nil { + httpapi.Write(rw, http.StatusInternalServerError, httpapi.Response{ + Message: fmt.Sprintf("get provisioner daemons: %s", err), + }) + return + } + + render.Status(r, http.StatusOK) + render.JSON(rw, r, daemons) +} + +func (p *provisioners) listen(rw http.ResponseWriter, r *http.Request) { conn, err := websocket.Accept(rw, r, nil) if err != nil { httpapi.Write(rw, http.StatusBadRequest, httpapi.Response{ diff --git a/coderd/provisioners_test.go b/coderd/provisioners_test.go new file mode 100644 index 0000000000000..e65a8ced2a508 --- /dev/null +++ b/coderd/provisioners_test.go @@ -0,0 +1,81 @@ +package coderd_test + +import ( + "archive/tar" + "bytes" + "context" + "fmt" + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/coderd/coderdtest" + "github.com/coder/coder/database" +) + +func TestProvisionerd(t *testing.T) { + t.Parallel() + t.Run("ListDaemons", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + _ = server.AddProvisionerd(t) + require.Eventually(t, func() bool { + daemons, err := server.Client.ProvisionerDaemons(context.Background()) + require.NoError(t, err) + return len(daemons) > 0 + }, time.Second, 10*time.Millisecond) + }) + + t.Run("RunJob", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + user := server.RandomInitialUser(t) + _ = server.AddProvisionerd(t) + + project, err := server.Client.CreateProject(context.Background(), user.Organization, coderd.CreateProjectRequest{ + Name: "my-project", + Provisioner: 
database.ProvisionerTypeTerraform, + }) + require.NoError(t, err) + + var buffer bytes.Buffer + writer := tar.NewWriter(&buffer) + content := `variable "frog" {} + resource "null_resource" "dev" {}` + err = writer.WriteHeader(&tar.Header{ + Name: "main.tf", + Size: int64(len(content)), + }) + require.NoError(t, err) + _, err = writer.Write([]byte(content)) + require.NoError(t, err) + + projectHistory, err := server.Client.CreateProjectHistory(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ + StorageMethod: database.ProjectStorageMethodInlineArchive, + StorageSource: buffer.Bytes(), + }) + require.NoError(t, err) + + workspace, err := server.Client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{ + ProjectID: project.ID, + Name: "wowie", + }) + require.NoError(t, err) + + workspaceHistory, err := server.Client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ + ProjectHistoryID: projectHistory.ID, + Transition: database.WorkspaceTransitionCreate, + }) + require.NoError(t, err) + + logs, err := server.Client.FollowWorkspaceHistoryLogs(context.Background(), "me", workspace.Name, workspaceHistory.Name) + require.NoError(t, err) + + for { + log := <-logs + fmt.Printf("Got %s %s\n", log.CreatedAt, log.Output) + } + }) +} diff --git a/codersdk/provisionerd.go b/codersdk/provisionerd.go deleted file mode 100644 index 5a9aaa9681631..0000000000000 --- a/codersdk/provisionerd.go +++ /dev/null @@ -1,71 +0,0 @@ -package codersdk - -import ( - "context" - - "golang.org/x/xerrors" - "nhooyr.io/websocket" - "storj.io/drpc" - "storj.io/drpc/drpcconn" - - "github.com/hashicorp/yamux" - - "github.com/coder/coder/provisionerd/proto" -) - -// ProvisionerDaemonClient returns the gRPC service for a provisioner daemon implementation. -func (c *Client) ProvisionerDaemonClient(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { - serverURL, err := c.url.Parse("/api/v2/provisionerd") - if err != nil { - return nil, xerrors.Errorf("parse url: %w", err) - } - conn, res, err := websocket.Dial(ctx, serverURL.String(), &websocket.DialOptions{ - HTTPClient: c.httpClient, - }) - if err != nil { - if res == nil { - return nil, err - } - return nil, readBodyAsError(res) - } - session, err := yamux.Client(websocket.NetConn(context.Background(), conn, websocket.MessageBinary), nil) - if err != nil { - return nil, xerrors.Errorf("multiplex client: %w", err) - } - return proto.NewDRPCProvisionerDaemonClient(&multiplexedDRPC{ - session: session, - }), nil -} - -// dRPC is a single-stream protocol by design. It's intended to operate -// a single HTTP-request per invocation. This multiplexes the WebSocket -// using yamux to enable multiple streams to function on a single connection. -// -// If this connection is too slow, we can create a WebSocket for each request. 
-type multiplexedDRPC struct { - session *yamux.Session -} - -func (m *multiplexedDRPC) Close() error { - return m.session.Close() -} - -func (m *multiplexedDRPC) Closed() <-chan struct{} { - return m.session.CloseChan() -} - -func (m *multiplexedDRPC) Invoke(ctx context.Context, rpc string, enc drpc.Encoding, in, out drpc.Message) error { - conn, err := m.session.Open() - if err != nil { - return err - } - return drpcconn.New(conn).Invoke(ctx, rpc, enc, in, out) -} - -func (m *multiplexedDRPC) NewStream(ctx context.Context, rpc string, enc drpc.Encoding) (drpc.Stream, error) { - conn, err := m.session.Open() - if err != nil { - return nil, err - } - return drpcconn.New(conn).NewStream(ctx, rpc, enc) -} diff --git a/codersdk/provisioners.go b/codersdk/provisioners.go new file mode 100644 index 0000000000000..afafc58ed4bae --- /dev/null +++ b/codersdk/provisioners.go @@ -0,0 +1,51 @@ +package codersdk + +import ( + "context" + "encoding/json" + "net/http" + + "golang.org/x/xerrors" + "nhooyr.io/websocket" + + "github.com/hashicorp/yamux" + + "github.com/coder/coder/coderd" + "github.com/coder/coder/provisionerd/proto" + "github.com/coder/coder/provisionersdk" +) + +func (c *Client) ProvisionerDaemons(ctx context.Context) ([]coderd.ProvisionerDaemon, error) { + res, err := c.request(ctx, http.MethodGet, "/api/v2/provisioners/daemons", nil) + if err != nil { + return nil, err + } + defer res.Body.Close() + if res.StatusCode != http.StatusOK { + return nil, readBodyAsError(res) + } + var daemons []coderd.ProvisionerDaemon + return daemons, json.NewDecoder(res.Body).Decode(&daemons) +} + +// ProvisionerDaemonClient returns the gRPC service for a provisioner daemon implementation. +func (c *Client) ProvisionerDaemonClient(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + serverURL, err := c.url.Parse("/api/v2/provisionerd") + if err != nil { + return nil, xerrors.Errorf("parse url: %w", err) + } + conn, res, err := websocket.Dial(ctx, serverURL.String(), &websocket.DialOptions{ + HTTPClient: c.httpClient, + }) + if err != nil { + if res == nil { + return nil, err + } + return nil, readBodyAsError(res) + } + session, err := yamux.Client(websocket.NetConn(context.Background(), conn, websocket.MessageBinary), nil) + if err != nil { + return nil, xerrors.Errorf("multiplex client: %w", err) + } + return proto.NewDRPCProvisionerDaemonClient(provisionersdk.Conn(session)), nil +} diff --git a/codersdk/provisioners_test.go b/codersdk/provisioners_test.go new file mode 100644 index 0000000000000..d87c21d60cdf7 --- /dev/null +++ b/codersdk/provisioners_test.go @@ -0,0 +1,20 @@ +package codersdk_test + +import ( + "context" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/coder/coder/coderd/coderdtest" +) + +func TestProvisioners(t *testing.T) { + t.Parallel() + t.Run("ListDaemons", func(t *testing.T) { + t.Parallel() + server := coderdtest.New(t) + _, err := server.Client.ProvisionerDaemons(context.Background()) + require.NoError(t, err) + }) +} diff --git a/database/databasefake/databasefake.go b/database/databasefake/databasefake.go index 3061a11494dc1..ca547f9a1f6f7 100644 --- a/database/databasefake/databasefake.go +++ b/database/databasefake/databasefake.go @@ -435,6 +435,13 @@ func (q *fakeQuerier) GetOrganizationMemberByUserID(_ context.Context, arg datab return database.OrganizationMember{}, sql.ErrNoRows } +func (q *fakeQuerier) GetProvisionerDaemons(_ context.Context) ([]database.ProvisionerDaemon, error) { + if len(q.provisionerDaemons) == 0 { + return nil, 
sql.ErrNoRows + } + return q.provisionerDaemons, nil +} + func (q *fakeQuerier) GetProvisionerDaemonByID(_ context.Context, id uuid.UUID) (database.ProvisionerDaemon, error) { for _, provisionerDaemon := range q.provisionerDaemons { if provisionerDaemon.ID.String() != id.String() { diff --git a/database/querier.go b/database/querier.go index fac25eed8f4da..870d122f11440 100644 --- a/database/querier.go +++ b/database/querier.go @@ -25,6 +25,7 @@ type querier interface { GetProjectParametersByHistoryID(ctx context.Context, projectHistoryID uuid.UUID) ([]ProjectParameter, error) GetProjectsByOrganizationIDs(ctx context.Context, ids []string) ([]Project, error) GetProvisionerDaemonByID(ctx context.Context, id uuid.UUID) (ProvisionerDaemon, error) + GetProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, error) GetProvisionerJobByID(ctx context.Context, id uuid.UUID) (ProvisionerJob, error) GetUserByEmailOrUsername(ctx context.Context, arg GetUserByEmailOrUsernameParams) (User, error) GetUserByID(ctx context.Context, id string) (User, error) diff --git a/database/query.sql b/database/query.sql index 75fea94b2e09d..4c18f5ed10ec7 100644 --- a/database/query.sql +++ b/database/query.sql @@ -199,6 +199,12 @@ WHERE ORDER BY created_at; +-- name: GetProvisionerDaemons :many +SELECT + * +FROM + provisioner_daemon; + -- name: GetProvisionerDaemonByID :one SELECT * diff --git a/database/query.sql.go b/database/query.sql.go index 3c61628450c90..45507b50c7928 100644 --- a/database/query.sql.go +++ b/database/query.sql.go @@ -609,6 +609,42 @@ func (q *sqlQuerier) GetProvisionerDaemonByID(ctx context.Context, id uuid.UUID) return i, err } +const getProvisionerDaemons = `-- name: GetProvisionerDaemons :many +SELECT + id, created_at, updated_at, name, provisioners +FROM + provisioner_daemon +` + +func (q *sqlQuerier) GetProvisionerDaemons(ctx context.Context) ([]ProvisionerDaemon, error) { + rows, err := q.db.QueryContext(ctx, getProvisionerDaemons) + if err != nil { + return nil, err + } + defer rows.Close() + var items []ProvisionerDaemon + for rows.Next() { + var i ProvisionerDaemon + if err := rows.Scan( + &i.ID, + &i.CreatedAt, + &i.UpdatedAt, + &i.Name, + pq.Array(&i.Provisioners), + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + const getProvisionerJobByID = `-- name: GetProvisionerJobByID :one SELECT id, created_at, updated_at, started_at, cancelled_at, completed_at, error, initiator_id, provisioner, type, project_id, input, worker_id diff --git a/peerbroker/dial_test.go b/peerbroker/dial_test.go index 30066b8d82397..537dc157e0a79 100644 --- a/peerbroker/dial_test.go +++ b/peerbroker/dial_test.go @@ -7,7 +7,6 @@ import ( "github.com/pion/webrtc/v3" "github.com/stretchr/testify/require" "go.uber.org/goleak" - "storj.io/drpc/drpcconn" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" @@ -37,7 +36,7 @@ func TestDial(t *testing.T) { }) require.NoError(t, err) - api := proto.NewDRPCPeerBrokerClient(drpcconn.New(client)) + api := proto.NewDRPCPeerBrokerClient(provisionersdk.Conn(client)) stream, err := api.NegotiateConnection(ctx) require.NoError(t, err) clientConn, err := peerbroker.Dial(stream, []webrtc.ICEServer{{ diff --git a/peerbroker/listen.go b/peerbroker/listen.go index fa68023689c7e..8e92fe7a7c82d 100644 --- a/peerbroker/listen.go +++ b/peerbroker/listen.go @@ -4,12 +4,12 @@ import ( "context" "errors" "io" + "net" "reflect" 
"sync" "github.com/pion/webrtc/v3" "golang.org/x/xerrors" - "storj.io/drpc" "storj.io/drpc/drpcmux" "storj.io/drpc/drpcserver" @@ -19,7 +19,7 @@ import ( // Listen consumes the transport as the server-side of the PeerBroker dRPC service. // The Accept function must be serviced, or new connections will hang. -func Listen(transport drpc.Transport, opts *peer.ConnOptions) (*Listener, error) { +func Listen(connListener net.Listener, opts *peer.ConnOptions) (*Listener, error) { ctx, cancelFunc := context.WithCancel(context.Background()) listener := &Listener{ connectionChannel: make(chan *peer.Conn), @@ -39,7 +39,7 @@ func Listen(transport drpc.Transport, opts *peer.ConnOptions) (*Listener, error) } srv := drpcserver.New(mux) go func() { - err := srv.ServeOne(ctx, transport) + err := srv.Serve(ctx, connListener) _ = listener.closeWithError(err) }() diff --git a/peerbroker/listen_test.go b/peerbroker/listen_test.go index c66d8a480a176..81582a91d4b84 100644 --- a/peerbroker/listen_test.go +++ b/peerbroker/listen_test.go @@ -6,7 +6,6 @@ import ( "testing" "github.com/stretchr/testify/require" - "storj.io/drpc/drpcconn" "github.com/coder/coder/peerbroker" "github.com/coder/coder/peerbroker/proto" @@ -27,7 +26,7 @@ func TestListen(t *testing.T) { listener, err := peerbroker.Listen(server, nil) require.NoError(t, err) - api := proto.NewDRPCPeerBrokerClient(drpcconn.New(client)) + api := proto.NewDRPCPeerBrokerClient(provisionersdk.Conn(client)) stream, err := api.NegotiateConnection(ctx) require.NoError(t, err) clientConn, err := peerbroker.Dial(stream, nil, nil) diff --git a/provisioner/terraform/parse_test.go b/provisioner/terraform/parse_test.go index e678d1d36c674..9d5bec03338f8 100644 --- a/provisioner/terraform/parse_test.go +++ b/provisioner/terraform/parse_test.go @@ -10,7 +10,6 @@ import ( "testing" "github.com/stretchr/testify/require" - "storj.io/drpc/drpcconn" "github.com/coder/coder/provisionersdk" "github.com/coder/coder/provisionersdk/proto" @@ -30,12 +29,12 @@ func TestParse(t *testing.T) { go func() { err := Serve(ctx, &ServeOptions{ ServeOptions: &provisionersdk.ServeOptions{ - Transport: server, + Listener: server, }, }) require.NoError(t, err) }() - api := proto.NewDRPCProvisionerClient(drpcconn.New(client)) + api := proto.NewDRPCProvisionerClient(provisionersdk.Conn(client)) for _, testCase := range []struct { Name string diff --git a/provisioner/terraform/provision.go b/provisioner/terraform/provision.go index 5f52139b35ccb..fe0e9bec46425 100644 --- a/provisioner/terraform/provision.go +++ b/provisioner/terraform/provision.go @@ -17,10 +17,6 @@ import ( // Provision executes `terraform apply`. 
func (t *terraform) Provision(request *proto.Provision_Request, stream proto.DRPCProvisioner_ProvisionStream) error { - defer func() { - _ = stream.CloseSend() - }() - ctx := stream.Context() statefilePath := filepath.Join(request.Directory, "terraform.tfstate") if len(request.State) > 0 { diff --git a/provisioner/terraform/provision_test.go b/provisioner/terraform/provision_test.go index 07ac1bde9dace..27117daa8464a 100644 --- a/provisioner/terraform/provision_test.go +++ b/provisioner/terraform/provision_test.go @@ -10,7 +10,6 @@ import ( "testing" "github.com/stretchr/testify/require" - "storj.io/drpc/drpcconn" "github.com/coder/coder/provisionersdk" "github.com/coder/coder/provisionersdk/proto" @@ -29,12 +28,12 @@ func TestProvision(t *testing.T) { go func() { err := Serve(ctx, &ServeOptions{ ServeOptions: &provisionersdk.ServeOptions{ - Transport: server, + Listener: server, }, }) require.NoError(t, err) }() - api := proto.NewDRPCProvisionerClient(drpcconn.New(client)) + api := proto.NewDRPCProvisionerClient(provisionersdk.Conn(client)) for _, testCase := range []struct { Name string diff --git a/provisionerd/provisionerd.go b/provisionerd/provisionerd.go index d8fbb70892a20..2ce59bd0810fb 100644 --- a/provisionerd/provisionerd.go +++ b/provisionerd/provisionerd.go @@ -14,16 +14,19 @@ import ( "sync" "time" + "go.uber.org/atomic" + "cdr.dev/slog" - "github.com/coder/coder/codersdk" - "github.com/coder/coder/database" "github.com/coder/coder/provisionerd/proto" sdkproto "github.com/coder/coder/provisionersdk/proto" "github.com/coder/retry" ) +// Dialer represents the function to create a daemon client connection. +type Dialer func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) + // Provisioners maps provisioner ID to implementation. -type Provisioners map[database.ProvisionerType]sdkproto.DRPCProvisionerClient +type Provisioners map[string]sdkproto.DRPCProvisionerClient // Options provides customizations to the behavior of a provisioner daemon. type Options struct { @@ -35,14 +38,14 @@ type Options struct { } // New creates and starts a provisioner daemon. -func New(apiClient *codersdk.Client, opts *Options) io.Closer { +func New(clientDialer Dialer, opts *Options) io.Closer { if opts.PollInterval == 0 { opts.PollInterval = 5 * time.Second } ctx, ctxCancel := context.WithCancel(context.Background()) daemon := &provisionerDaemon{ - apiClient: apiClient, - opts: opts, + clientDialer: clientDialer, + opts: opts, closeContext: ctx, closeContextCancel: ctxCancel, @@ -55,7 +58,7 @@ func New(apiClient *codersdk.Client, opts *Options) io.Closer { type provisionerDaemon struct { opts *Options - apiClient *codersdk.Client + clientDialer Dialer connectMutex sync.Mutex client proto.DRPCProvisionerDaemonClient updateStream proto.DRPCProvisionerDaemon_UpdateJobClient @@ -66,9 +69,11 @@ type provisionerDaemon struct { closeMutex sync.Mutex closeError error - activeJob *proto.AcquiredJob - activeJobMutex sync.Mutex - logQueue []proto.Log + runningJob *proto.AcquiredJob + runningJobContext context.Context + runningJobContextCancel context.CancelFunc + runningJobMutex sync.Mutex + isRunningJob atomic.Bool } // Connnect establishes a connection to coderd. 
@@ -78,7 +83,7 @@ func (p *provisionerDaemon) connect() { var err error for retrier := retry.New(50*time.Millisecond, 10*time.Second); retrier.Wait(p.closeContext); { - p.client, err = p.apiClient.ProvisionerDaemonClient(p.closeContext) + p.client, err = p.clientDialer(p.closeContext) if err != nil { // Warn p.opts.Logger.Warn(context.Background(), "failed to dial", slog.Error(err)) @@ -122,10 +127,6 @@ func (p *provisionerDaemon) connect() { case <-p.updateStream.Context().Done(): return case <-ticker.C: - if p.activeJob != nil { - p.opts.Logger.Debug(context.Background(), "skipping acquire; job is already running") - continue - } p.acquireJob() } } @@ -133,36 +134,45 @@ func (p *provisionerDaemon) connect() { } func (p *provisionerDaemon) acquireJob() { - p.opts.Logger.Debug(context.Background(), "acquiring new job") + p.runningJobMutex.Lock() + defer p.runningJobMutex.Unlock() + if p.isRunningJob.Load() { + p.opts.Logger.Debug(context.Background(), "skipping acquire; job is already running") + return + } var err error - p.activeJobMutex.Lock() - p.activeJob, err = p.client.AcquireJob(p.closeContext, &proto.Empty{}) - p.activeJobMutex.Unlock() + p.runningJob, err = p.client.AcquireJob(p.closeContext, &proto.Empty{}) if err != nil { p.opts.Logger.Error(context.Background(), "acquire job", slog.Error(err)) return } - if p.activeJob.JobId == "" { - p.activeJob = nil - p.opts.Logger.Info(context.Background(), "no jobs available") + if p.runningJob.JobId == "" { + p.opts.Logger.Debug(context.Background(), "no jobs available") return } + p.runningJobContext, p.runningJobContextCancel = context.WithCancel(p.closeContext) + p.isRunningJob.Store(true) + p.opts.Logger.Info(context.Background(), "acquired job", - slog.F("organization_name", p.activeJob.OrganizationName), - slog.F("project_name", p.activeJob.ProjectName), - slog.F("username", p.activeJob.UserName), - slog.F("provisioner", p.activeJob.Provisioner), + slog.F("organization_name", p.runningJob.OrganizationName), + slog.F("project_name", p.runningJob.ProjectName), + slog.F("username", p.runningJob.UserName), + slog.F("provisioner", p.runningJob.Provisioner), ) + go p.runJob() +} + +func (p *provisionerDaemon) runJob() { // It's safe to cast this ProvisionerType. This data is coming directly from coderd. - provisioner, hasProvisioner := p.opts.Provisioners[database.ProvisionerType(p.activeJob.Provisioner)] + provisioner, hasProvisioner := p.opts.Provisioners[p.runningJob.Provisioner] if !hasProvisioner { - p.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", p.activeJob.Provisioner)) + p.cancelActiveJob(fmt.Sprintf("provisioner %q not registered", p.runningJob.Provisioner)) return } defer func() { // Cleanup the work directory after execution. 
- err = os.RemoveAll(p.opts.WorkDirectory) + err := os.RemoveAll(p.opts.WorkDirectory) if err != nil { p.cancelActiveJob(fmt.Sprintf("remove all from %q directory: %s", p.opts.WorkDirectory, err)) return @@ -170,14 +180,14 @@ func (p *provisionerDaemon) acquireJob() { p.opts.Logger.Debug(context.Background(), "cleaned up work directory") }() - err = os.MkdirAll(p.opts.WorkDirectory, 0600) + err := os.MkdirAll(p.opts.WorkDirectory, 0600) if err != nil { p.cancelActiveJob(fmt.Sprintf("create work directory %q: %s", p.opts.WorkDirectory, err)) return } - p.opts.Logger.Debug(context.Background(), "unpacking project source archive", slog.F("size_bytes", len(p.activeJob.ProjectSourceArchive))) - reader := tar.NewReader(bytes.NewBuffer(p.activeJob.ProjectSourceArchive)) + p.opts.Logger.Info(context.Background(), "unpacking project source archive", slog.F("size_bytes", len(p.runningJob.ProjectSourceArchive))) + reader := tar.NewReader(bytes.NewBuffer(p.runningJob.ProjectSourceArchive)) for { header, err := reader.Next() if errors.Is(err, io.EOF) { @@ -233,7 +243,7 @@ func (p *provisionerDaemon) acquireJob() { } } - switch jobType := p.activeJob.Type.(type) { + switch jobType := p.runningJob.Type.(type) { case *proto.AcquiredJob_ProjectImport_: p.opts.Logger.Debug(context.Background(), "acquired job is project import", slog.F("project_history_name", jobType.ProjectImport.ProjectHistoryName), @@ -249,15 +259,16 @@ func (p *provisionerDaemon) acquireJob() { p.runWorkspaceProvision(provisioner, jobType) default: - p.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(p.activeJob.Type).String())) + p.cancelActiveJob(fmt.Sprintf("unknown job type %q; ensure your provisioner daemon is up-to-date", reflect.TypeOf(p.runningJob.Type).String())) return } - p.activeJob = nil + p.opts.Logger.Info(context.Background(), "completed job") + p.isRunningJob.Store(false) } func (p *provisionerDaemon) runProjectImport(provisioner sdkproto.DRPCProvisionerClient, job *proto.AcquiredJob_ProjectImport_) { - stream, err := provisioner.Parse(p.closeContext, &sdkproto.Parse_Request{ + stream, err := provisioner.Parse(p.runningJobContext, &sdkproto.Parse_Request{ Directory: p.opts.WorkDirectory, }) if err != nil { @@ -280,7 +291,7 @@ func (p *provisionerDaemon) runProjectImport(provisioner sdkproto.DRPCProvisione ) err = p.updateStream.Send(&proto.JobUpdate{ - JobId: p.activeJob.JobId, + JobId: p.runningJob.JobId, ProjectImportLogs: []*proto.Log{{ Source: proto.LogSource_PROVISIONER, Level: msgType.Log.Level, @@ -293,8 +304,8 @@ func (p *provisionerDaemon) runProjectImport(provisioner sdkproto.DRPCProvisione return } case *sdkproto.Parse_Response_Complete: - _, err = p.client.CompleteJob(p.closeContext, &proto.CompletedJob{ - JobId: p.activeJob.JobId, + _, err = p.client.CompleteJob(p.runningJobContext, &proto.CompletedJob{ + JobId: p.runningJob.JobId, Type: &proto.CompletedJob_ProjectImport_{ ProjectImport: &proto.CompletedJob_ProjectImport{ ParameterSchemas: msgType.Complete.ParameterSchemas, @@ -342,7 +353,7 @@ func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvi ) err = p.updateStream.Send(&proto.JobUpdate{ - JobId: p.activeJob.JobId, + JobId: p.runningJob.JobId, WorkspaceProvisionLogs: []*proto.Log{{ Source: proto.LogSource_PROVISIONER, Level: msgType.Log.Level, @@ -355,7 +366,7 @@ func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvi return } case *sdkproto.Provision_Response_Complete: - 
p.opts.Logger.Debug(context.Background(), "provision successful; marking job as complete", + p.opts.Logger.Info(context.Background(), "provision successful; marking job as complete", slog.F("resource_count", len(msgType.Complete.Resources)), slog.F("resources", msgType.Complete.Resources), slog.F("state_length", len(msgType.Complete.State)), @@ -364,7 +375,7 @@ func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvi // Complete job may need to be async if we disconnected... // When we reconnect we can flush any of these cached values. _, err = p.client.CompleteJob(p.closeContext, &proto.CompletedJob{ - JobId: p.activeJob.JobId, + JobId: p.runningJob.JobId, Type: &proto.CompletedJob_WorkspaceProvision_{ WorkspaceProvision: &proto.CompletedJob_WorkspaceProvision{ State: msgType.Complete.State, @@ -387,30 +398,27 @@ func (p *provisionerDaemon) runWorkspaceProvision(provisioner sdkproto.DRPCProvi } func (p *provisionerDaemon) cancelActiveJob(errMsg string) { - p.activeJobMutex.Lock() - defer p.activeJobMutex.Unlock() - - if p.client == nil { - p.activeJob = nil - return - } - if p.activeJob == nil { + p.runningJobMutex.Lock() + defer p.runningJobMutex.Unlock() + if !p.isRunningJob.Load() { + p.opts.Logger.Warn(context.Background(), "skipping job cancel; none running", slog.F("error_message", errMsg)) return } - p.opts.Logger.Info(context.Background(), "canceling active job", + p.opts.Logger.Info(context.Background(), "canceling running job", slog.F("error_message", errMsg), - slog.F("job_id", p.activeJob.JobId), + slog.F("job_id", p.runningJob.JobId), ) _, err := p.client.CancelJob(p.closeContext, &proto.CancelledJob{ - JobId: p.activeJob.JobId, + JobId: p.runningJob.JobId, Error: fmt.Sprintf("provisioner daemon: %s", errMsg), }) if err != nil { p.opts.Logger.Error(context.Background(), "couldn't cancel job", slog.Error(err)) } - p.opts.Logger.Debug(context.Background(), "canceled active job") - p.activeJob = nil + p.opts.Logger.Debug(context.Background(), "canceled running job") + p.runningJobContextCancel() + p.isRunningJob.Store(false) } // isClosed returns whether the API is closed or not. @@ -423,7 +431,7 @@ func (p *provisionerDaemon) isClosed() bool { } } -// Close ends the provisioner. It will mark any active jobs as canceled. +// Close ends the provisioner. It will mark any running jobs as canceled. 
func (p *provisionerDaemon) Close() error { return p.closeWithError(nil) } @@ -436,7 +444,7 @@ func (p *provisionerDaemon) closeWithError(err error) error { return p.closeError } - if p.activeJob != nil { + if p.isRunningJob.Load() { errMsg := "provisioner daemon was shutdown gracefully" if err != nil { errMsg = err.Error() diff --git a/provisionerd/provisionerd_test.go b/provisionerd/provisionerd_test.go index 7372279b02281..7b2e621ff4e33 100644 --- a/provisionerd/provisionerd_test.go +++ b/provisionerd/provisionerd_test.go @@ -4,123 +4,485 @@ import ( "archive/tar" "bytes" "context" - "fmt" + "errors" + "io" + "os" + "path/filepath" + "sync" "testing" "time" + "github.com/hashicorp/yamux" "github.com/stretchr/testify/require" + "go.uber.org/atomic" + "go.uber.org/goleak" + "storj.io/drpc" "storj.io/drpc/drpcconn" + "storj.io/drpc/drpcmux" + "storj.io/drpc/drpcserver" "cdr.dev/slog" "cdr.dev/slog/sloggers/slogtest" - "github.com/coder/coder/coderd" - "github.com/coder/coder/coderd/coderdtest" - "github.com/coder/coder/codersdk" - "github.com/coder/coder/database" - "github.com/coder/coder/provisioner/terraform" + "github.com/coder/coder/provisionerd" - "github.com/coder/coder/provisionersdk" - "github.com/coder/coder/provisionersdk/proto" + "github.com/coder/coder/provisionerd/proto" + sdkproto "github.com/coder/coder/provisionersdk/proto" ) +func TestMain(m *testing.M) { + goleak.VerifyTestMain(m) +} + func TestProvisionerd(t *testing.T) { t.Parallel() - setupProjectAndWorkspace := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest) (coderd.Project, coderd.Workspace) { - project, err := client.CreateProject(context.Background(), user.Organization, coderd.CreateProjectRequest{ - Name: "banana", - Provisioner: database.ProvisionerTypeTerraform, + noopUpdateJob := func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { + <-stream.Context().Done() + return nil + } + + t.Run("InstantClose", func(t *testing.T) { + t.Parallel() + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + return createProvisionerDaemonClient(t, provisionerDaemonTestServer{}), nil + }, provisionerd.Provisioners{}) + require.NoError(t, closer.Close()) + }) + + t.Run("ConnectErrorClose", func(t *testing.T) { + t.Parallel() + completeChan := make(chan struct{}) + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + defer close(completeChan) + return nil, errors.New("an error") + }, provisionerd.Provisioners{}) + <-completeChan + require.NoError(t, closer.Close()) + }) + + t.Run("AcquireEmptyJob", func(t *testing.T) { + // The provisioner daemon is supposed to skip the job acquire if + // the job provided is empty. This is to show it successfully + // tried to get a job, but none were available. 
+ t.Parallel() + completeChan := make(chan struct{}) + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + acquireJobAttempt := 0 + return createProvisionerDaemonClient(t, provisionerDaemonTestServer{ + acquireJob: func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + if acquireJobAttempt == 1 { + close(completeChan) + } + acquireJobAttempt++ + return &proto.AcquiredJob{}, nil + }, + updateJob: noopUpdateJob, + }), nil + }, provisionerd.Provisioners{}) + <-completeChan + require.NoError(t, closer.Close()) + }) + + t.Run("CloseCancelsJob", func(t *testing.T) { + t.Parallel() + completeChan := make(chan struct{}) + var closer io.Closer + var closerMutex sync.Mutex + closerMutex.Lock() + closer = createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + return createProvisionerDaemonClient(t, provisionerDaemonTestServer{ + acquireJob: func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + return &proto.AcquiredJob{ + JobId: "test", + Provisioner: "someprovisioner", + ProjectSourceArchive: createTar(t, map[string]string{ + "test.txt": "content", + }), + Type: &proto.AcquiredJob_ProjectImport_{ + ProjectImport: &proto.AcquiredJob_ProjectImport{}, + }, + }, nil + }, + updateJob: noopUpdateJob, + cancelJob: func(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) { + close(completeChan) + return &proto.Empty{}, nil + }, + }), nil + }, provisionerd.Provisioners{ + "someprovisioner": createProvisionerClient(t, provisionerTestServer{ + parse: func(request *sdkproto.Parse_Request, stream sdkproto.DRPCProvisioner_ParseStream) error { + closerMutex.Lock() + defer closerMutex.Unlock() + return closer.Close() + }, + }), }) - require.NoError(t, err) - workspace, err := client.CreateWorkspace(context.Background(), "", coderd.CreateWorkspaceRequest{ - Name: "hiii", - ProjectID: project.ID, + closerMutex.Unlock() + <-completeChan + require.NoError(t, closer.Close()) + }) + + t.Run("MaliciousTar", func(t *testing.T) { + // Ensures tars with "../../../etc/passwd" as the path + // are not allowed to run, and will fail the job. 
+ t.Parallel() + completeChan := make(chan struct{}) + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + return createProvisionerDaemonClient(t, provisionerDaemonTestServer{ + acquireJob: func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + return &proto.AcquiredJob{ + JobId: "test", + Provisioner: "someprovisioner", + ProjectSourceArchive: createTar(t, map[string]string{ + "../../../etc/passwd": "content", + }), + Type: &proto.AcquiredJob_ProjectImport_{ + ProjectImport: &proto.AcquiredJob_ProjectImport{}, + }, + }, nil + }, + updateJob: noopUpdateJob, + cancelJob: func(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) { + close(completeChan) + return &proto.Empty{}, nil + }, + }), nil + }, provisionerd.Provisioners{ + "someprovisioner": createProvisionerClient(t, provisionerTestServer{}), }) - require.NoError(t, err) - return project, workspace - } + <-completeChan + require.NoError(t, closer.Close()) + }) + + t.Run("ProjectImport", func(t *testing.T) { + t.Parallel() + var ( + didComplete atomic.Bool + didLog atomic.Bool + didAcquireJob atomic.Bool + ) + completeChan := make(chan struct{}) + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + return createProvisionerDaemonClient(t, provisionerDaemonTestServer{ + acquireJob: func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + if didAcquireJob.Load() { + close(completeChan) + return &proto.AcquiredJob{}, nil + } + didAcquireJob.Store(true) + return &proto.AcquiredJob{ + JobId: "test", + Provisioner: "someprovisioner", + ProjectSourceArchive: createTar(t, map[string]string{ + "test.txt": "content", + }), + Type: &proto.AcquiredJob_ProjectImport_{ + ProjectImport: &proto.AcquiredJob_ProjectImport{}, + }, + }, nil + }, + updateJob: func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { + for { + msg, err := stream.Recv() + if err != nil { + return err + } + if len(msg.ProjectImportLogs) == 0 { + continue + } + + didLog.Store(true) + } + }, + completeJob: func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) { + didComplete.Store(true) + return &proto.Empty{}, nil + }, + }), nil + }, provisionerd.Provisioners{ + "someprovisioner": createProvisionerClient(t, provisionerTestServer{ + parse: func(request *sdkproto.Parse_Request, stream sdkproto.DRPCProvisioner_ParseStream) error { + data, err := os.ReadFile(filepath.Join(request.Directory, "test.txt")) + require.NoError(t, err) + require.Equal(t, "content", string(data)) + + err = stream.Send(&sdkproto.Parse_Response{ + Type: &sdkproto.Parse_Response_Log{ + Log: &sdkproto.Log{ + Level: sdkproto.LogLevel_INFO, + Output: "hello", + }, + }, + }) + require.NoError(t, err) + + err = stream.Send(&sdkproto.Parse_Response{ + Type: &sdkproto.Parse_Response_Complete{ + Complete: &sdkproto.Parse_Complete{ + ParameterSchemas: []*sdkproto.ParameterSchema{}, + }, + }, + }) + require.NoError(t, err) + return nil + }, + }), + }) + <-completeChan + require.True(t, didLog.Load()) + require.True(t, didComplete.Load()) + require.NoError(t, closer.Close()) + }) + + t.Run("WorkspaceProvision", func(t *testing.T) { + t.Parallel() + var ( + didComplete atomic.Bool + didLog atomic.Bool + didAcquireJob atomic.Bool + ) + completeChan := make(chan struct{}) + closer := createProvisionerd(t, func(ctx context.Context) (proto.DRPCProvisionerDaemonClient, error) { + return createProvisionerDaemonClient(t, 
provisionerDaemonTestServer{ + acquireJob: func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) { + if didAcquireJob.Load() { + close(completeChan) + return &proto.AcquiredJob{}, nil + } + didAcquireJob.Store(true) + return &proto.AcquiredJob{ + JobId: "test", + Provisioner: "someprovisioner", + ProjectSourceArchive: createTar(t, map[string]string{ + "test.txt": "content", + }), + Type: &proto.AcquiredJob_WorkspaceProvision_{ + WorkspaceProvision: &proto.AcquiredJob_WorkspaceProvision{}, + }, + }, nil + }, + updateJob: func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { + for { + msg, err := stream.Recv() + if err != nil { + return err + } + if len(msg.WorkspaceProvisionLogs) == 0 { + continue + } + + didLog.Store(true) + } + }, + completeJob: func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) { + didComplete.Store(true) + return &proto.Empty{}, nil + }, + }), nil + }, provisionerd.Provisioners{ + "someprovisioner": createProvisionerClient(t, provisionerTestServer{ + provision: func(request *sdkproto.Provision_Request, stream sdkproto.DRPCProvisioner_ProvisionStream) error { + err := stream.Send(&sdkproto.Provision_Response{ + Type: &sdkproto.Provision_Response_Log{ + Log: &sdkproto.Log{ + Level: sdkproto.LogLevel_DEBUG, + Output: "wow", + }, + }, + }) + require.NoError(t, err) - setupProjectVersion := func(t *testing.T, client *codersdk.Client, user coderd.CreateInitialUserRequest, project coderd.Project) coderd.ProjectHistory { - var buffer bytes.Buffer - writer := tar.NewWriter(&buffer) - content := `variable "frog" {} -resource "null_resource" "dev" {}` + err = stream.Send(&sdkproto.Provision_Response{ + Type: &sdkproto.Provision_Response_Complete{ + Complete: &sdkproto.Provision_Complete{}, + }, + }) + require.NoError(t, err) + return nil + }, + }), + }) + <-completeChan + require.True(t, didLog.Load()) + require.True(t, didComplete.Load()) + require.NoError(t, closer.Close()) + }) +} + +// Creates an in-memory tar of the files provided. +func createTar(t *testing.T, files map[string]string) []byte { + var buffer bytes.Buffer + writer := tar.NewWriter(&buffer) + for path, content := range files { err := writer.WriteHeader(&tar.Header{ - Name: "main.tf", + Name: path, Size: int64(len(content)), }) require.NoError(t, err) + _, err = writer.Write([]byte(content)) require.NoError(t, err) - projectHistory, err := client.CreateProjectHistory(context.Background(), user.Organization, project.Name, coderd.CreateProjectVersionRequest{ - StorageMethod: database.ProjectStorageMethodInlineArchive, - StorageSource: buffer.Bytes(), - }) - require.NoError(t, err) - return projectHistory } - t.Run("InstantClose", func(t *testing.T) { - t.Parallel() - server := coderdtest.New(t) - api := provisionerd.New(server.Client, provisionerd.Provisioners{}, &provisionerd.Options{ - Logger: slogtest.Make(t, nil), - }) - defer api.Close() + err := writer.Flush() + require.NoError(t, err) + return buffer.Bytes() +} + +// Creates a provisionerd implementation with the provided dialer and provisioners. 
+func createProvisionerd(t *testing.T, dialer provisionerd.Dialer, provisioners provisionerd.Provisioners) io.Closer { + closer := provisionerd.New(dialer, &provisionerd.Options{ + Logger: slogtest.Make(t, nil).Named("provisionerd").Leveled(slog.LevelDebug), + PollInterval: 50 * time.Millisecond, + Provisioners: provisioners, + WorkDirectory: t.TempDir(), }) + t.Cleanup(func() { + _ = closer.Close() + }) + return closer +} - t.Run("ProcessJob", func(t *testing.T) { - t.Parallel() - server := coderdtest.New(t) - user := server.RandomInitialUser(t) - project, workspace := setupProjectAndWorkspace(t, server.Client, user) - projectVersion := setupProjectVersion(t, server.Client, user, project) - workspaceHistory, err := server.Client.CreateWorkspaceHistory(context.Background(), "", workspace.Name, coderd.CreateWorkspaceHistoryRequest{ - ProjectHistoryID: projectVersion.ID, - Transition: database.WorkspaceTransitionCreate, - }) +// Creates a provisionerd protobuf client that's connected +// to the server implementation provided. +func createProvisionerDaemonClient(t *testing.T, server provisionerDaemonTestServer) proto.DRPCProvisionerDaemonClient { + clientPipe, serverPipe := createTransports(t) + t.Cleanup(func() { + _ = clientPipe.Close() + _ = serverPipe.Close() + }) + mux := drpcmux.New() + err := proto.DRPCRegisterProvisionerDaemon(mux, &server) + require.NoError(t, err) + srv := drpcserver.New(mux) + go func() { + ctx, cancelFunc := context.WithCancel(context.Background()) + t.Cleanup(cancelFunc) + err := srv.Serve(ctx, serverPipe) require.NoError(t, err) + }() + return proto.NewDRPCProvisionerDaemonClient(&multiplexedDRPC{clientPipe}) +} - clientPipe, serverPipe := provisionersdk.TransportPipe() +// Creates a provisioner protobuf client that's connected +// to the server implementation provided. 
+func createProvisionerClient(t *testing.T, server provisionerTestServer) sdkproto.DRPCProvisionerClient { + clientPipe, serverPipe := createTransports(t) + t.Cleanup(func() { + _ = clientPipe.Close() + _ = serverPipe.Close() + }) + mux := drpcmux.New() + err := sdkproto.DRPCRegisterProvisioner(mux, &server) + require.NoError(t, err) + srv := drpcserver.New(mux) + go func() { ctx, cancelFunc := context.WithCancel(context.Background()) - t.Cleanup(func() { - _ = clientPipe.Close() - _ = serverPipe.Close() - cancelFunc() - }) - go func() { - err := terraform.Serve(ctx, &terraform.ServeOptions{ - ServeOptions: &provisionersdk.ServeOptions{ - Transport: serverPipe, - }, - }) - require.NoError(t, err) - }() - - api := provisionerd.New(server.Client, provisionerd.Provisioners{ - database.ProvisionerTypeTerraform: proto.NewDRPCProvisionerClient(drpcconn.New(clientPipe)), - }, &provisionerd.Options{ - Logger: slogtest.Make(t, nil).Leveled(slog.LevelDebug), - PollInterval: 50 * time.Millisecond, - WorkDirectory: t.TempDir(), - }) - defer api.Close() + t.Cleanup(cancelFunc) + err := srv.Serve(ctx, serverPipe) + require.NoError(t, err) + }() + return sdkproto.NewDRPCProvisionerClient(&multiplexedDRPC{clientPipe}) +} - time.Sleep(time.Millisecond * 400) +type provisionerTestServer struct { + parse func(request *sdkproto.Parse_Request, stream sdkproto.DRPCProvisioner_ParseStream) error + provision func(request *sdkproto.Provision_Request, stream sdkproto.DRPCProvisioner_ProvisionStream) error +} - logs, err := server.Client.FollowWorkspaceHistoryLogs(context.Background(), "me", workspace.Name, workspaceHistory.Name) - require.NoError(t, err) - go func() { - for { - select { - case <-ctx.Done(): - return - case log := <-logs: - fmt.Printf("Got a log! %+v\n", log.Output) - } - } - }() - - time.Sleep(time.Millisecond * 1000) +func (p *provisionerTestServer) Parse(request *sdkproto.Parse_Request, stream sdkproto.DRPCProvisioner_ParseStream) error { + return p.parse(request, stream) +} + +func (p *provisionerTestServer) Provision(request *sdkproto.Provision_Request, stream sdkproto.DRPCProvisioner_ProvisionStream) error { + return p.provision(request, stream) +} + +// Fulfills the protobuf interface for a ProvisionerDaemon with +// passable functions for dynamic functionality. +type provisionerDaemonTestServer struct { + acquireJob func(ctx context.Context, _ *proto.Empty) (*proto.AcquiredJob, error) + updateJob func(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error + cancelJob func(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) + completeJob func(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) +} + +func (p *provisionerDaemonTestServer) AcquireJob(ctx context.Context, empty *proto.Empty) (*proto.AcquiredJob, error) { + return p.acquireJob(ctx, empty) +} + +func (p *provisionerDaemonTestServer) UpdateJob(stream proto.DRPCProvisionerDaemon_UpdateJobStream) error { + return p.updateJob(stream) +} + +func (p *provisionerDaemonTestServer) CancelJob(ctx context.Context, job *proto.CancelledJob) (*proto.Empty, error) { + return p.cancelJob(ctx, job) +} + +func (p *provisionerDaemonTestServer) CompleteJob(ctx context.Context, job *proto.CompletedJob) (*proto.Empty, error) { + return p.completeJob(ctx, job) +} + +// Creates an in-memory pipe of two yamux sessions. 
+func createTransports(t *testing.T) (*yamux.Session, *yamux.Session) { + clientReader, clientWriter := io.Pipe() + serverReader, serverWriter := io.Pipe() + yamuxConfig := yamux.DefaultConfig() + yamuxConfig.LogOutput = io.Discard + client, err := yamux.Client(&readWriteCloser{ + ReadCloser: clientReader, + Writer: serverWriter, + }, yamuxConfig) + require.NoError(t, err) + + server, err := yamux.Server(&readWriteCloser{ + ReadCloser: serverReader, + Writer: clientWriter, + }, yamuxConfig) + require.NoError(t, err) + t.Cleanup(func() { + _ = clientReader.Close() + _ = clientWriter.Close() + _ = serverReader.Close() + _ = serverWriter.Close() + _ = client.Close() + _ = server.Close() }) + return client, server +} + +type readWriteCloser struct { + io.ReadCloser + io.Writer +} + +// Allows concurrent requests on a single dRPC connection. +// Required for calling functions concurrently. +type multiplexedDRPC struct { + session *yamux.Session +} + +func (m *multiplexedDRPC) Close() error { + return m.session.Close() +} + +func (m *multiplexedDRPC) Closed() <-chan struct{} { + return m.session.CloseChan() +} + +func (m *multiplexedDRPC) Invoke(ctx context.Context, rpc string, enc drpc.Encoding, in, out drpc.Message) error { + conn, err := m.session.Open() + if err != nil { + return err + } + return drpcconn.New(conn).Invoke(ctx, rpc, enc, in, out) +} + +func (m *multiplexedDRPC) NewStream(ctx context.Context, rpc string, enc drpc.Encoding) (drpc.Stream, error) { + conn, err := m.session.Open() + if err != nil { + return nil, err + } + return drpcconn.New(conn).NewStream(ctx, rpc, enc) } diff --git a/provisionerd/provisionerdtest/provisionerdtest.go b/provisionerd/provisionerdtest/provisionerdtest.go deleted file mode 100644 index dda556f263123..0000000000000 --- a/provisionerd/provisionerdtest/provisionerdtest.go +++ /dev/null @@ -1,48 +0,0 @@ -package provisionerdtest - -import ( - "context" - "io" - "testing" - "time" - - "github.com/stretchr/testify/require" - "storj.io/drpc/drpcconn" - - "cdr.dev/slog" - "cdr.dev/slog/sloggers/slogtest" - "github.com/coder/coder/codersdk" - "github.com/coder/coder/database" - "github.com/coder/coder/provisioner/terraform" - "github.com/coder/coder/provisionerd" - "github.com/coder/coder/provisionersdk" - "github.com/coder/coder/provisionersdk/proto" -) - -// New creates a provisionerd instance with provisioners registered. 
-func New(t *testing.T, client *codersdk.Client) io.Closer {
-	tfClient, tfServer := provisionersdk.TransportPipe()
-	ctx, cancelFunc := context.WithCancel(context.Background())
-	t.Cleanup(func() {
-		_ = tfClient.Close()
-		_ = tfServer.Close()
-		cancelFunc()
-	})
-	go func() {
-		err := terraform.Serve(ctx, &terraform.ServeOptions{
-			ServeOptions: &provisionersdk.ServeOptions{
-				Transport: tfServer,
-			},
-		})
-		require.NoError(t, err)
-	}()
-
-	return provisionerd.New(client, &provisionerd.Options{
-		Logger: slogtest.Make(t, nil).Named("provisionerd").Leveled(slog.LevelDebug),
-		PollInterval: 50 * time.Millisecond,
-		Provisioners: provisionerd.Provisioners{
-			database.ProvisionerTypeTerraform: proto.NewDRPCProvisionerClient(drpcconn.New(tfClient)),
-		},
-		WorkDirectory: t.TempDir(),
-	})
-}
diff --git a/provisionersdk/serve.go b/provisionersdk/serve.go
index 9b7952001f96c..c278715a145e6 100644
--- a/provisionersdk/serve.go
+++ b/provisionersdk/serve.go
@@ -4,19 +4,22 @@ import (
 	"context"
 	"errors"
 	"io"
+	"net"
+	"os"
 
 	"golang.org/x/xerrors"
-	"storj.io/drpc"
 	"storj.io/drpc/drpcmux"
 	"storj.io/drpc/drpcserver"
 
+	"github.com/hashicorp/yamux"
+
 	"github.com/coder/coder/provisionersdk/proto"
 )
 
 // ServeOptions are configurations to serve a provisioner.
 type ServeOptions struct {
-	// Transport specifies a custom transport to serve the dRPC connection.
-	Transport drpc.Transport
+	// Listener specifies a custom listener to serve the dRPC connection.
+	Listener net.Listener
 }
 
 // Serve starts a dRPC connection for the provisioner and transport provided.
@@ -25,8 +28,17 @@ func Serve(ctx context.Context, server proto.DRPCProvisionerServer, options *Ser
 		options = &ServeOptions{}
 	}
 	// Default to using stdio.
-	if options.Transport == nil {
-		options.Transport = TransportStdio()
+	if options.Listener == nil {
+		config := yamux.DefaultConfig()
+		config.LogOutput = io.Discard
+		stdio, err := yamux.Server(readWriteCloser{
+			ReadCloser: os.Stdin,
+			Writer:     os.Stdout,
+		}, config)
+		if err != nil {
+			return xerrors.Errorf("create yamux: %w", err)
+		}
+		options.Listener = stdio
 	}
 
 	// dRPC is a drop-in replacement for gRPC with less generated code, and faster transports.
@@ -40,16 +52,12 @@ func Serve(ctx context.Context, server proto.DRPCProvisionerServer, options *Ser
 	// Only serve a single connection on the transport.
 	// Transports are not multiplexed, and provisioners are
 	// short-lived processes that can be executed concurrently.
-	err = srv.ServeOne(ctx, options.Transport)
+	err = srv.Serve(ctx, options.Listener)
 	if err != nil {
 		if errors.Is(err, context.Canceled) {
 			return nil
 		}
-		if errors.Is(err, io.ErrClosedPipe) {
-			// This may occur if the transport on either end is
-			// closed before the context. It's fine to return
-			// nil here, since the server has nothing to
-			// communicate with.
+ if errors.Is(err, yamux.ErrSessionShutdown) { return nil } return xerrors.Errorf("serve transport: %w", err) diff --git a/provisionersdk/serve_test.go b/provisionersdk/serve_test.go index 08ac393eb8dfc..cf2dd7517df82 100644 --- a/provisionersdk/serve_test.go +++ b/provisionersdk/serve_test.go @@ -6,7 +6,6 @@ import ( "github.com/stretchr/testify/require" "go.uber.org/goleak" - "storj.io/drpc/drpcconn" "storj.io/drpc/drpcerr" "github.com/coder/coder/provisionersdk" @@ -29,12 +28,12 @@ func TestProvisionerSDK(t *testing.T) { defer cancelFunc() go func() { err := provisionersdk.Serve(ctx, &proto.DRPCProvisionerUnimplementedServer{}, &provisionersdk.ServeOptions{ - Transport: server, + Listener: server, }) require.NoError(t, err) }() - api := proto.NewDRPCProvisionerClient(drpcconn.New(client)) + api := proto.NewDRPCProvisionerClient(provisionersdk.Conn(client)) stream, err := api.Parse(context.Background(), &proto.Parse_Request{}) require.NoError(t, err) _, err = stream.Recv() @@ -47,7 +46,7 @@ func TestProvisionerSDK(t *testing.T) { _ = server.Close() err := provisionersdk.Serve(context.Background(), &proto.DRPCProvisionerUnimplementedServer{}, &provisionersdk.ServeOptions{ - Transport: server, + Listener: server, }) require.NoError(t, err) }) diff --git a/provisionersdk/transport.go b/provisionersdk/transport.go index c01a7ab8269e9..7fd87839d174b 100644 --- a/provisionersdk/transport.go +++ b/provisionersdk/transport.go @@ -1,44 +1,74 @@ package provisionersdk import ( + "context" "io" - "os" + "github.com/hashicorp/yamux" "storj.io/drpc" + "storj.io/drpc/drpcconn" ) -// Transport creates a dRPC transport using stdin and stdout. -func TransportStdio() drpc.Transport { - return &transport{ - in: os.Stdin, - out: os.Stdout, +// TransportPipe creates an in-memory pipe for dRPC transport. +func TransportPipe() (*yamux.Session, *yamux.Session) { + clientReader, clientWriter := io.Pipe() + serverReader, serverWriter := io.Pipe() + yamuxConfig := yamux.DefaultConfig() + yamuxConfig.LogOutput = io.Discard + client, err := yamux.Client(&readWriteCloser{ + ReadCloser: clientReader, + Writer: serverWriter, + }, yamuxConfig) + if err != nil { + panic(err) + } + + server, err := yamux.Server(&readWriteCloser{ + ReadCloser: serverReader, + Writer: clientWriter, + }, yamuxConfig) + if err != nil { + panic(err) } + return client, server } -// TransportPipe creates an in-memory pipe for dRPC transport. -func TransportPipe() (drpc.Transport, drpc.Transport) { - clientReader, serverWriter := io.Pipe() - serverReader, clientWriter := io.Pipe() - clientTransport := &transport{clientReader, clientWriter} - serverTransport := &transport{serverReader, serverWriter} +// Conn returns a multiplexed dRPC connection from a yamux session. +func Conn(session *yamux.Session) drpc.Conn { + return &multiplexedDRPC{session} +} - return clientTransport, serverTransport +type readWriteCloser struct { + io.ReadCloser + io.Writer } -// transport wraps an input and output to pipe data. -type transport struct { - in io.ReadCloser - out io.Writer +// Allows concurrent requests on a single dRPC connection. +// Required for calling functions concurrently. 
+type multiplexedDRPC struct { + session *yamux.Session } -func (s *transport) Read(data []byte) (int, error) { - return s.in.Read(data) +func (m *multiplexedDRPC) Close() error { + return m.session.Close() } -func (s *transport) Write(data []byte) (int, error) { - return s.out.Write(data) +func (m *multiplexedDRPC) Closed() <-chan struct{} { + return m.session.CloseChan() } -func (s *transport) Close() error { - return s.in.Close() +func (m *multiplexedDRPC) Invoke(ctx context.Context, rpc string, enc drpc.Encoding, in, out drpc.Message) error { + conn, err := m.session.Open() + if err != nil { + return err + } + return drpcconn.New(conn).Invoke(ctx, rpc, enc, in, out) +} + +func (m *multiplexedDRPC) NewStream(ctx context.Context, rpc string, enc drpc.Encoding) (drpc.Stream, error) { + conn, err := m.session.Open() + if err != nil { + return nil, err + } + return drpcconn.New(conn).NewStream(ctx, rpc, enc) }
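For reviewers, the new provisionersdk transport pieces compose end to end roughly as follows. This is a minimal sketch rather than part of the change: it uses only the symbols added above (TransportPipe, ServeOptions.Listener, Conn, and the generated DRPCProvisionerUnimplementedServer); the test name, package clause, and error expectation are hypothetical.

package provisionersdk_test

import (
	"context"
	"testing"

	"github.com/stretchr/testify/require"

	"github.com/coder/coder/provisionersdk"
	"github.com/coder/coder/provisionersdk/proto"
)

// TestTransportSketch (hypothetical) wires the pieces together:
// TransportPipe yields a paired set of yamux sessions, Serve accepts the
// server session as its net.Listener, and Conn multiplexes client RPCs
// over the other session.
func TestTransportSketch(t *testing.T) {
	client, server := provisionersdk.TransportPipe()
	t.Cleanup(func() {
		_ = client.Close()
		_ = server.Close()
	})

	ctx, cancelFunc := context.WithCancel(context.Background())
	t.Cleanup(cancelFunc)
	go func() {
		// *yamux.Session implements net.Listener, so the server session can
		// be passed directly as the Listener option.
		_ = provisionersdk.Serve(ctx, &proto.DRPCProvisionerUnimplementedServer{}, &provisionersdk.ServeOptions{
			Listener: server,
		})
	}()

	// Conn returns a drpc.Conn that opens a fresh yamux stream per RPC,
	// so calls can be issued concurrently over a single session.
	api := proto.NewDRPCProvisionerClient(provisionersdk.Conn(client))
	stream, err := api.Parse(ctx, &proto.Parse_Request{})
	require.NoError(t, err)

	// The unimplemented server rejects the call, surfaced on Recv.
	_, err = stream.Recv()
	require.Error(t, err)
}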