Skip to content

Commit e50c8c3

Browse files
committed
update proto to reuse
1 parent 0ec5d1b commit e50c8c3

File tree

2 files changed

+70
-98
lines changed

2 files changed

+70
-98
lines changed

provisionerd/proto/provisionerd.proto

Lines changed: 33 additions & 61 deletions
Original file line numberDiff line numberDiff line change
@@ -74,30 +74,30 @@ message FailedJob {
7474

7575
// CompletedJob is sent when the provisioner daemon completes a job.
7676
message CompletedJob {
77-
message WorkspaceBuild {
78-
bytes state = 1;
79-
repeated provisioner.Resource resources = 2;
80-
repeated provisioner.Timing timings = 3;
81-
repeated provisioner.Module modules = 4;
82-
repeated provisioner.ResourceReplacement resource_replacements = 5;
83-
}
84-
message TemplateImport {
85-
repeated provisioner.Resource start_resources = 1;
86-
repeated provisioner.Resource stop_resources = 2;
87-
repeated provisioner.RichParameter rich_parameters = 3;
88-
repeated string external_auth_providers_names = 4;
89-
repeated provisioner.ExternalAuthProviderResource external_auth_providers = 5;
90-
repeated provisioner.Module start_modules = 6;
91-
repeated provisioner.Module stop_modules = 7;
92-
repeated provisioner.Preset presets = 8;
93-
bytes plan = 9;
94-
bytes module_files = 10;
95-
bytes module_files_hash = 11;
96-
}
97-
message TemplateDryRun {
98-
repeated provisioner.Resource resources = 1;
99-
repeated provisioner.Module modules = 2;
100-
}
77+
message WorkspaceBuild {
78+
bytes state = 1;
79+
repeated provisioner.Resource resources = 2;
80+
repeated provisioner.Timing timings = 3;
81+
repeated provisioner.Module modules = 4;
82+
repeated provisioner.ResourceReplacement resource_replacements = 5;
83+
}
84+
message TemplateImport {
85+
repeated provisioner.Resource start_resources = 1;
86+
repeated provisioner.Resource stop_resources = 2;
87+
repeated provisioner.RichParameter rich_parameters = 3;
88+
repeated string external_auth_providers_names = 4;
89+
repeated provisioner.ExternalAuthProviderResource external_auth_providers = 5;
90+
repeated provisioner.Module start_modules = 6;
91+
repeated provisioner.Module stop_modules = 7;
92+
repeated provisioner.Preset presets = 8;
93+
bytes plan = 9;
94+
bytes module_files = 10;
95+
bytes module_files_hash = 11;
96+
}
97+
message TemplateDryRun {
98+
repeated provisioner.Resource resources = 1;
99+
repeated provisioner.Module modules = 2;
100+
}
101101

102102
string job_id = 1;
103103
oneof type {
@@ -107,34 +107,6 @@ message CompletedJob {
107107
}
108108
}
109109

110-
enum DataUploadType {
111-
UPLOAD_TYPE_UNKNOWN = 0;
112-
// UPLOAD_TYPE_MODULE_FILES is used to stream over terraform module files.
113-
// These files are located in `.terraform/modules` and are used for dynamic
114-
// parameters.
115-
UPLOAD_TYPE_MODULE_FILES = 1;
116-
}
117-
118-
message DataUpload {
119-
DataUploadType upload_type = 1;
120-
// data_hash is the sha256 of the payload to be uploaded.
121-
// This is also used to uniquely identify the upload.
122-
bytes data_hash = 2;
123-
// file_size is the total size of the data being uploaded.
124-
int64 file_size = 3;
125-
// Number of chunks to be uploaded.
126-
int32 chunks = 4;
127-
}
128-
129-
// ChunkPiece is used to stream over large files (over the 4mb limit).
130-
message ChunkPiece {
131-
bytes data = 1;
132-
// full_data_hash should match the hash from the original
133-
// DataUpload message
134-
bytes full_data_hash = 2;
135-
int32 piece_index = 3;
136-
}
137-
138110
// LogSource represents the sender of the log.
139111
enum LogSource {
140112
PROVISIONER_DAEMON = 0;
@@ -183,10 +155,10 @@ message CommitQuotaResponse {
183155
message CancelAcquire {}
184156

185157
message UploadFileRequest {
186-
oneof type {
187-
DataUpload data_upload = 1;
188-
ChunkPiece chunk_piece = 2;
189-
}
158+
oneof type {
159+
provisioner.DataUpload data_upload = 1;
160+
provisioner.ChunkPiece chunk_piece = 2;
161+
}
190162
}
191163

192164
service ProvisionerDaemon {
@@ -214,10 +186,10 @@ service ProvisionerDaemon {
214186
// FailJob indicates a job has failed.
215187
rpc FailJob(FailedJob) returns (Empty);
216188

217-
// CompleteJob indicates a job has been completed.
218-
rpc CompleteJob(CompletedJob) returns (Empty);
189+
// CompleteJob indicates a job has been completed.
190+
rpc CompleteJob(CompletedJob) returns (Empty);
219191

220-
// UploadFile streams files to be inserted into the database.
221-
// The file upload_type should be used to determine how to handle the file.
222-
rpc UploadFile(stream UploadFileRequest) returns (Empty);
192+
// UploadFile streams files to be inserted into the database.
193+
// The file upload_type should be used to determine how to handle the file.
194+
rpc UploadFile(stream UploadFileRequest) returns (Empty);
223195
}

provisionersdk/proto/provisioner.proto

Lines changed: 37 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -361,18 +361,18 @@ message ParseComplete {
361361

362362
// PlanRequest asks the provisioner to plan what resources & parameters it will create
363363
message PlanRequest {
364-
Metadata metadata = 1;
365-
repeated RichParameterValue rich_parameter_values = 2;
366-
repeated VariableValue variable_values = 3;
367-
repeated ExternalAuthProvider external_auth_providers = 4;
368-
repeated RichParameterValue previous_parameter_values = 5;
364+
Metadata metadata = 1;
365+
repeated RichParameterValue rich_parameter_values = 2;
366+
repeated VariableValue variable_values = 3;
367+
repeated ExternalAuthProvider external_auth_providers = 4;
368+
repeated RichParameterValue previous_parameter_values = 5;
369369

370-
// If true, the provisioner can safely assume the caller does not need the
371-
// module files downloaded by the `terraform init` command.
372-
// Ideally this boolean would be flipped in its truthy value, however for
373-
// backwards compatibility reasons, the zero value should be the previous
374-
// behavior of downloading the module files.
375-
bool omit_module_files = 6;
370+
// If true, the provisioner can safely assume the caller does not need the
371+
// module files downloaded by the `terraform init` command.
372+
// Ideally this boolean would be flipped in its truthy value, however for
373+
// backwards compatibility reasons, the zero value should be the previous
374+
// behavior of downloading the module files.
375+
bool omit_module_files = 6;
376376
}
377377

378378
// PlanComplete indicates a request to plan completed.
@@ -435,42 +435,42 @@ message Request {
435435
}
436436

437437
message Response {
438-
oneof type {
439-
Log log = 1;
440-
ParseComplete parse = 2;
441-
PlanComplete plan = 3;
442-
ApplyComplete apply = 4;
443-
DataUpload data_upload = 5;
444-
ChunkPiece chunk_piece = 6;
445-
}
438+
oneof type {
439+
Log log = 1;
440+
ParseComplete parse = 2;
441+
PlanComplete plan = 3;
442+
ApplyComplete apply = 4;
443+
DataUpload data_upload = 5;
444+
ChunkPiece chunk_piece = 6;
445+
}
446446
}
447447

448448
enum DataUploadType {
449-
UPLOAD_TYPE_UNKNOWN = 0;
450-
// UPLOAD_TYPE_MODULE_FILES is used to stream over terraform module files.
451-
// These files are located in `.terraform/modules` and are used for dynamic
452-
// parameters.
453-
UPLOAD_TYPE_MODULE_FILES = 1;
449+
UPLOAD_TYPE_UNKNOWN = 0;
450+
// UPLOAD_TYPE_MODULE_FILES is used to stream over terraform module files.
451+
// These files are located in `.terraform/modules` and are used for dynamic
452+
// parameters.
453+
UPLOAD_TYPE_MODULE_FILES = 1;
454454
}
455455

456456
message DataUpload {
457-
DataUploadType upload_type = 1;
458-
// data_hash is the sha256 of the payload to be uploaded.
459-
// This is also used to uniquely identify the upload.
460-
bytes data_hash = 2;
461-
// file_size is the total size of the data being uploaded.
462-
int64 file_size = 3;
463-
// Number of chunks to be uploaded.
464-
int32 chunks = 4;
457+
DataUploadType upload_type = 1;
458+
// data_hash is the sha256 of the payload to be uploaded.
459+
// This is also used to uniquely identify the upload.
460+
bytes data_hash = 2;
461+
// file_size is the total size of the data being uploaded.
462+
int64 file_size = 3;
463+
// Number of chunks to be uploaded.
464+
int32 chunks = 4;
465465
}
466466

467467
// ChunkPiece is used to stream over large files (over the 4mb limit).
468468
message ChunkPiece {
469-
bytes data = 1;
470-
// full_data_hash should match the hash from the original
471-
// DataUpload message
472-
bytes full_data_hash = 2;
473-
int32 piece_index = 3;
469+
bytes data = 1;
470+
// full_data_hash should match the hash from the original
471+
// DataUpload message
472+
bytes full_data_hash = 2;
473+
int32 piece_index = 3;
474474
}
475475

476476
service Provisioner {

0 commit comments

Comments (0)