From 9ec9b28dbb59216db25b0aef49f5c7c307ebe4a9 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 29 Jan 2020 14:31:59 -0800 Subject: [PATCH 1/7] docs(dataproc): remove 'cloud' from references to dataproc; add py2 deprecation warning; add 3.8 tests (via synth) (#10066) --- docs/_static/custom.css | 2 +- docs/_templates/layout.html | 1 + google/cloud/dataproc_v1/__init__.py | 11 + .../gapic/cluster_controller_client.py | 12 +- google/cloud/dataproc_v1/gapic/enums.py | 4 +- .../gapic/job_controller_client.py | 12 +- .../gapic/workflow_template_service_client.py | 2 +- .../proto/autoscaling_policies.proto | 2 +- .../proto/autoscaling_policies_pb2_grpc.py | 4 +- google/cloud/dataproc_v1/proto/clusters.proto | 67 +++--- .../cloud/dataproc_v1/proto/clusters_pb2.py | 207 +++++++++--------- google/cloud/dataproc_v1/proto/jobs.proto | 47 ++-- google/cloud/dataproc_v1/proto/jobs_pb2.py | 77 +++---- .../proto/workflow_templates.proto | 11 +- .../proto/workflow_templates_pb2.py | 30 ++- .../proto/workflow_templates_pb2_grpc.py | 4 +- google/cloud/dataproc_v1beta2/__init__.py | 11 + .../proto/workflow_templates.proto | 5 +- .../proto/workflow_templates_pb2.py | 26 +-- noxfile.py | 2 +- synth.metadata | 12 +- 21 files changed, 276 insertions(+), 273 deletions(-) diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 9a6f9f8d..0abaf229 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,4 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} \ No newline at end of file diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html index de457b2c..228529ef 100644 --- a/docs/_templates/layout.html +++ b/docs/_templates/layout.html @@ -1,3 +1,4 @@ + {% extends "!layout.html" %} {%- block content %} {%- if theme_fixed_sidebar|lower == 'true' %} diff --git a/google/cloud/dataproc_v1/__init__.py b/google/cloud/dataproc_v1/__init__.py index e82a68ed..3b5fda86 100644 --- a/google/cloud/dataproc_v1/__init__.py +++ b/google/cloud/dataproc_v1/__init__.py @@ -16,6 +16,8 @@ from __future__ import absolute_import +import sys +import warnings from google.cloud.dataproc_v1 import types from google.cloud.dataproc_v1.gapic import cluster_controller_client @@ -24,6 +26,15 @@ from google.cloud.dataproc_v1.gapic import workflow_template_service_client +if sys.version_info[:2] == (2, 7): + message = ( + "A future version of this library will drop support for Python 2.7. " + "More details about Python 2 support for Google Cloud Client Libraries " + "can be found at https://cloud.google.com/python/docs/python2-sunset/" + ) + warnings.warn(message, DeprecationWarning) + + class ClusterControllerClient(cluster_controller_client.ClusterControllerClient): __doc__ = cluster_controller_client.ClusterControllerClient.__doc__ enums = enums diff --git a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py index 82571d6e..e234297b 100644 --- a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py +++ b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py @@ -235,7 +235,7 @@ def create_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The cluster to create. 
If a dict is provided, it must be of the same form as the protobuf @@ -346,7 +346,7 @@ def update_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. cluster (Union[dict, ~google.cloud.dataproc_v1.types.Cluster]): Required. The changes to the cluster. @@ -529,7 +529,7 @@ def delete_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. cluster_uuid (str): Optional. Specifying the ``cluster_uuid`` means the RPC should fail (with error NOT\_FOUND) if cluster with specified UUID does not exist. @@ -622,7 +622,7 @@ def get_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -702,7 +702,7 @@ def list_clusters( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. filter_ (str): Optional. A filter constraining the clusters to list. Filters are case-sensitive and have the following syntax: @@ -823,7 +823,7 @@ def diagnose_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/google/cloud/dataproc_v1/gapic/enums.py b/google/cloud/dataproc_v1/gapic/enums.py index e29c9d7f..80626cef 100644 --- a/google/cloud/dataproc_v1/gapic/enums.py +++ b/google/cloud/dataproc_v1/gapic/enums.py @@ -89,7 +89,7 @@ class Substate(enum.IntEnum): Applies to RUNNING state. STALE_STATUS (int): The agent-reported status is out of date (may occur if - Cloud Dataproc loses communication with Agent). + Dataproc loses communication with Agent). Applies to RUNNING state. """ @@ -148,7 +148,7 @@ class Substate(enum.IntEnum): Applies to RUNNING state. STALE_STATUS (int): The agent-reported status is out of date, which may be caused by a - loss of communication between the agent and Cloud Dataproc. If the + loss of communication between the agent and Dataproc. If the agent does not send a timely update, the job will fail. Applies to RUNNING state. 
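Note on the warning added in google/cloud/dataproc_v1/__init__.py above: it fires once, at import time, and only when sys.version_info[:2] == (2, 7). A minimal sketch of how a test might surface it, assuming a Python 2.7 interpreter and that the package has not yet been imported (CPython hides DeprecationWarning by default):

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")  # make DeprecationWarning visible
        import google.cloud.dataproc_v1  # noqa: F401 -- runs the version check

    # Non-empty on Python 2.7; empty on Python 3.
    py2_warnings = [w for w in caught if issubclass(w.category, DeprecationWarning)]
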
diff --git a/google/cloud/dataproc_v1/gapic/job_controller_client.py b/google/cloud/dataproc_v1/gapic/job_controller_client.py index 6bf8c7d7..77327202 100644 --- a/google/cloud/dataproc_v1/gapic/job_controller_client.py +++ b/google/cloud/dataproc_v1/gapic/job_controller_client.py @@ -221,7 +221,7 @@ def submit_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The job resource. If a dict is provided, it must be of the same form as the protobuf @@ -304,7 +304,7 @@ def get_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -386,7 +386,7 @@ def list_jobs( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -507,7 +507,7 @@ def update_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The changes to the job. @@ -597,7 +597,7 @@ def cancel_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -668,7 +668,7 @@ def delete_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py index 0b39b3d6..df8e79dd 100644 --- a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py +++ b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py @@ -57,7 +57,7 @@ class WorkflowTemplateServiceClient(object): """ The API interface for managing Workflow Templates in the - Cloud Dataproc API. + Dataproc API. 
""" SERVICE_ADDRESS = "dataproc.googleapis.com:443" diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto index cb466ee8..65035a59 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto @@ -30,7 +30,7 @@ option java_outer_classname = "AutoscalingPoliciesProto"; option java_package = "com.google.cloud.dataproc.v1"; // The API interface for managing autoscaling policies in the -// Google Cloud Dataproc API. +// Dataproc API. service AutoscalingPolicyService { option (google.api.default_host) = "dataproc.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py index 953e9237..172d1815 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py @@ -9,7 +9,7 @@ class AutoscalingPolicyServiceStub(object): """The API interface for managing autoscaling policies in the - Google Cloud Dataproc API. + Dataproc API. """ def __init__(self, channel): @@ -47,7 +47,7 @@ def __init__(self, channel): class AutoscalingPolicyServiceServicer(object): """The API interface for managing autoscaling policies in the - Google Cloud Dataproc API. + Dataproc API. """ def CreateAutoscalingPolicy(self, request, context): diff --git a/google/cloud/dataproc_v1/proto/clusters.proto b/google/cloud/dataproc_v1/proto/clusters.proto index 8ab5cd1b..bc254589 100644 --- a/google/cloud/dataproc_v1/proto/clusters.proto +++ b/google/cloud/dataproc_v1/proto/clusters.proto @@ -65,6 +65,7 @@ service ClusterController { response_type: "Cluster" metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" }; + option (google.api.method_signature) = "project_id,region,cluster_name,cluster,update_mask"; } // Deletes a cluster in a project. The returned @@ -122,15 +123,15 @@ service ClusterController { // a cluster of Compute Engine instances. message Cluster { // Required. The Google Cloud Platform project ID that the cluster belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. Cluster names within a project must be // unique. Names of deleted clusters can be reused. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - // Required. The cluster config. Note that Cloud Dataproc may set + // Required. The cluster config. Note that Dataproc may set // default values, and values may change when clusters are updated. - ClusterConfig config = 3; + ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The labels to associate with this cluster. // Label **keys** must contain 1 to 63 characters, and must conform to @@ -147,7 +148,7 @@ message Cluster { // Output only. The previous cluster status. repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc + // Output only. A cluster UUID (Unique Universal Identifier). Dataproc // generates this value when it creates the cluster. string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -160,14 +161,14 @@ message Cluster { // The cluster config. message ClusterConfig { - // Optional. 
A Google Cloud Storage bucket used to stage job + // Optional. A Cloud Storage bucket used to stage job // dependencies, config files, and job driver console output. // If you do not specify a staging bucket, Cloud // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the Google + // ASIA, or EU) for your cluster's staging bucket according to the // Compute Engine zone where your cluster is deployed, and then create // and manage this project-level, per-location bucket (see - // [Cloud Dataproc staging + // [Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; @@ -226,7 +227,7 @@ message AutoscalingConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // - // Note that the policy must be in the same project and Cloud Dataproc region. + // Note that the policy must be in the same project and Dataproc region. string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } @@ -242,7 +243,7 @@ message EncryptionConfig { message GceClusterConfig { // Optional. The zone where the Compute Engine cluster will be located. // On a create request, it is required in the "global" region. If omitted - // in a non-global Cloud Dataproc region, the service will pick a zone in the + // in a non-global Dataproc region, the service will pick a zone in the // corresponding Compute Engine region. On a get request, zone will // always be present. // @@ -284,17 +285,17 @@ message GceClusterConfig { // configured to be accessible without external IP addresses. bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; - // Optional. The service account of the instances. Defaults to the default - // Compute Engine service account. Custom service accounts need - // permissions equivalent to the following IAM roles: - // - // * roles/logging.logWriter - // * roles/storage.objectAdmin - // - // (see - // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts - // for more information). - // Example: `[account_id]@[project_id].iam.gserviceaccount.com` + // Optional. The [Dataproc service + // account](/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc) + // (also see [VM Data Plane + // identity](/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) + // used by Dataproc cluster VM instances to access Google Cloud Platform + // services. + // + // If not specified, the + // [Compute Engine default service + // account](/compute/docs/access/service-accounts#default_service_account) + // is used. string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in @@ -330,7 +331,7 @@ message InstanceGroupConfig { // For master instance groups, must be set to 1. int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - // Output only. The list of instance names. Cloud Dataproc derives the names + // Output only. The list of instance names. Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. 
repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -347,7 +348,7 @@ message InstanceGroupConfig { // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` // * `n1-standard-2` // - // **Auto Zone Exception**: If you are using the Cloud Dataproc + // **Auto Zone Exception**: If you are using the Dataproc // [Auto Zone // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type @@ -371,7 +372,7 @@ message InstanceGroupConfig { repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies the minimum cpu platform for the Instance Group. - // See [Cloud Dataproc→Minimum CPU Platform] + // See [Dataproc→Minimum CPU Platform] // (/dataproc/docs/concepts/compute/dataproc-min-cpu). string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; } @@ -400,7 +401,7 @@ message AcceleratorConfig { // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` // * `nvidia-tesla-k80` // - // **Auto Zone Exception**: If you are using the Cloud Dataproc + // **Auto Zone Exception**: If you are using the Dataproc // [Auto Zone // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the accelerator type @@ -479,7 +480,7 @@ message ClusterStatus { UNHEALTHY = 1; // The agent-reported status is out of date (may occur if - // Cloud Dataproc loses communication with Agent). + // Dataproc loses communication with Agent). // // Applies to RUNNING state. STALE_STATUS = 2; @@ -580,7 +581,7 @@ message KerberosConfig { // Specifies the selection and config of software inside the cluster. message SoftwareConfig { // Optional. The version of software inside the cluster. It must be one of the - // supported [Cloud Dataproc + // supported [Dataproc // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), // such as "1.2" (including a subminor version, such as "1.2.29"), or the // ["preview" @@ -630,7 +631,7 @@ message CreateClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. @@ -656,7 +657,7 @@ message UpdateClusterRequest { // cluster belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -748,7 +749,7 @@ message DeleteClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -778,7 +779,7 @@ message GetClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. 
string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -791,7 +792,7 @@ message ListClustersRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A filter constraining the clusters to list. Filters are @@ -839,7 +840,7 @@ message DiagnoseClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2.py b/google/cloud/dataproc_v1/proto/clusters_pb2.py index ca9065f5..59c9e460 100644 --- a/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ b/google/cloud/dataproc_v1/proto/clusters_pb2.py @@ -40,7 +40,7 @@ "\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb9\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 
\x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 
\x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 
\x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03\x32\xae\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xf3\x01\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t 
\x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03\x32\xe3\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.
v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -83,8 +83,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3072, - serialized_end=3158, + serialized_start=3087, + serialized_end=3173, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -106,8 +106,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3160, - serialized_end=3220, + serialized_start=3175, + serialized_end=3235, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -164,8 +164,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=790, - serialized_end=835, + serialized_start=805, + serialized_end=850, ) _CLUSTER = _descriptor.Descriptor( @@ -190,7 +190,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -208,7 +208,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -226,7 +226,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -329,7 +329,7 @@ extension_ranges=[], oneofs=[], serialized_start=394, - serialized_end=835, + serialized_end=850, ) @@ -529,8 +529,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=838, - serialized_end=1580, + serialized_start=853, + serialized_end=1595, ) @@ -568,8 +568,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1582, - serialized_end=1626, + serialized_start=1597, + serialized_end=1641, ) @@ -607,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1628, - serialized_end=1680, + serialized_start=1643, + serialized_end=1695, ) @@ -664,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1969, - serialized_end=2016, + serialized_start=1984, + serialized_end=2031, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -828,8 +828,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1683, - serialized_end=2016, + serialized_start=1698, + serialized_end=2031, ) @@ -1011,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2019, - serialized_end=2429, + serialized_start=2034, + serialized_end=2444, ) @@ -1068,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2431, - serialized_end=2530, + serialized_start=2446, + serialized_end=2545, ) @@ -1125,8 +1125,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2532, - serialized_end=2608, + serialized_start=2547, + serialized_end=2623, ) @@ -1200,8 +1200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2610, - serialized_end=2712, + serialized_start=2625, + serialized_end=2727, ) @@ -1257,8 +1257,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2714, - serialized_end=2829, + serialized_start=2729, + serialized_end=2844, ) @@ -1350,8 +1350,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2832, - serialized_end=3220, + serialized_start=2847, + serialized_end=3235, ) @@ -1389,8 +1389,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3222, - serialized_end=3305, + 
serialized_start=3237, + serialized_end=3320, ) @@ -1680,8 +1680,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3308, - serialized_end=3836, + serialized_start=3323, + serialized_end=3851, ) @@ -1737,8 +1737,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4039, - serialized_end=4088, + serialized_start=4054, + serialized_end=4103, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -1811,8 +1811,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3839, - serialized_end=4088, + serialized_start=3854, + serialized_end=4103, ) @@ -1868,8 +1868,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4271, - serialized_end=4321, + serialized_start=4286, + serialized_end=4336, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -1924,8 +1924,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4323, - serialized_end=4373, + serialized_start=4338, + serialized_end=4388, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -1980,8 +1980,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4091, - serialized_end=4373, + serialized_start=4106, + serialized_end=4388, ) @@ -2073,8 +2073,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4376, - serialized_end=4526, + serialized_start=4391, + serialized_end=4541, ) @@ -2220,8 +2220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4529, - serialized_end=4831, + serialized_start=4544, + serialized_end=4846, ) @@ -2331,8 +2331,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4834, - serialized_end=4981, + serialized_start=4849, + serialized_end=4996, ) @@ -2406,8 +2406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4983, - serialized_end=5075, + serialized_start=4998, + serialized_end=5090, ) @@ -2517,8 +2517,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5078, - serialized_end=5215, + serialized_start=5093, + serialized_end=5230, ) @@ -2574,8 +2574,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5217, - serialized_end=5327, + serialized_start=5232, + serialized_end=5342, ) @@ -2649,8 +2649,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5329, - serialized_end=5426, + serialized_start=5344, + serialized_end=5441, ) @@ -2688,8 +2688,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5428, - serialized_end=5477, + serialized_start=5443, + serialized_end=5492, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -2807,7 +2807,7 @@ Required. The cluster name. Cluster names within a project must be unique. Names of deleted clusters can be reused. config: - Required. The cluster config. Note that Cloud Dataproc may set + Required. The cluster config. Note that Dataproc may set default values, and values may change when clusters are updated. labels: @@ -2824,8 +2824,7 @@ Output only. The previous cluster status. cluster_uuid: Output only. A cluster UUID (Unique Universal Identifier). - Cloud Dataproc generates this value when it creates the - cluster. + Dataproc generates this value when it creates the cluster. metrics: Contains cluster daemon metrics such as HDFS and YARN stats. **Beta Feature**: This report is available for testing @@ -2848,15 +2847,15 @@ Attributes: config_bucket: - Optional. A Google Cloud Storage bucket used to stage job + Optional. 
A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your - cluster's staging bucket according to the Google Compute - Engine zone where your cluster is deployed, and then create - and manage this project-level, per-location bucket (see `Cloud - Dataproc staging bucket `__). + cluster's staging bucket according to the Compute Engine zone + where your cluster is deployed, and then create and manage + this project-level, per-location bucket (see `Dataproc staging + bucket `__). gce_cluster_config: Optional. The shared Compute Engine config settings for all instances in a cluster. @@ -2912,7 +2911,7 @@ rojects/[project_id]/locations/[dataproc_region]/autoscalingPo licies/[policy_id]`` - ``projects/[project_id]/locations/[dat aproc_region]/autoscalingPolicies/[policy_id]`` Note that the - policy must be in the same project and Cloud Dataproc region. + policy must be in the same project and Dataproc region. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.AutoscalingConfig) ), @@ -2961,7 +2960,7 @@ zone_uri: Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" - region. If omitted in a non-global Cloud Dataproc region, the + region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: - ``htt @@ -2997,14 +2996,15 @@ networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. service_account: - Optional. The service account of the instances. Defaults to - the default Compute Engine service account. Custom service - accounts need permissions equivalent to the following IAM - roles: - roles/logging.logWriter - - roles/storage.objectAdmin (see - https://cloud.google.com/compute/docs/access/service- - accounts#custom\_service\_accounts for more information). - Example: ``[account_id]@[project_id].iam.gserviceaccount.com`` + Optional. The `Dataproc service account + `__ (also see `VM + Data Plane identity `__) used by + Dataproc cluster VM instances to access Google Cloud Platform + services. If not specified, the `Compute Engine default + service account `__ is used. service_account_scopes: Optional. The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is @@ -3047,9 +3047,9 @@ Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. instance_names: - Output only. The list of instance names. Cloud Dataproc - derives the names from ``cluster_name``, ``num_instances``, - and the instance group. + Output only. The list of instance names. Dataproc derives the + names from ``cluster_name``, ``num_instances``, and the + instance group. image_uri: Optional. The Compute Engine image resource used for cluster instances. It can be specified or may be inferred from @@ -3061,8 +3061,8 @@ /[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`` - ``projects/[project_id]/zones/us- east1-a/machineTypes/n1-standard-2`` - ``n1-standard-2`` - **Auto Zone Exception**: If you are using the Cloud Dataproc - `Auto Zone Placement `__ feature, you must use the short name of the machine type resource, for example, ``n1-standard-2``. 
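As the regenerated docstrings above note, a request ``cluster`` may be passed as a dict in the same form as the ``Cluster`` protobuf message, and under Auto Zone Placement the machine type must be given by its short name. A minimal sketch under those assumptions; the project, region, and cluster names are placeholders, not values from this patch:

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()

    cluster = {
        "project_id": "example-project",    # placeholder
        "cluster_name": "example-cluster",  # placeholder
        "config": {
            # Omitting zone_uri in a non-global region lets the service pick
            # the zone, so machine types must use their short names.
            "master_config": {"num_instances": 1, "machine_type_uri": "n1-standard-2"},
            "worker_config": {"num_instances": 2, "machine_type_uri": "n1-standard-2"},
        },
    }

    operation = client.create_cluster("example-project", "us-central1", cluster)
    result = operation.result()  # long-running operation; blocks until the cluster exists
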
@@ -3080,7 +3080,7 @@ these instances. min_cpu_platform: Optional. Specifies the minimum cpu platform for the Instance - Group. See [Cloud Dataproc→Minimum CPU Platform] + Group. See [Dataproc→Minimum CPU Platform] (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstanceGroupConfig) @@ -3132,7 +3132,7 @@ ``projects/[project_id]/zones/us- east1-a/acceleratorTypes/nvidia-tesla-k80`` - ``nvidia- tesla-k80`` **Auto Zone Exception**: If you are using the - Cloud Dataproc `Auto Zone Placement + Dataproc `Auto Zone Placement `__ feature, you must use the short name of the accelerator type resource, for example, @@ -3337,7 +3337,7 @@ Attributes: image_version: Optional. The version of software inside the cluster. It must - be one of the supported `Cloud Dataproc Versions + be one of the supported `Dataproc Versions `__, such as "1.2" (including a subminor version, such as "1.2.29"), or the @@ -3422,8 +3422,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster: Required. The cluster to create. request_id: @@ -3458,8 +3457,7 @@ Required. The ID of the Google Cloud Platform project the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. cluster: @@ -3539,8 +3537,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. cluster_uuid: @@ -3579,8 +3576,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. """, @@ -3603,8 +3599,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. filter: Optional. A filter constraining the clusters to list. Filters are case-sensitive and have the following syntax: field = @@ -3669,8 +3664,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. 
""", @@ -3702,6 +3696,9 @@ DESCRIPTOR._options = None _CLUSTER_LABELSENTRY._options = None +_CLUSTER.fields_by_name["project_id"]._options = None +_CLUSTER.fields_by_name["cluster_name"]._options = None +_CLUSTER.fields_by_name["config"]._options = None _CLUSTER.fields_by_name["labels"]._options = None _CLUSTER.fields_by_name["status"]._options = None _CLUSTER.fields_by_name["status_history"]._options = None @@ -3805,8 +3802,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=5480, - serialized_end=7062, + serialized_start=5495, + serialized_end=7130, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -3827,7 +3824,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata" + "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata\332A2project_id,region,cluster_name,cluster,update_mask" ), ), _descriptor.MethodDescriptor( diff --git a/google/cloud/dataproc_v1/proto/jobs.proto b/google/cloud/dataproc_v1/proto/jobs.proto index eeba155d..bcb68fed 100644 --- a/google/cloud/dataproc_v1/proto/jobs.proto +++ b/google/cloud/dataproc_v1/proto/jobs.proto @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -132,7 +131,7 @@ message LoggingConfig { map driver_log_levels = 2; } -// A Cloud Dataproc job for running +// A Dataproc job for running // [Apache Hadoop // MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) // jobs on [Apache Hadoop @@ -176,7 +175,7 @@ message HadoopJob { repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Hadoop. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site and // classes in user code. map properties = 7 [(google.api.field_behavior) = OPTIONAL]; @@ -185,7 +184,7 @@ message HadoopJob { LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) +// A Dataproc job for running [Apache Spark](http://spark.apache.org/) // applications on YARN. message SparkJob { // Required. The specification of the main method to call to drive the job. @@ -221,7 +220,7 @@ message SparkJob { repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Spark. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. 
map properties = 7 [(google.api.field_behavior) = OPTIONAL]; @@ -230,7 +229,7 @@ message SparkJob { LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running +// A Dataproc job for running // [Apache // PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) // applications on YARN. @@ -261,7 +260,7 @@ message PySparkJob { repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure PySpark. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. map properties = 7 [(google.api.field_behavior) = OPTIONAL]; @@ -289,7 +288,7 @@ message QueryList { repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } -// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) +// A Dataproc job for running [Apache Hive](https://hive.apache.org/) // queries on YARN. message HiveJob { // Required. The sequence of Hive queries to execute, specified as either @@ -312,7 +311,7 @@ message HiveJob { map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names and values, used to configure Hive. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/hive/conf/hive-site.xml, and classes in user code. map properties = 5 [(google.api.field_behavior) = OPTIONAL]; @@ -323,7 +322,7 @@ message HiveJob { repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Spark +// A Dataproc job for running [Apache Spark // SQL](http://spark.apache.org/sql/) queries. message SparkSqlJob { // Required. The sequence of Spark SQL queries to execute, specified as @@ -342,7 +341,7 @@ message SparkSqlJob { // Optional. A mapping of property names to values, used to configure // Spark SQL's SparkConf. Properties that conflict with values set by the - // Cloud Dataproc API may be overwritten. + // Dataproc API may be overwritten. map properties = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -352,7 +351,7 @@ message SparkSqlJob { LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) +// A Dataproc job for running [Apache Pig](https://pig.apache.org/) // queries on YARN. message PigJob { // Required. The sequence of Pig queries to execute, specified as an HCFS @@ -375,7 +374,7 @@ message PigJob { map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Pig. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/pig/conf/pig.properties, and classes in user code. 
map properties = 5 [(google.api.field_behavior) = OPTIONAL]; @@ -388,17 +387,17 @@ message PigJob { LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } -// Cloud Dataproc job config. +// Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; - // Output only. A cluster UUID generated by the Cloud Dataproc service when + // Output only. A cluster UUID generated by the Dataproc service when // the job is submitted. string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } -// Cloud Dataproc job status. +// Dataproc job status. message JobStatus { // The job state. enum State { @@ -456,7 +455,7 @@ message JobStatus { QUEUED = 2; // The agent-reported status is out of date, which may be caused by a - // loss of communication between the agent and Cloud Dataproc. If the + // loss of communication between the agent and Dataproc. If the // agent does not send a timely update, the job will fail. // // Applies to RUNNING state. @@ -549,7 +548,7 @@ message YarnApplication { string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job resource. +// A Dataproc job resource. message Job { // Optional. The fully qualified reference to the job, which can be used to // obtain the equivalent REST path of the job resource. If this property @@ -642,7 +641,7 @@ message SubmitJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. @@ -668,7 +667,7 @@ message GetJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -694,7 +693,7 @@ message ListJobsRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. @@ -737,7 +736,7 @@ message UpdateJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -772,7 +771,7 @@ message CancelJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -785,7 +784,7 @@ message DeleteJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. 
string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2.py b/google/cloud/dataproc_v1/proto/jobs_pb2.py index 294c5acc..470b4bb9 100644 --- a/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ b/google/cloud/dataproc_v1/proto/jobs_pb2.py @@ -3361,7 +3361,7 @@ ), DESCRIPTOR=_HADOOPJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Hadoop + __doc__="""A Dataproc job for running `Apache Hadoop MapReduce `__ jobs on `Apache Hadoop YARN `__. @@ -3403,9 +3403,8 @@ properties: Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/hadoop/conf/\*-site and classes in user - code. + the Dataproc API may be overwritten. Can include properties + set in /etc/hadoop/conf/\*-site and classes in user code. logging_config: Optional. The runtime log config for job execution. """, @@ -3430,8 +3429,8 @@ ), DESCRIPTOR=_SPARKJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Spark `__ applications on YARN. + __doc__="""A Dataproc job for running `Apache Spark `__ + applications on YARN. Attributes: @@ -3467,9 +3466,9 @@ properties: Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/spark/conf/spark-defaults.conf and classes in user + code. logging_config: Optional. The runtime log config for job execution. """, @@ -3494,7 +3493,7 @@ ), DESCRIPTOR=_PYSPARKJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache PySpark `__ applications on YARN. @@ -3524,9 +3523,9 @@ properties: Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/spark/conf/spark-defaults.conf and classes in user + code. logging_config: Optional. The runtime log config for job execution. """, @@ -3584,8 +3583,8 @@ ), DESCRIPTOR=_HIVEJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Hive `__ queries on YARN. + __doc__="""A Dataproc job for running `Apache Hive `__ + queries on YARN. Attributes: @@ -3606,9 +3605,9 @@ properties: Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/hadoop/conf/\*-site.xml, - /etc/hive/conf/hive-site.xml, and classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/hadoop/conf/\*-site.xml, /etc/hive/conf/hive- + site.xml, and classes in user code. jar_file_uris: Optional. HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. 
Can contain @@ -3645,7 +3644,7 @@ ), DESCRIPTOR=_SPARKSQLJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Spark + __doc__="""A Dataproc job for running `Apache Spark SQL `__ queries. @@ -3663,7 +3662,7 @@ properties: Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with - values set by the Cloud Dataproc API may be overwritten. + values set by the Dataproc API may be overwritten. jar_file_uris: Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -3701,8 +3700,8 @@ ), DESCRIPTOR=_PIGJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache - Pig `__ queries on YARN. + __doc__="""A Dataproc job for running `Apache Pig `__ + queries on YARN. Attributes: @@ -3723,9 +3722,9 @@ properties: Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the - Cloud Dataproc API may be overwritten. Can include properties - set in /etc/hadoop/conf/\*-site.xml, - /etc/pig/conf/pig.properties, and classes in user code. + Dataproc API may be overwritten. Can include properties set in + /etc/hadoop/conf/\*-site.xml, /etc/pig/conf/pig.properties, + and classes in user code. jar_file_uris: Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain @@ -3746,7 +3745,7 @@ dict( DESCRIPTOR=_JOBPLACEMENT, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Cloud Dataproc job config. + __doc__="""Dataproc job config. Attributes: @@ -3754,8 +3753,8 @@ Required. The name of the cluster where the job will be submitted. cluster_uuid: - Output only. A cluster UUID generated by the Cloud Dataproc - service when the job is submitted. + Output only. A cluster UUID generated by the Dataproc service + when the job is submitted. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.JobPlacement) ), @@ -3768,7 +3767,7 @@ dict( DESCRIPTOR=_JOBSTATUS, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""Cloud Dataproc job status. + __doc__="""Dataproc job status. Attributes: @@ -3860,7 +3859,7 @@ ), DESCRIPTOR=_JOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job resource. + __doc__="""A Dataproc job resource. Attributes: @@ -3962,8 +3961,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job: Required. The job resource. request_id: @@ -3998,8 +3996,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. """, @@ -4022,8 +4019,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. page_size: Optional. The number of results to return in each response. page_token: @@ -4067,8 +4063,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. 
The Dataproc region in which to handle the request. job_id: Required. The job ID. job: @@ -4122,8 +4117,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. """, @@ -4146,8 +4140,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. """, diff --git a/google/cloud/dataproc_v1/proto/workflow_templates.proto b/google/cloud/dataproc_v1/proto/workflow_templates.proto index 8976c42e..30b5ced4 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates.proto +++ b/google/cloud/dataproc_v1/proto/workflow_templates.proto @@ -33,7 +33,7 @@ option java_outer_classname = "WorkflowTemplatesProto"; option java_package = "com.google.cloud.dataproc.v1"; // The API interface for managing Workflow Templates in the -// Cloud Dataproc API. +// Dataproc API. service WorkflowTemplateService { option (google.api.default_host) = "dataproc.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; @@ -179,7 +179,7 @@ service WorkflowTemplateService { } } -// A Cloud Dataproc workflow template resource. +// A Dataproc workflow template resource. message WorkflowTemplate { option (google.api.resource) = { type: "dataproc.googleapis.com/WorkflowTemplate" @@ -458,7 +458,7 @@ message ValueValidation { repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; } -// A Cloud Dataproc workflow template resource. +// A Dataproc workflow template resource. message WorkflowMetadata { // The operation state. enum State { @@ -709,10 +709,7 @@ message UpdateWorkflowTemplateRequest { // // The `template.version` field must match the current version. 
WorkflowTemplate template = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } + (google.api.field_behavior) = REQUIRED ]; } diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py index e539c2c1..87c81d7b 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py @@ -40,7 +40,7 @@ "\n\034com.google.cloud.dataproc.v1B\026WorkflowTemplatesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n7google/cloud/dataproc_v1/proto/workflow_templates.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a-google/cloud/dataproc_v1/proto/clusters.proto\x1a)google/cloud/dataproc_v1/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcd\x05\n\x10WorkflowTemplate\x12\x0f\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01\x12\x34\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12K\n\x06labels\x18\x06 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.WorkflowTemplate.LabelsEntryB\x03\xe0\x41\x01\x12K\n\tplacement\x18\x07 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.WorkflowTemplatePlacementB\x03\xe0\x41\x02\x12\x37\n\x04jobs\x18\x08 \x03(\x0b\x32$.google.cloud.dataproc.v1.OrderedJobB\x03\xe0\x41\x02\x12\x44\n\nparameters\x18\t \x03(\x0b\x32+.google.cloud.dataproc.v1.TemplateParameterB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:\xca\x01\xea\x41\xc6\x01\n(dataproc.googleapis.com/WorkflowTemplate\x12Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\x12Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \x01"\xb4\x01\n\x19WorkflowTemplatePlacement\x12\x43\n\x0fmanaged_cluster\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.ManagedClusterH\x00\x12\x45\n\x10\x63luster_selector\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ClusterSelectorH\x00\x42\x0b\n\tplacement"\xe3\x01\n\x0eManagedCluster\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12I\n\x06labels\x18\x04 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.ManagedCluster.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb5\x01\n\x0f\x43lusterSelector\x12\x11\n\x04zone\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x0e\x63luster_labels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntryB\x03\xe0\x41\x02\x1a\x34\n\x12\x43lusterLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe7\x04\n\nOrderedJob\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x03 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x04 
\x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x05 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x06 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12"\n\x15prerequisite_step_ids\x18\n \x03(\tB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x9d\x01\n\x11TemplateParameter\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ields\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidationB\x03\xe0\x41\x01"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type"\'\n\x0fRegexValidation\x12\x14\n\x07regexes\x18\x01 \x03(\tB\x03\xe0\x41\x02"&\n\x0fValueValidation\x12\x13\n\x06values\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12;\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraphB\x03\xe0\x41\x03\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNodeB\x03\xe0\x41\x03"\xa3\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 
\x01(\tB\x03\xe0\x41\x03"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa4\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"r\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"\xad\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x65\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntryB\x03\xe0\x41\x01\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xc8\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x8f\x01\n\x1dUpdateWorkflowTemplateRequest\x12n\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"\x91\x01\n\x1cListWorkflowTemplatesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x81\x01\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"u\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x32\xe6\x10\n\x17WorkflowTemplateService\x12\x9b\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\xda\x41\x0fparent,template\x12\xf4\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"{\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\xda\x41\x04name\x12\xd5\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xd8\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x0fname,parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xf4\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xeb\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x0fparent,template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xa6\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12"\n\x15prerequisite_step_ids\x18\n \x03(\tB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x9d\x01\n\x11TemplateParameter\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ields\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidationB\x03\xe0\x41\x01"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type"\'\n\x0fRegexValidation\x12\x14\n\x07regexes\x18\x01 \x03(\tB\x03\xe0\x41\x02"&\n\x0fValueValidation\x12\x13\n\x06values\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12;\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraphB\x03\xe0\x41\x03\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x06 
\x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNodeB\x03\xe0\x41\x03"\xa3\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa4\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"r\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"\xad\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x65\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntryB\x03\xe0\x41\x01\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xc8\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x03 \x01(\tB\x03\xe0\x41\x01"b\n\x1dUpdateWorkflowTemplateRequest\x12\x41\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"\x91\x01\n\x1cListWorkflowTemplatesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x81\x01\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"u\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 
\x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x32\xe6\x10\n\x17WorkflowTemplateService\x12\x9b\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\xda\x41\x0fparent,template\x12\xf4\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"{\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\xda\x41\x04name\x12\xd5\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xd8\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x0fname,parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xf4\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xeb\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x0fparent,template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xa6\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xc8\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 
\x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12L\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12@\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraphB\x03\xe0\x41\x03\x12L\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12I\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"P\n\rWorkflowGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNodeB\x03\xe0\x41\x03"\xa9\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12I\n\x05state\x18\x05 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa9\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"m\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xbc\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdd\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x94\x01\n\x1dUpdateWorkflowTemplateRequest\x12s\n\x08template\x18\x01 
\x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"Z\n\x1cListWorkflowTemplatesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x86\x01\n\x1dListWorkflowTemplatesResponse\x12G\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"p\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xe9\x11\n\x17WorkflowTemplateService\x12\xb0\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\xda\x41\x10parent, template\x12\x89\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x85\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x12\xe5\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xe3\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x10name, parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\x84\x03\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x10parent, 
template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xba\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xb0\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\xda\x41\x08template\x12\x9c\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"\x87\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\xda\x41\x06parent\x12\xf6\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"\x85\x01\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xc8\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12L\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12@\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraphB\x03\xe0\x41\x03\x12L\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12I\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12S\n\nparameters\x18\x08 
\x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"P\n\rWorkflowGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNodeB\x03\xe0\x41\x03"\xa9\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12I\n\x05state\x18\x05 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa9\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"m\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xbc\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdd\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"g\n\x1dUpdateWorkflowTemplateRequest\x12\x46\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"Z\n\x1cListWorkflowTemplatesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"\x86\x01\n\x1dListWorkflowTemplatesResponse\x12G\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"p\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 
\x01(\x05\x32\xe9\x11\n\x17WorkflowTemplateService\x12\xb0\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\xda\x41\x10parent, template\x12\x89\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x85\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x12\xe5\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xe3\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x10name, parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\x84\x03\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x10parent, template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xba\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xb0\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\xda\x41\x08template\x12\x9c\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"\x87\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\xda\x41\x06parent\x12\xf6\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"\x85\x01\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -2074,9 +2074,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" - ), + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -2088,8 +2086,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4774, - serialized_end=4922, + serialized_start=4773, + serialized_end=4876, ) @@ -2163,8 +2161,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4924, - 
serialized_end=5014, + serialized_start=4878, + serialized_end=4968, ) @@ -2220,8 +2218,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5017, - serialized_end=5151, + serialized_start=4971, + serialized_end=5105, ) @@ -2279,8 +2277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5153, - serialized_end=5265, + serialized_start=5107, + serialized_end=5219, ) _WORKFLOWTEMPLATE_LABELSENTRY.containing_type = _WORKFLOWTEMPLATE @@ -3292,8 +3290,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=5268, - serialized_end=7549, + serialized_start=5222, + serialized_end=7503, methods=[ _descriptor.MethodDescriptor( name="CreateWorkflowTemplate", diff --git a/noxfile.py b/noxfile.py index a2eefbb6..7949a4e3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -86,7 +86,7 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) def unit(session): """Run the unit test suite.""" default(session) diff --git a/synth.metadata b/synth.metadata index dfd138d5..e96f585b 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-10-15T12:20:15.152154Z", + "updateTime": "2020-01-08T13:17:56.610055Z", "sources": [ { "generator": { "name": "artman", - "version": "0.39.0", - "dockerImage": "googleapis/artman@sha256:72554d0b3bdc0b4ac7d6726a6a606c00c14b454339037ed86be94574fb05d9f3" + "version": "0.43.0", + "dockerImage": "googleapis/artman@sha256:264654a37596a44b0668b8ce6ac41082d713f6ee150b3fc6425fa78cc64e4f20" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "82e14b22669d5748d7a0922634159794ce0bf796", - "internalRef": "274692507" + "sha": "08b488e0660c59842a7dee0e3e2b65d9e3a514a9", + "internalRef": "288625007" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], From 851913873c74abc20b4950fa4429e232f3ecf974 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 30 Jan 2020 09:15:15 -0800 Subject: [PATCH 2/7] chore(dataproc): bump copyright year to 2020; change docstring line breaks (via synth) (#10260) --- google/__init__.py | 2 +- google/cloud/__init__.py | 2 +- google/cloud/dataproc.py | 2 +- google/cloud/dataproc_v1/__init__.py | 2 +- .../gapic/cluster_controller_client.py | 2 +- google/cloud/dataproc_v1/gapic/enums.py | 2 +- .../gapic/job_controller_client.py | 2 +- .../cluster_controller_grpc_transport.py | 2 +- .../job_controller_grpc_transport.py | 2 +- ...orkflow_template_service_grpc_transport.py | 2 +- .../gapic/workflow_template_service_client.py | 2 +- .../proto/autoscaling_policies_pb2.py | 10 +- .../cloud/dataproc_v1/proto/clusters_pb2.py | 35 +- google/cloud/dataproc_v1/proto/jobs_pb2.py | 20 +- .../proto/workflow_templates_pb2.py | 14 +- google/cloud/dataproc_v1/types.py | 2 +- google/cloud/dataproc_v1beta2/__init__.py | 2 +- .../autoscaling_policy_service_client.py | 2 +- .../gapic/cluster_controller_client.py | 2 +- google/cloud/dataproc_v1beta2/gapic/enums.py | 2 +- .../gapic/job_controller_client.py | 2 +- ...toscaling_policy_service_grpc_transport.py | 2 +- .../cluster_controller_grpc_transport.py | 2 +- .../job_controller_grpc_transport.py | 2 +- ...orkflow_template_service_grpc_transport.py | 2 +- .../gapic/workflow_template_service_client.py | 2 +- .../proto/autoscaling_policies_pb2.py | 10 +- 
.../dataproc_v1beta2/proto/clusters_pb2.py | 38 ++- .../cloud/dataproc_v1beta2/proto/jobs_pb2.py | 8 +- .../proto/workflow_templates_pb2.py | 14 +- google/cloud/dataproc_v1beta2/types.py | 2 +- synth.metadata | 322 +++++++++++++++++- .../v1/test_system_cluster_controller_v1.py | 2 +- .../test_system_cluster_controller_v1beta2.py | 2 +- .../v1/test_cluster_controller_client_v1.py | 2 +- .../gapic/v1/test_job_controller_client_v1.py | 2 +- ...est_workflow_template_service_client_v1.py | 2 +- ...toscaling_policy_service_client_v1beta2.py | 2 +- .../test_cluster_controller_client_v1beta2.py | 2 +- .../test_job_controller_client_v1beta2.py | 2 +- ...orkflow_template_service_client_v1beta2.py | 2 +- 41 files changed, 435 insertions(+), 100 deletions(-) diff --git a/google/__init__.py b/google/__init__.py index 8fcc60e2..9a1b64a6 100644 --- a/google/__init__.py +++ b/google/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py index 8fcc60e2..9a1b64a6 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc.py b/google/cloud/dataproc.py index 772a0dbf..25b76795 100644 --- a/google/cloud/dataproc.py +++ b/google/cloud/dataproc.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/__init__.py b/google/cloud/dataproc_v1/__init__.py index 3b5fda86..395e618f 100644 --- a/google/cloud/dataproc_v1/__init__.py +++ b/google/cloud/dataproc_v1/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py index e234297b..f849ff06 100644 --- a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py +++ b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/enums.py b/google/cloud/dataproc_v1/gapic/enums.py index 80626cef..9bbaf2a6 100644 --- a/google/cloud/dataproc_v1/gapic/enums.py +++ b/google/cloud/dataproc_v1/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
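The __doc__ hunks in the generated *_pb2.py files below are mechanical rewraps of the message docstrings: only the line-break positions move, the text itself is unchanged. A minimal sketch of the rewrapping behavior, assuming a plain textwrap-based refill of the summary paragraph (the actual synthtool/docs-plugin logic is not part of this patch):

import textwrap

def reflow_summary(doc, width=72):
    # Refill only the leading summary paragraph of a message docstring;
    # later paragraphs (e.g. the Attributes block) keep their layout.
    paragraphs = doc.split("\n\n")
    paragraphs[0] = textwrap.fill(" ".join(paragraphs[0].split()), width=width)
    return "\n\n".join(paragraphs)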
diff --git a/google/cloud/dataproc_v1/gapic/job_controller_client.py b/google/cloud/dataproc_v1/gapic/job_controller_client.py index 77327202..7752a4b7 100644 --- a/google/cloud/dataproc_v1/gapic/job_controller_client.py +++ b/google/cloud/dataproc_v1/gapic/job_controller_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py index 3c4a813d..8ffede67 100644 --- a/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py +++ b/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py index e1fc88fb..d71efe43 100644 --- a/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py +++ b/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py index 86a35d06..06564b07 100644 --- a/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py +++ b/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py index df8e79dd..27e3eadc 100644 --- a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py +++ b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py index 0dc596cd..8d76aae2 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py @@ -790,7 +790,8 @@ dict( DESCRIPTOR=_AUTOSCALINGPOLICY, __module__="google.cloud.dataproc_v1.proto.autoscaling_policies_pb2", - __doc__="""Describes an autoscaling policy for Dataproc cluster autoscaler. + __doc__="""Describes an autoscaling policy for Dataproc cluster + autoscaler. 
Attributes: @@ -903,8 +904,8 @@ dict( DESCRIPTOR=_INSTANCEGROUPAUTOSCALINGPOLICYCONFIG, __module__="google.cloud.dataproc_v1.proto.autoscaling_policies_pb2", - __doc__="""Configuration for the size bounds of an instance group, including its - proportional size to other groups. + __doc__="""Configuration for the size bounds of an instance group, + including its proportional size to other groups. Attributes: @@ -1084,7 +1085,8 @@ dict( DESCRIPTOR=_LISTAUTOSCALINGPOLICIESRESPONSE, __module__="google.cloud.dataproc_v1.proto.autoscaling_policies_pb2", - __doc__="""A response to a request to list autoscaling policies in a project. + __doc__="""A response to a request to list autoscaling policies in a + project. Attributes: diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2.py b/google/cloud/dataproc_v1/proto/clusters_pb2.py index 59c9e460..b4c0aa2b 100644 --- a/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ b/google/cloud/dataproc_v1/proto/clusters_pb2.py @@ -2795,8 +2795,8 @@ ), DESCRIPTOR=_CLUSTER, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Describes the identifying information, config, and status of a cluster - of Compute Engine instances. + __doc__="""Describes the identifying information, config, and status + of a cluster of Compute Engine instances. Attributes: @@ -2952,8 +2952,8 @@ ), DESCRIPTOR=_GCECLUSTERCONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Common config settings for resources of Compute Engine cluster - instances, applicable to all instances in the cluster. + __doc__="""Common config settings for resources of Compute Engine + cluster instances, applicable to all instances in the cluster. Attributes: @@ -3038,8 +3038,8 @@ dict( DESCRIPTOR=_INSTANCEGROUPCONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Optional. The config settings for Compute Engine resources in an - instance group, such as a master or worker group. + __doc__="""Optional. The config settings for Compute Engine resources + in an instance group, such as a master or worker group. Attributes: @@ -3094,7 +3094,8 @@ dict( DESCRIPTOR=_MANAGEDGROUPCONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the resources used to actively manage an instance group. + __doc__="""Specifies the resources used to actively manage an + instance group. Attributes: @@ -3116,8 +3117,8 @@ dict( DESCRIPTOR=_ACCELERATORCONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the type and number of accelerator cards attached to the - instances of an instance. See `GPUs on Compute + __doc__="""Specifies the type and number of accelerator cards + attached to the instances of an instance. See `GPUs on Compute Engine `__. @@ -3152,7 +3153,8 @@ dict( DESCRIPTOR=_DISKCONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the config of disk options for a group of VM instances. + __doc__="""Specifies the config of disk options for a group of VM + instances. Attributes: @@ -3182,8 +3184,8 @@ dict( DESCRIPTOR=_NODEINITIALIZATIONACTION, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies an executable to run on a fully configured node and a timeout - period for executable completion. + __doc__="""Specifies an executable to run on a fully configured node + and a timeout period for executable completion. 
Attributes: @@ -3331,7 +3333,8 @@ ), DESCRIPTOR=_SOFTWARECONFIG, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Specifies the selection and config of software inside the cluster. + __doc__="""Specifies the selection and config of software inside the + cluster. Attributes: @@ -3389,7 +3392,8 @@ ), DESCRIPTOR=_CLUSTERMETRICS, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Contains cluster daemon metrics, such as HDFS and YARN stats. + __doc__="""Contains cluster daemon metrics, such as HDFS and YARN + stats. **Beta Feature**: This report is available for testing purposes only. It may be changed before final release. @@ -3568,7 +3572,8 @@ dict( DESCRIPTOR=_GETCLUSTERREQUEST, __module__="google.cloud.dataproc_v1.proto.clusters_pb2", - __doc__="""Request to get the resource representation for a cluster in a project. + __doc__="""Request to get the resource representation for a cluster + in a project. Attributes: diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2.py b/google/cloud/dataproc_v1/proto/jobs_pb2.py index 470b4bb9..68b7392f 100644 --- a/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ b/google/cloud/dataproc_v1/proto/jobs_pb2.py @@ -3429,8 +3429,8 @@ ), DESCRIPTOR=_SPARKJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache Spark `__ - applications on YARN. + __doc__="""A Dataproc job for running `Apache + Spark `__ applications on YARN. Attributes: @@ -3583,8 +3583,8 @@ ), DESCRIPTOR=_HIVEJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache Hive `__ - queries on YARN. + __doc__="""A Dataproc job for running `Apache + Hive `__ queries on YARN. Attributes: @@ -3700,8 +3700,8 @@ ), DESCRIPTOR=_PIGJOB, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A Dataproc job for running `Apache Pig `__ - queries on YARN. + __doc__="""A Dataproc job for running `Apache + Pig `__ queries on YARN. Attributes: @@ -3818,8 +3818,9 @@ dict( DESCRIPTOR=_YARNAPPLICATION, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A YARN application created by a job. Application information is a subset - of org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. + __doc__="""A YARN application created by a job. Application + information is a subset of + org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. **Beta Feature**: This report is available for testing purposes only. It may be changed before final release. @@ -3988,7 +3989,8 @@ dict( DESCRIPTOR=_GETJOBREQUEST, __module__="google.cloud.dataproc_v1.proto.jobs_pb2", - __doc__="""A request to get the resource representation for a job in a project. + __doc__="""A request to get the resource representation for a job in + a project. Attributes: diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py index 87c81d7b..0c3125b1 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py @@ -2591,7 +2591,8 @@ ), DESCRIPTOR=_CLUSTERSELECTOR, __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A selector that chooses target cluster for jobs based on metadata. + __doc__="""A selector that chooses target cluster for jobs based on + metadata. 
Attributes: @@ -2676,10 +2677,10 @@ dict( DESCRIPTOR=_TEMPLATEPARAMETER, __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A configurable parameter that replaces one or more fields in the - template. Parameterizable fields: - Labels - File uris - Job properties - - Job arguments - Script variables - Main class (in HadoopJob and - SparkJob) - Zone (in ClusterSelector) + __doc__="""A configurable parameter that replaces one or more fields + in the template. Parameterizable fields: - Labels - File uris - Job + properties - Job arguments - Script variables - Main class (in HadoopJob + and SparkJob) - Zone (in ClusterSelector) Attributes: @@ -3132,7 +3133,8 @@ dict( DESCRIPTOR=_LISTWORKFLOWTEMPLATESRESPONSE, __module__="google.cloud.dataproc_v1.proto.workflow_templates_pb2", - __doc__="""A response to a request to list workflow templates in a project. + __doc__="""A response to a request to list workflow templates in a + project. Attributes: diff --git a/google/cloud/dataproc_v1/types.py b/google/cloud/dataproc_v1/types.py index 4d3c90bc..600233b2 100644 --- a/google/cloud/dataproc_v1/types.py +++ b/google/cloud/dataproc_v1/types.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/__init__.py b/google/cloud/dataproc_v1beta2/__init__.py index 3092615d..52c85448 100644 --- a/google/cloud/dataproc_v1beta2/__init__.py +++ b/google/cloud/dataproc_v1beta2/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py index 47bcdc67..9f7b4695 100644 --- a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py index 246638c9..e64fc0d7 100644 --- a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/enums.py b/google/cloud/dataproc_v1beta2/gapic/enums.py index f99be1fe..8c3b0980 100644 --- a/google/cloud/dataproc_v1beta2/gapic/enums.py +++ b/google/cloud/dataproc_v1beta2/gapic/enums.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
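Because the pb2 hunks above only rewrap docstring text, a whitespace-insensitive view of any message docstring is stable across this regeneration. A quick runtime check, assuming google-cloud-dataproc is installed:

from google.cloud.dataproc_v1 import types

# Whitespace-normalize the summary paragraph of a reflowed docstring;
# the normalized text is identical before and after this patch.
summary = types.Cluster.__doc__.split("\n\n")[0]
print(" ".join(summary.split()))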
diff --git a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py index 22c84bbc..21b6ca49 100644 --- a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py index ff40b5b2..98651ade 100644 --- a/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py +++ b/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py index b708113b..767268e5 100644 --- a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py +++ b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py index f906f9cf..33bcf39e 100644 --- a/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py +++ b/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py index 14398811..39b8c85e 100644 --- a/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py +++ b/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
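The v1beta2 transport hunks above are copyright-only; the transport surface is unchanged. For reference, a usage sketch showing how one of these transports is handed to its client — the regional endpoint address is an illustrative assumption, and default application credentials are assumed to be available:

from google.cloud.dataproc_v1beta2 import ClusterControllerClient
from google.cloud.dataproc_v1beta2.gapic.transports import (
    cluster_controller_grpc_transport,
)

# Build an explicit gRPC transport rather than letting the client
# create its own channel, then pass it to the client constructor.
transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
    address="us-central1-dataproc.googleapis.com:443"  # assumed endpoint
)
client = ClusterControllerClient(transport=transport)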
diff --git a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py index 9e67cd3f..5319e2f1 100644 --- a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py index 50c0c54d..7c3be028 100644 --- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py @@ -792,7 +792,8 @@ dict( DESCRIPTOR=_AUTOSCALINGPOLICY, __module__="google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2", - __doc__="""Describes an autoscaling policy for Dataproc cluster autoscaler. + __doc__="""Describes an autoscaling policy for Dataproc cluster + autoscaler. Attributes: @@ -905,8 +906,8 @@ dict( DESCRIPTOR=_INSTANCEGROUPAUTOSCALINGPOLICYCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2", - __doc__="""Configuration for the size bounds of an instance group, including its - proportional size to other groups. + __doc__="""Configuration for the size bounds of an instance group, + including its proportional size to other groups. Attributes: @@ -1086,7 +1087,8 @@ dict( DESCRIPTOR=_LISTAUTOSCALINGPOLICIESRESPONSE, __module__="google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2", - __doc__="""A response to a request to list autoscaling policies in a project. + __doc__="""A response to a request to list autoscaling policies in a + project. Attributes: diff --git a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py index 48f0feaf..d043480d 100644 --- a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py @@ -3219,8 +3219,8 @@ ), DESCRIPTOR=_CLUSTER, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Describes the identifying information, config, and status of a cluster - of Compute Engine instances. + __doc__="""Describes the identifying information, config, and status + of a cluster of Compute Engine instances. Attributes: @@ -3413,8 +3413,8 @@ ), DESCRIPTOR=_GCECLUSTERCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Common config settings for resources of Compute Engine cluster - instances, applicable to all instances in the cluster. + __doc__="""Common config settings for resources of Compute Engine + cluster instances, applicable to all instances in the cluster. Attributes: @@ -3501,8 +3501,8 @@ dict( DESCRIPTOR=_INSTANCEGROUPCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""The config settings for Compute Engine resources in an instance group, - such as a master or worker group. + __doc__="""The config settings for Compute Engine resources in an + instance group, such as a master or worker group. Attributes: @@ -3557,7 +3557,8 @@ dict( DESCRIPTOR=_MANAGEDGROUPCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the resources used to actively manage an instance group. 
+ __doc__="""Specifies the resources used to actively manage an + instance group. Attributes: @@ -3579,8 +3580,8 @@ dict( DESCRIPTOR=_ACCELERATORCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the type and number of accelerator cards attached to the - instances of an instance group (see `GPUs on Compute + __doc__="""Specifies the type and number of accelerator cards + attached to the instances of an instance group (see `GPUs on Compute Engine `__). @@ -3615,7 +3616,8 @@ dict( DESCRIPTOR=_DISKCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the config of disk options for a group of VM instances. + __doc__="""Specifies the config of disk options for a group of VM + instances. Attributes: @@ -3680,7 +3682,8 @@ dict( DESCRIPTOR=_SECURITYCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Security related configuration, including encryption, Kerberos, etc. + __doc__="""Security related configuration, including encryption, + Kerberos, etc. Attributes: @@ -3770,8 +3773,8 @@ dict( DESCRIPTOR=_NODEINITIALIZATIONACTION, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies an executable to run on a fully configured node and a timeout - period for executable completion. + __doc__="""Specifies an executable to run on a fully configured node + and a timeout period for executable completion. Attributes: @@ -3829,7 +3832,8 @@ ), DESCRIPTOR=_SOFTWARECONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Specifies the selection and config of software inside the cluster. + __doc__="""Specifies the selection and config of software inside the + cluster. Attributes: @@ -3887,7 +3891,8 @@ ), DESCRIPTOR=_CLUSTERMETRICS, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Contains cluster daemon metrics, such as HDFS and YARN stats. + __doc__="""Contains cluster daemon metrics, such as HDFS and YARN + stats. **Beta Feature**: This report is available for testing purposes only. It may be changed before final release. @@ -4080,7 +4085,8 @@ dict( DESCRIPTOR=_GETCLUSTERREQUEST, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Request to get the resource representation for a cluster in a project. + __doc__="""Request to get the resource representation for a cluster + in a project. Attributes: diff --git a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py index b25037c0..c40e358b 100644 --- a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py @@ -4097,8 +4097,9 @@ dict( DESCRIPTOR=_YARNAPPLICATION, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A YARN application created by a job. Application information is a subset - of org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. + __doc__="""A YARN application created by a job. Application + information is a subset of + org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. **Beta Feature**: This report is available for testing purposes only. It may be changed before final release. @@ -4273,7 +4274,8 @@ dict( DESCRIPTOR=_GETJOBREQUEST, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A request to get the resource representation for a job in a project. + __doc__="""A request to get the resource representation for a job in + a project. 
Attributes: diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py index 8b83c1d3..af679f35 100644 --- a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py @@ -2638,7 +2638,8 @@ ), DESCRIPTOR=_CLUSTERSELECTOR, __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A selector that chooses target cluster for jobs based on metadata. + __doc__="""A selector that chooses target cluster for jobs based on + metadata. Attributes: @@ -2723,10 +2724,10 @@ dict( DESCRIPTOR=_TEMPLATEPARAMETER, __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A configurable parameter that replaces one or more fields in the - template. Parameterizable fields: - Labels - File uris - Job properties - - Job arguments - Script variables - Main class (in HadoopJob and - SparkJob) - Zone (in ClusterSelector) + __doc__="""A configurable parameter that replaces one or more fields + in the template. Parameterizable fields: - Labels - File uris - Job + properties - Job arguments - Script variables - Main class (in HadoopJob + and SparkJob) - Zone (in ClusterSelector) Attributes: @@ -3183,7 +3184,8 @@ dict( DESCRIPTOR=_LISTWORKFLOWTEMPLATESRESPONSE, __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A response to a request to list workflow templates in a project. + __doc__="""A response to a request to list workflow templates in a + project. Attributes: diff --git a/google/cloud/dataproc_v1beta2/types.py b/google/cloud/dataproc_v1beta2/types.py index 811c656a..23d3f87e 100644 --- a/google/cloud/dataproc_v1beta2/types.py +++ b/google/cloud/dataproc_v1beta2/types.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/synth.metadata b/synth.metadata index e96f585b..314ca5ce 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,19 +1,20 @@ { - "updateTime": "2020-01-08T13:17:56.610055Z", + "updateTime": "2020-01-30T13:21:23.253293Z", "sources": [ { "generator": { "name": "artman", - "version": "0.43.0", - "dockerImage": "googleapis/artman@sha256:264654a37596a44b0668b8ce6ac41082d713f6ee150b3fc6425fa78cc64e4f20" + "version": "0.44.4", + "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "08b488e0660c59842a7dee0e3e2b65d9e3a514a9", - "internalRef": "288625007" + "sha": "c1246a29e22b0f98e800a536b5b0da2d933a55f2", + "internalRef": "292310790", + "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n" } }, { @@ -45,5 +46,316 @@ "config": "google/cloud/dataproc/artman_dataproc_v1.yaml" } } + ], + "newFiles": [ + { + "path": ".coveragerc" + }, + { + "path": ".flake8" + }, + { + "path": ".repo-metadata.json" + }, + { + "path": "CHANGELOG.md" + }, + { + "path": "LICENSE" + }, + { + "path": "MANIFEST.in" + }, + { + "path": "README.rst" + }, + { + "path": "docs/README.rst" + }, + { + "path": "docs/_static/custom.css" + }, + { + "path": "docs/_templates/layout.html" + }, + { + "path": "docs/changelog.md" + }, + { + "path": "docs/conf.py" + }, + { + "path": "docs/gapic/v1/api.rst" + }, + { + "path": "docs/gapic/v1/types.rst" + }, + { + "path": "docs/gapic/v1beta2/api.rst" + }, + { + "path": "docs/gapic/v1beta2/types.rst" + }, + { + "path": "docs/index.rst" + }, + { + "path": "google/__init__.py" + }, + { + "path": "google/cloud/__init__.py" + }, + { + "path": "google/cloud/dataproc.py" + }, + { + "path": "google/cloud/dataproc_v1/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/enums.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/job_controller_client.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/job_controller_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/transports/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client.py" + }, + { + "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/autoscaling_policies.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py" + }, + { + "path": 
"google/cloud/dataproc_v1/proto/clusters.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/clusters_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/jobs.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/jobs_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/operations.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/operations_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/operations_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/shared.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/shared_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/shared_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/workflow_templates.proto" + }, + { + "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1/types.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/enums.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/transports/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/__init__.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/clusters.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/jobs.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/operations.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py" + }, + { + "path": 
"google/cloud/dataproc_v1beta2/proto/shared.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates.proto" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py" + }, + { + "path": "google/cloud/dataproc_v1beta2/types.py" + }, + { + "path": "noxfile.py" + }, + { + "path": "setup.cfg" + }, + { + "path": "setup.py" + }, + { + "path": "synth.metadata" + }, + { + "path": "synth.py" + }, + { + "path": "tests/system/gapic/v1/test_system_cluster_controller_v1.py" + }, + { + "path": "tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py" + }, + { + "path": "tests/unit/gapic/v1/test_cluster_controller_client_v1.py" + }, + { + "path": "tests/unit/gapic/v1/test_job_controller_client_v1.py" + }, + { + "path": "tests/unit/gapic/v1/test_workflow_template_service_client_v1.py" + }, + { + "path": "tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py" + }, + { + "path": "tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py" + }, + { + "path": "tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py" + }, + { + "path": "tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py" + } ] } \ No newline at end of file diff --git a/tests/system/gapic/v1/test_system_cluster_controller_v1.py b/tests/system/gapic/v1/test_system_cluster_controller_v1.py index 86fbb3c7..a595af5b 100644 --- a/tests/system/gapic/v1/test_system_cluster_controller_v1.py +++ b/tests/system/gapic/v1/test_system_cluster_controller_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py b/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py index 15d2ad67..8db97e8c 100644 --- a/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py +++ b/tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1/test_cluster_controller_client_v1.py b/tests/unit/gapic/v1/test_cluster_controller_client_v1.py index 10d2c29e..1c15fdcf 100644 --- a/tests/unit/gapic/v1/test_cluster_controller_client_v1.py +++ b/tests/unit/gapic/v1/test_cluster_controller_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/v1/test_job_controller_client_v1.py b/tests/unit/gapic/v1/test_job_controller_client_v1.py index c3a0c0a6..3508c7e2 100644 --- a/tests/unit/gapic/v1/test_job_controller_client_v1.py +++ b/tests/unit/gapic/v1/test_job_controller_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py b/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py index 33024817..5ed3c69e 100644 --- a/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py +++ b/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py index 6679ddd6..2cc573a3 100644 --- a/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py +++ b/tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py index 2953b818..7c75dc57 100644 --- a/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py +++ b/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py index 517a7bcd..00802240 100644 --- a/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py +++ b/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py index 05f990ca..bb7e0b4f 100644 --- a/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py +++ b/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 555dddaabc9bd6160ffee73972f118dc2086923d Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 5 Feb 2020 23:13:02 +0000 Subject: [PATCH 3/7] chore: add split repo templates (#1) * chore: add split repo templates * Loosen coverage threshold to 89% --- .github/CONTRIBUTING.md | 28 ++ .github/ISSUE_TEMPLATE/bug_report.md | 44 +++ .github/ISSUE_TEMPLATE/feature_request.md | 18 ++ .github/ISSUE_TEMPLATE/support_request.md | 7 + .github/PULL_REQUEST_TEMPLATE.md | 7 + .github/release-please.yml | 1 + .gitignore | 58 ++++ .kokoro/build.sh | 39 +++ .kokoro/continuous/common.cfg | 27 ++ .kokoro/continuous/continuous.cfg | 1 + .kokoro/docs/common.cfg | 48 ++++ .kokoro/docs/docs.cfg | 1 + .kokoro/presubmit/common.cfg | 27 ++ .kokoro/presubmit/presubmit.cfg | 1 + .kokoro/publish-docs.sh | 57 ++++ .kokoro/release.sh | 34 +++ .kokoro/release/common.cfg | 64 +++++ .kokoro/release/release.cfg | 1 + .kokoro/trampoline.sh | 23 ++ .repo-metadata.json | 2 +- CODE_OF_CONDUCT.md | 44 +++ CONTRIBUTING.rst | 279 +++++++++++++++++++ MANIFEST.in | 1 + docs/conf.py | 17 +- noxfile.py | 12 +- renovate.json | 5 + setup.py | 2 +- synth.metadata | 320 +--------------------- synth.py | 2 +- 29 files changed, 832 insertions(+), 338 deletions(-) create mode 100644 .github/CONTRIBUTING.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/support_request.md create mode 100644 .github/PULL_REQUEST_TEMPLATE.md create mode 100644 .github/release-please.yml create mode 100644 .gitignore create mode 100755 .kokoro/build.sh create mode 100644 .kokoro/continuous/common.cfg create mode 100644 .kokoro/continuous/continuous.cfg create mode 100644 .kokoro/docs/common.cfg create mode 100644 .kokoro/docs/docs.cfg create mode 100644 .kokoro/presubmit/common.cfg create mode 100644 .kokoro/presubmit/presubmit.cfg create mode 100755 .kokoro/publish-docs.sh create mode 100755 .kokoro/release.sh create mode 100644 .kokoro/release/common.cfg create mode 100644 .kokoro/release/release.cfg create mode 100755 .kokoro/trampoline.sh create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.rst create mode 100644 renovate.json diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md new file mode 100644 index 00000000..939e5341 --- /dev/null +++ b/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to <https://cla.developers.google.com/> to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/).
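Besides the new templates, the noxfile.py change counted in the stat list above loosens the coverage gate to 89%, per the commit message. The session body is not shown in this patch, so the following is only a sketch of the usual shared-template shape, with the flag placement assumed:

import nox

@nox.session(python="3.7")
def cover(session):
    # Run the final coverage report; fail the session below the
    # loosened 89% bar.
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=89")
    session.run("coverage", "erase")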
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 00000000..15601531 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,44 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-dataproc/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python + - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `google-cloud-dataproc` version: `pip show google-cloud-dataproc` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 00000000..6365857f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 00000000..99586903 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..38f15f11 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! 
Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-dataproc/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/.github/release-please.yml b/.github/release-please.yml new file mode 100644 index 00000000..4507ad05 --- /dev/null +++ b/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..3fb06e09 --- /dev/null +++ b/.gitignore @@ -0,0 +1,58 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated + +# Virtual environment +env/ +coverage.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. +pylintrc +pylintrc.test \ No newline at end of file diff --git a/.kokoro/build.sh b/.kokoro/build.sh new file mode 100755 index 00000000..0458ba16 --- /dev/null +++ b/.kokoro/build.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd github/python-dataproc + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +python3.6 -m nox diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg new file mode 100644 index 00000000..1de06401 --- /dev/null +++ b/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. 
+build_file: "python-dataproc/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-dataproc/.kokoro/build.sh" +} diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg new file mode 100644 index 00000000..8f43917d --- /dev/null +++ b/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg new file mode 100644 index 00000000..8b957cf6 --- /dev/null +++ b/.kokoro/docs/common.cfg @@ -0,0 +1,48 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-dataproc/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-dataproc/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} \ No newline at end of file diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg new file mode 100644 index 00000000..8f43917d --- /dev/null +++ b/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg new file mode 100644 index 00000000..1de06401 --- /dev/null +++ b/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-dataproc/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-dataproc/.kokoro/build.sh" +} diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 00000000..8f43917d --- /dev/null +++ b/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh new file mode 100755 index 00000000..81ce21ed --- /dev/null +++ b/.kokoro/publish-docs.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +cd github/python-dataproc + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install gcp-docuploader + +# install a json parser +sudo apt-get update +sudo apt-get -y install software-properties-common +sudo add-apt-repository universe +sudo apt-get update +sudo apt-get -y install jq + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging diff --git a/.kokoro/release.sh b/.kokoro/release.sh new file mode 100755 index 00000000..a785fb31 --- /dev/null +++ b/.kokoro/release.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +#!/bin/bash + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +cd github/python-dataproc +python3 setup.py sdist bdist_wheel +twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg new file mode 100644 index 00000000..4bf136c7 --- /dev/null +++ b/.kokoro/release/common.cfg @@ -0,0 +1,64 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-dataproc/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-dataproc/.kokoro/release.sh" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } +} + +# Fetch magictoken to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "releasetool-magictoken" + } + } +} + +# Fetch api key to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "magic-github-proxy-api-key" + } + } +} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg new file mode 100644 index 00000000..8f43917d --- /dev/null +++ b/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh new file mode 100755 index 00000000..e8c4251f --- /dev/null +++ b/.kokoro/trampoline.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? 
+ +chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh +${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true + +exit ${ret_code} diff --git a/.repo-metadata.json b/.repo-metadata.json index c7d1f8c0..cc849460 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,7 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559745", "release_level": "alpha", "language": "python", - "repo": "googleapis/google-cloud-python", + "repo": "googleapis/python-dataproc", "distribution_name": "google-cloud-dataproc", "api_id": "dataproc.googleapis.com", "requires_billing": true diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..b3d1f602 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,44 @@ + +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. + +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. + +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 00000000..e21bbd63 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,279 @@ +.. Generated by synthtool. DO NOT EDIT! +############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. 
+ +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: 2.7, + 3.5, 3.6, and 3.7 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-dataproc`` `repo`_ on GitHub. + +- Fork and clone the ``python-dataproc`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``python-dataproc`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-dataproc``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-dataproc.git hack-on-python-dataproc + $ cd hack-on-python-dataproc + # Configure remotes such that you can pull changes from the googleapis/python-dataproc + # repository into your local repository. + $ git remote add upstream git@github.com:googleapis/python-dataproc.git + # fetch and merge changes from upstream into master + $ git fetch upstream + $ git merge upstream/master + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-dataproc + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 + $ ... + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +***************************************** +I'm getting weird errors... Can you help? +***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ + +- PEP8 compliance, with exceptions defined in the linter configuration. 
+ If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="master" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-dataproc``. The the suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout and the + the branch should be the main branch on that remote (``master``). + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + $ nox -s system-3.7 + $ nox -s system-2.7 + + .. note:: + + System tests are only configured to run under Python 2.7 and + Python 3.7. For expediency, we do not run them in older versions + of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + Such a file can be downloaded directly from the developer's console by clicking + "Generate new JSON key". See private key + `docs `__ + for more details. + +- Once you have downloaded your json keys, set the environment variable + ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: + + $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" + + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-dataproc/blob/master/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-dataproc + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.5`_ +- `Python 3.6`_ +- `Python 3.7`_ + +.. _Python 3.5: https://docs.python.org/3.5/ +.. _Python 3.6: https://docs.python.org/3.6/ +.. _Python 3.7: https://docs.python.org/3.7/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/python-dataproc/blob/master/noxfile.py + +We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ +and lack of continuous integration `support`_. + +.. _Python 2.5: https://docs.python.org/2.5/ +.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ +.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ + +We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no +longer supported by the core development team. + +Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. + +We also explicitly decided to support Python 3 beginning with version +3.5. Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ +.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. +- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/MANIFEST.in b/MANIFEST.in index 9cbf175a..cd011be2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * diff --git a/docs/conf.py b/docs/conf.py index f4cff2d0..ac3f1331 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,7 +20,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +__version__ = "" # -- General configuration ------------------------------------------------ @@ -66,7 +66,7 @@ # General information about the project. project = u"google-cloud-dataproc" -copyright = u"2017, Google" +copyright = u"2019, Google" author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for @@ -133,9 +133,9 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-dataproc", "github_user": "googleapis", - "github_repo": "google-cloud-python", + "github_repo": "python-dataproc", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -318,7 +318,7 @@ u"google-cloud-dataproc Documentation", author, "google-cloud-dataproc", - "GAPIC library for the {metadata.shortName} v1 service", + "google-cloud-dataproc Library", "APIs", ) ] @@ -339,14 +339,9 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/master/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/noxfile.py b/noxfile.py index 7949a4e3..3c386712 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,7 +23,6 @@ import nox -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -38,7 +37,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.install("flake8", BLACK_VERSION) session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -67,14 +66,13 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) session.install("-e", ".") # Run py.test against the unit tests. session.run( "py.test", "--quiet", + "--cov=google.cloud.dataproc", "--cov=google.cloud", "--cov=tests.unit", "--cov-append", @@ -113,9 +111,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") + session.install("-e", ".") # Run py.test against the system tests. @@ -133,7 +129,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=89") session.run("coverage", "erase") diff --git a/renovate.json b/renovate.json new file mode 100644 index 00000000..4fa94931 --- /dev/null +++ b/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base", ":preserveSemverRanges" + ] +} diff --git a/setup.py b/setup.py index 6d69fd1b..74c90047 100644 --- a/setup.py +++ b/setup.py @@ -60,7 +60,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", + url="https://github.com/googleapis/python-dataproc", classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/synth.metadata b/synth.metadata index 314ca5ce..92bd53b1 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2020-01-30T13:21:23.253293Z", + "updateTime": "2020-02-05T12:31:43.985272Z", "sources": [ { "generator": { @@ -12,14 +12,13 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c1246a29e22b0f98e800a536b5b0da2d933a55f2", - "internalRef": "292310790", - "log": "c1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for 
PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 
289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 
289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n5ef42bcd363ba0440f0ee65b3c80b499e9067ede\nMigrate Recommender v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288713066\n\n94f986afd365b7d7e132315ddcd43d7af0e652fb\nMigrate Container Analysis v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288708382\n\n7a751a279184970d3b6ba90e4dd4d22a382a0747\nRemove Container Analysis v1alpha1 (nobody publishes it).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288707473\n\n3c0d9c71242e70474b2b640e15bb0a435fd06ff0\nRemove specious annotation from BigQuery Data Transfer before\nanyone accidentally does anything that uses it.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288701604\n\n1af307a4764bd415ef942ac5187fa1def043006f\nMigrate BigQuery Connection to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288698681\n\n" + "sha": "a8ed9d921fdddc61d8467bfd7c1668f0ad90435c", + "internalRef": "293257997" } }, { "template": { - "name": "python_library", + "name": "python_split_library", "origin": "synthtool.gcp", "version": "2019.10.17" } @@ -46,316 +45,5 @@ "config": "google/cloud/dataproc/artman_dataproc_v1.yaml" } } - ], - "newFiles": [ - { - "path": ".coveragerc" - }, - { - "path": ".flake8" - }, - { - "path": ".repo-metadata.json" - }, - { - "path": "CHANGELOG.md" - }, - { - "path": "LICENSE" - }, - { - "path": "MANIFEST.in" - }, - { - "path": "README.rst" - }, - { - "path": "docs/README.rst" - }, - { - "path": "docs/_static/custom.css" - }, - { - "path": "docs/_templates/layout.html" - }, - { - "path": "docs/changelog.md" - }, - { - "path": "docs/conf.py" - }, - { - "path": "docs/gapic/v1/api.rst" - }, - { - "path": "docs/gapic/v1/types.rst" - }, - { - "path": "docs/gapic/v1beta2/api.rst" - }, - { - "path": "docs/gapic/v1beta2/types.rst" - }, - { - "path": "docs/index.rst" - }, - { - "path": "google/__init__.py" - }, - { - "path": "google/cloud/__init__.py" - }, - { - "path": "google/cloud/dataproc.py" - }, - { - "path": "google/cloud/dataproc_v1/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/__init__.py" - 
}, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates.proto" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1/types.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/enums.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py" - }, - { - "path": 
"google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/__init__.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates.proto" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py" - }, - { - "path": "google/cloud/dataproc_v1beta2/types.py" - }, - { - "path": "noxfile.py" - }, - { - "path": "setup.cfg" - }, - { - "path": "setup.py" - }, - { - "path": "synth.metadata" - }, - { - "path": "synth.py" - }, - { - "path": "tests/system/gapic/v1/test_system_cluster_controller_v1.py" - }, - { - "path": "tests/system/gapic/v1beta2/test_system_cluster_controller_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1/test_cluster_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_job_controller_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1/test_workflow_template_service_client_v1.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_autoscaling_policy_service_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py" - }, - { - "path": "tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py" - } ] } \ No newline at end of file diff --git a/synth.py b/synth.py index a2c50b0e..d32f07ba 100644 --- a/synth.py +++ b/synth.py @@ -81,7 +81,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=97, cov_level=100) +templated_files = common.py_library(unit_cov_level=97, cov_level=89) s.move(templated_files) s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 3f7fd5bef4ba959b9a6e153fb4ab6d8b6819948e Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 19 Feb 2020 14:44:56 -0800 Subject: [PATCH 4/7] feat: add autoscaling policy service client (via synth) (#5) --- google/cloud/dataproc.py | 2 + 
google/cloud/dataproc_v1/__init__.py | 9 + .../autoscaling_policy_service_client.py | 648 ++++++++++++++++++ ...utoscaling_policy_service_client_config.py | 48 ++ ...toscaling_policy_service_grpc_transport.py | 179 +++++ .../proto/autoscaling_policies.proto | 6 + .../proto/autoscaling_policies_pb2.py | 48 +- google/cloud/dataproc_v1/types.py | 9 +- synth.metadata | 27 +- ...st_autoscaling_policy_service_client_v1.py | 281 ++++++++ 10 files changed, 1226 insertions(+), 31 deletions(-) create mode 100644 google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py create mode 100644 google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py create mode 100644 google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py create mode 100644 tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py diff --git a/google/cloud/dataproc.py b/google/cloud/dataproc.py index 25b76795..aa7757bd 100644 --- a/google/cloud/dataproc.py +++ b/google/cloud/dataproc.py @@ -17,6 +17,7 @@ from __future__ import absolute_import +from google.cloud.dataproc_v1 import AutoscalingPolicyServiceClient from google.cloud.dataproc_v1 import ClusterControllerClient from google.cloud.dataproc_v1 import JobControllerClient from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient @@ -30,4 +31,5 @@ "ClusterControllerClient", "JobControllerClient", "WorkflowTemplateServiceClient", + "AutoscalingPolicyServiceClient", ) diff --git a/google/cloud/dataproc_v1/__init__.py b/google/cloud/dataproc_v1/__init__.py index 395e618f..267bdb29 100644 --- a/google/cloud/dataproc_v1/__init__.py +++ b/google/cloud/dataproc_v1/__init__.py @@ -20,6 +20,7 @@ import warnings from google.cloud.dataproc_v1 import types +from google.cloud.dataproc_v1.gapic import autoscaling_policy_service_client from google.cloud.dataproc_v1.gapic import cluster_controller_client from google.cloud.dataproc_v1.gapic import enums from google.cloud.dataproc_v1.gapic import job_controller_client @@ -52,10 +53,18 @@ class WorkflowTemplateServiceClient( enums = enums +class AutoscalingPolicyServiceClient( + autoscaling_policy_service_client.AutoscalingPolicyServiceClient +): + __doc__ = autoscaling_policy_service_client.AutoscalingPolicyServiceClient.__doc__ + enums = enums + + __all__ = ( "enums", "types", "ClusterControllerClient", "JobControllerClient", "WorkflowTemplateServiceClient", + "AutoscalingPolicyServiceClient", ) diff --git a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py new file mode 100644 index 00000000..603aa14f --- /dev/null +++ b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py @@ -0,0 +1,648 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.dataproc.v1 AutoscalingPolicyService API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.dataproc_v1.gapic import autoscaling_policy_service_client_config +from google.cloud.dataproc_v1.gapic import enums +from google.cloud.dataproc_v1.gapic.transports import ( + autoscaling_policy_service_grpc_transport, +) +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2 +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2_grpc +from google.cloud.dataproc_v1.proto import clusters_pb2 +from google.cloud.dataproc_v1.proto import clusters_pb2_grpc +from google.cloud.dataproc_v1.proto import jobs_pb2 +from google.cloud.dataproc_v1.proto import jobs_pb2_grpc +from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.dataproc_v1.proto import workflow_templates_pb2 +from google.cloud.dataproc_v1.proto import workflow_templates_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version + + +class AutoscalingPolicyServiceClient(object): + """ + The API interface for managing autoscaling policies in the + Dataproc API. + """ + + SERVICE_ADDRESS = "dataproc.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.dataproc.v1.AutoscalingPolicyService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoscalingPolicyServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def autoscaling_policy_path(cls, project, region, autoscaling_policy): + """Return a fully-qualified autoscaling_policy string.""" + return google.api_core.path_template.expand( + "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}", + project=project, + region=region, + autoscaling_policy=autoscaling_policy, + ) + + @classmethod + def region_path(cls, project, region): + """Return a fully-qualified region string.""" + return google.api_core.path_template.expand( + "projects/{project}/regions/{region}", project=project, region=region + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. 
+ + Args: + transport (Union[~.AutoscalingPolicyServiceGrpcTransport, + Callable[[~.Credentials, type], ~.AutoscalingPolicyServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = autoscaling_policy_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) 
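Since the endpoint is resolved from ``client_options`` before the transport is built, a caller can point the client at a different endpoint with a plain dict (converted via ``client_options.from_dict`` as above). A sketch; the regional hostname is illustrative, not taken from this patch:

    from google.cloud import dataproc_v1

    # Illustrative regional endpoint; any reachable Dataproc endpoint
    # is handled the same way by the constructor.
    client = dataproc_v1.AutoscalingPolicyServiceClient(
        client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
    )
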
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_autoscaling_policy( + self, + parent, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates new autoscaling policy. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> parent = client.region_path('[PROJECT]', '[REGION]') + >>> + >>> # TODO: Initialize `policy`: + >>> policy = {} + >>> + >>> response = client.create_autoscaling_policy(parent, policy) + + Args: + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.create``, the resource + name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): The autoscaling policy to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
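For concreteness, a ``policy`` dict assembled from the fields of the AutoscalingPolicy message in this patch might look like the sketch below; the identifiers and numeric values are placeholders, not recommended settings:

    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    parent = client.region_path("my-project", "us-central1")  # placeholder IDs

    # Field names follow the AutoscalingPolicy proto in this patch;
    # values here are placeholders.
    policy = {
        "id": "shrink-friendly",
        "worker_config": {"min_instances": 2, "max_instances": 10},
        "basic_algorithm": {
            "cooldown_period": {"seconds": 120},
            "yarn_config": {
                "graceful_decommission_timeout": {"seconds": 600},
                "scale_up_factor": 0.5,
                "scale_down_factor": 0.5,
            },
        },
    }
    response = client.create_autoscaling_policy(parent, policy)
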
+ if "create_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "create_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_autoscaling_policy, + default_retry=self._method_configs["CreateAutoscalingPolicy"].retry, + default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest( + parent=parent, policy=policy + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_autoscaling_policy( + self, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates (replaces) autoscaling policy. + + Disabled check for update\_mask, because all updates will be full + replacements. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> # TODO: Initialize `policy`: + >>> policy = {} + >>> + >>> response = client.update_autoscaling_policy(policy) + + Args: + policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
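Because updates are full replacements rather than field-mask patches, the natural calling pattern is read-modify-write. A sketch with placeholder resource IDs:

    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    name = client.autoscaling_policy_path("my-project", "us-central1", "my-policy")

    # Fetch the current policy, mutate one field, and send the whole
    # message back; the service replaces the stored policy in full.
    policy = client.get_autoscaling_policy(name)
    policy.worker_config.max_instances = 20  # placeholder value
    updated = client.update_autoscaling_policy(policy)
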
+ if "update_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "update_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_autoscaling_policy, + default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry, + default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("policy.name", policy.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_autoscaling_policy( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Retrieves autoscaling policy. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') + >>> + >>> response = client.get_autoscaling_policy(name) + + Args: + name (str): Required. The "resource name" of the autoscaling policy, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "get_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_autoscaling_policy, + default_retry=self._method_configs["GetAutoscalingPolicy"].retry, + default_timeout=self._method_configs["GetAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_autoscaling_policies( + self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists autoscaling policies in the project. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> parent = client.region_path('[PROJECT]', '[REGION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_autoscaling_policies(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_autoscaling_policies(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_autoscaling_policies" not in self._inner_api_calls: + self._inner_api_calls[ + "list_autoscaling_policies" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_autoscaling_policies, + default_retry=self._method_configs["ListAutoscalingPolicies"].retry, + default_timeout=self._method_configs["ListAutoscalingPolicies"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest( + parent=parent, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_autoscaling_policies"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="policies", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_autoscaling_policy( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes an autoscaling policy. It is an error to delete an autoscaling + policy that is in use by one or more clusters. + + Example: + >>> from google.cloud import dataproc_v1 + >>> + >>> client = dataproc_v1.AutoscalingPolicyServiceClient() + >>> + >>> name = client.autoscaling_policy_path('[PROJECT]', '[REGION]', '[AUTOSCALING_POLICY]') + >>> + >>> client.delete_autoscaling_policy(name) + + Args: + name (str): Required. The "resource name" of the autoscaling policy, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
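Since deletion fails for a policy that is still attached to a cluster, callers may want to tolerate that case. A sketch using the documented ``GoogleAPICallError`` base class (this patch does not pin down a more specific error type for the in-use case):

    from google.api_core import exceptions
    from google.cloud import dataproc_v1

    client = dataproc_v1.AutoscalingPolicyServiceClient()
    name = client.autoscaling_policy_path("my-project", "my-region", "my-policy")

    try:
        client.delete_autoscaling_policy(name)
    except exceptions.GoogleAPICallError as exc:
        # Most likely cause per the docstring above: the policy is
        # still in use by one or more clusters.
        print("policy not deleted:", exc)
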
+ if "delete_autoscaling_policy" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_autoscaling_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_autoscaling_policy, + default_retry=self._method_configs["DeleteAutoscalingPolicy"].retry, + default_timeout=self._method_configs["DeleteAutoscalingPolicy"].timeout, + client_info=self._client_info, + ) + + request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_autoscaling_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py new file mode 100644 index 00000000..61c50a6c --- /dev/null +++ b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.cloud.dataproc.v1.AutoscalingPolicyService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListAutoscalingPolicies": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteAutoscalingPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py new file mode 100644 index 00000000..7e815bba --- /dev/null +++ b/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
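The client config above maps each method to a retry-code class (``idempotent`` vs ``non_idempotent``) and a shared set of retry parameters. Those defaults can be overridden per call with a ``google.api_core.retry.Retry`` object; a sketch mirroring the ``idempotent`` settings, with the millisecond fields translated to seconds:

    from google.api_core import exceptions, retry

    custom_retry = retry.Retry(
        predicate=retry.if_exception_type(
            exceptions.DeadlineExceeded, exceptions.ServiceUnavailable
        ),
        initial=0.1,     # initial_retry_delay_millis
        multiplier=1.3,  # retry_delay_multiplier
        maximum=60.0,    # max_retry_delay_millis
        deadline=600.0,  # total_timeout_millis
    )
    # `client` and `name` as in the earlier sketches.
    policy = client.get_autoscaling_policy(name, retry=custom_retry)
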
+ + +import google.api_core.grpc_helpers + +from google.cloud.dataproc_v1.proto import autoscaling_policies_pb2_grpc + + +class AutoscalingPolicyServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.dataproc.v1 AutoscalingPolicyService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="dataproc.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "autoscaling_policy_service_stub": autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub( + channel + ) + } + + @classmethod + def create_channel( + cls, address="dataproc.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.create_autoscaling_policy`. + + Creates new autoscaling policy. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].CreateAutoscalingPolicy + + @property + def update_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.update_autoscaling_policy`. + + Updates (replaces) autoscaling policy. 
+ + Disabled check for update\_mask, because all updates will be full + replacements. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].UpdateAutoscalingPolicy + + @property + def get_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.get_autoscaling_policy`. + + Retrieves autoscaling policy. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].GetAutoscalingPolicy + + @property + def list_autoscaling_policies(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.list_autoscaling_policies`. + + Lists autoscaling policies in the project. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].ListAutoscalingPolicies + + @property + def delete_autoscaling_policy(self): + """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.delete_autoscaling_policy`. + + Deletes an autoscaling policy. It is an error to delete an autoscaling + policy that is in use by one or more clusters. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["autoscaling_policy_service_stub"].DeleteAutoscalingPolicy diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto index 65035a59..51fbc87d 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto @@ -29,6 +29,11 @@ option java_multiple_files = true; option java_outer_classname = "AutoscalingPoliciesProto"; option java_package = "com.google.cloud.dataproc.v1"; +option (google.api.resource_definition) = { + type: "dataproc.googleapis.com/Region" + pattern: "projects/{project}/regions/{region}" +}; + // The API interface for managing autoscaling policies in the // Dataproc API. 
service AutoscalingPolicyService { @@ -98,6 +103,7 @@ service AutoscalingPolicyService { message AutoscalingPolicy { option (google.api.resource) = { type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" }; diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py index 8d76aae2..79e205d1 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py @@ -28,10 +28,10 @@ package="google.cloud.dataproc.v1", syntax="proto3", serialized_options=_b( - "\n\034com.google.cloud.dataproc.v1B\030AutoscalingPoliciesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" + "\n\034com.google.cloud.dataproc.v1B\030AutoscalingPoliciesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc\352AE\n\036dataproc.googleapis.com/Region\022#projects/{project}/regions/{region}" ), serialized_pb=_b( - '\n9google/cloud/dataproc_v1/proto/autoscaling_policies.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/api/client.proto"\xd4\x03\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.BasicAutoscalingAlgorithmB\x03\xe0\x41\x02H\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:|\xea\x41y\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}B\x0b\n\talgorithm"\xa4\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x34.google.cloud.dataproc.v1.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xa0\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12;\n\x06policy\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicy"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 
\x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x02"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x83\x01\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\xfd\n\n\x18\x41utoscalingPolicyService\x12\x8c\x02\n\x17\x43reateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"7/v1/{parent=projects/*/locations/*}/autoscalingPolicies:\x06policyZ?"5/v1/{parent=projects/*/regions/*}/autoscalingPolicies:\x06policy\x12\x9a\x02\n\x17UpdateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x97\x01\x82\xd3\xe4\x93\x02\x90\x01\x1a>/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\x06policyZF\x1a.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:\xcf\x01\xea\x41\xcb\x01\n)dataproc.googleapis.com/AutoscalingPolicy\x12Pprojects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}B\x0b\n\talgorithm"\xa4\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x34.google.cloud.dataproc.v1.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xa0\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12;\n\x06policy\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicy"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x02"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 
\x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x83\x01\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\xfd\n\n\x18\x41utoscalingPolicyService\x12\x8c\x02\n\x17\x43reateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"7/v1/{parent=projects/*/locations/*}/autoscalingPolicies:\x06policyZ?"5/v1/{parent=projects/*/regions/*}/autoscalingPolicies:\x06policy\x12\x9a\x02\n\x17UpdateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x97\x01\x82\xd3\xe4\x93\x02\x90\x01\x1a>/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\x06policyZF\x1a Date: Fri, 21 Feb 2020 10:50:02 -0800 Subject: [PATCH 5/7] chore: change docstring line breaks (via synth) (#7) --- .../proto/autoscaling_policies_pb2.py | 3 +-- synth.metadata | 12 ++++++------ 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py index 7c3be028..e10b0b75 100644 --- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py @@ -792,8 +792,7 @@ dict( DESCRIPTOR=_AUTOSCALINGPOLICY, __module__="google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2", - __doc__="""Describes an autoscaling policy for Dataproc cluster - autoscaler. + __doc__="""Describes an autoscaling policy for Dataproc cluster autoscaler. 
Attributes: diff --git a/synth.metadata b/synth.metadata index 04f0f205..c746925d 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2020-02-19T02:22:05.032308Z", + "updateTime": "2020-02-20T23:02:30.926314Z", "sources": [ { "generator": { @@ -12,23 +12,23 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-dataproc.git", - "sha": "555dddaabc9bd6160ffee73972f118dc2086923d" + "sha": "3f7fd5bef4ba959b9a6e153fb4ab6d8b6819948e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8", - "internalRef": "295861722", - "log": "ce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get ride of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. 
Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. (Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old an very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name 
of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\n" + "sha": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01", + "internalRef": "296274723", + "log": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\n" } }, { "git": { "name": "synthtool", "remote": "rpc://devrel/cloud/libraries/tools/autosynth", - "sha": "b4b7af4a16a07b40bfd8dcdda89f9f193ff4e2ed" + "sha": "706a38c26db42299845396cdae55db635c38794a" } }, { From bb36194d4b0cfb6f2c5a0358625a17c629f71b21 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 5 Mar 2020 10:11:00 -0800 Subject: [PATCH 6/7] feat: add lifecycle config and reservation affinity support to v1 (via synth) (#10) --- .../gapic/cluster_controller_client.py | 10 +- google/cloud/dataproc_v1/gapic/enums.py | 19 + .../proto/autoscaling_policies.proto | 4 +- google/cloud/dataproc_v1/proto/clusters.proto | 92 +- 
 .../cloud/dataproc_v1/proto/clusters_pb2.py   | 522 ++++++++++--
 .../dataproc_v1/proto/clusters_pb2_grpc.py    |  10 +-
 google/cloud/dataproc_v1/proto/jobs.proto     | 102 ++-
 google/cloud/dataproc_v1/proto/jobs_pb2.py    | 786 +++++++++++++++---
 .../cloud/dataproc_v1/proto/jobs_pb2_grpc.py  |   4 +-
 .../cloud/dataproc_v1/proto/operations.proto  |   3 +-
 google/cloud/dataproc_v1/proto/shared.proto   |   3 +-
 .../proto/workflow_templates.proto            |  21 +-
 .../proto/workflow_templates_pb2.py           |  12 -
 .../proto/workflow_templates_pb2_grpc.py      |   8 +-
 .../autoscaling_policy_service_client.py      |   2 +-
 .../gapic/cluster_controller_client.py        |  24 +-
 google/cloud/dataproc_v1beta2/gapic/enums.py  |   4 +-
 .../gapic/job_controller_client.py            |  12 +-
 .../cluster_controller_grpc_transport.py      |   2 +-
 .../gapic/workflow_template_service_client.py |   2 +-
 .../proto/autoscaling_policies.proto          |   5 +-
 .../proto/autoscaling_policies_pb2.py         |   3 +-
 .../proto/autoscaling_policies_pb2_grpc.py    |   4 +-
 .../dataproc_v1beta2/proto/clusters.proto     | 114 +--
 .../dataproc_v1beta2/proto/clusters_pb2.py    | 280 ++++---
 .../proto/clusters_pb2_grpc.py                |  10 +-
 .../cloud/dataproc_v1beta2/proto/jobs.proto   | 194 +++--
 .../cloud/dataproc_v1beta2/proto/jobs_pb2.py  | 449 +++++-----
 .../dataproc_v1beta2/proto/jobs_pb2_grpc.py   |   4 +-
 .../dataproc_v1beta2/proto/operations.proto   |   3 +-
 .../cloud/dataproc_v1beta2/proto/shared.proto |   3 +-
 .../proto/workflow_templates.proto            |  27 +-
 .../proto/workflow_templates_pb2.py           |  16 +-
 .../proto/workflow_templates_pb2_grpc.py      |  12 +-
 synth.metadata                                |  28 +-
 35 files changed, 1951 insertions(+), 843 deletions(-)

diff --git a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
index f849ff06..fdf10818 100644
--- a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
+++ b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
@@ -416,10 +416,12 @@ def update_cluster(
                 message :class:`~google.cloud.dataproc_v1.types.FieldMask`
             graceful_decommission_timeout (Union[dict, ~google.cloud.dataproc_v1.types.Duration]): Optional. Timeout for graceful YARN decommissioning. Graceful
                 decommissioning allows removing nodes from the cluster without
-                interrupting jobs in progress. Timeout specifies how long to wait for jobs
-                in progress to finish before forcefully removing nodes (and potentially
-                interrupting jobs). Default timeout is 0 (for forceful decommission), and
-                the maximum allowed timeout is 1 day.
+                interrupting jobs in progress. Timeout specifies how long to wait for
+                jobs in progress to finish before forcefully removing nodes (and
+                potentially interrupting jobs). Default timeout is 0 (for forceful
+                decommission), and the maximum allowed timeout is 1 day. (see JSON
+                representation of
+                `Duration <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
 
                 Only supported on Dataproc image versions 1.2 and higher.
 
diff --git a/google/cloud/dataproc_v1/gapic/enums.py b/google/cloud/dataproc_v1/gapic/enums.py
index 9bbaf2a6..b7d7023c 100644
--- a/google/cloud/dataproc_v1/gapic/enums.py
+++ b/google/cloud/dataproc_v1/gapic/enums.py
@@ -207,6 +207,25 @@ class Level(enum.IntEnum):
         OFF = 8
 
 
+class ReservationAffinity(object):
+    class Type(enum.IntEnum):
+        """
+        Indicates whether to consume capacity from a reservation or not.
+
+        Attributes:
+          TYPE_UNSPECIFIED (int)
+          NO_RESERVATION (int): Do not consume from any allocated capacity.
+          ANY_RESERVATION (int): Consume any reservation available.
+          SPECIFIC_RESERVATION (int): Must consume from a specific reservation.
Must specify key value fields + for specifying the reservations. + """ + + TYPE_UNSPECIFIED = 0 + NO_RESERVATION = 1 + ANY_RESERVATION = 2 + SPECIFIC_RESERVATION = 3 + + class WorkflowMetadata(object): class State(enum.IntEnum): """ diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto index 51fbc87d..53321d89 100644 --- a/google/cloud/dataproc_v1/proto/autoscaling_policies.proto +++ b/google/cloud/dataproc_v1/proto/autoscaling_policies.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -28,7 +27,6 @@ option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dat option java_multiple_files = true; option java_outer_classname = "AutoscalingPoliciesProto"; option java_package = "com.google.cloud.dataproc.v1"; - option (google.api.resource_definition) = { type: "dataproc.googleapis.com/Region" pattern: "projects/{project}/regions/{region}" diff --git a/google/cloud/dataproc_v1/proto/clusters.proto b/google/cloud/dataproc_v1/proto/clusters.proto index bc254589..a20b8f31 100644 --- a/google/cloud/dataproc_v1/proto/clusters.proto +++ b/google/cloud/dataproc_v1/proto/clusters.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -20,7 +19,6 @@ package google.cloud.dataproc.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; -import "google/cloud/dataproc/v1/operations.proto"; import "google/cloud/dataproc/v1/shared.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; @@ -40,7 +38,7 @@ service ClusterController { // Creates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/clusters" @@ -55,22 +53,22 @@ service ClusterController { // Updates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
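A quick sketch of referencing the new enum from Python, using the module path added in ``enums.py`` above:

    from google.cloud.dataproc_v1.gapic import enums

    # Integer value 3, per the Type enum above.
    consume_type = enums.ReservationAffinity.Type.SPECIFIC_RESERVATION
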
rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" body: "cluster" }; + option (google.api.method_signature) = "project_id,region,cluster_name,cluster,update_mask"; option (google.longrunning.operation_info) = { response_type: "Cluster" metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" }; - option (google.api.method_signature) = "project_id,region,cluster_name,cluster,update_mask"; } // Deletes a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" @@ -101,11 +99,11 @@ service ClusterController { // Gets cluster diagnostic information. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). // After the operation completes, // [Operation.response][google.longrunning.Operation.response] // contains - // [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). + // [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" @@ -215,6 +213,9 @@ message ClusterConfig { // Optional. Security settings for the cluster. SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Lifecycle setting for the cluster. + LifecycleConfig lifecycle_config = 17 [(google.api.field_behavior) = OPTIONAL]; } // Autoscaling Policy config associated with the cluster. @@ -322,9 +323,12 @@ message GceClusterConfig { // [Project and instance // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). map metadata = 5; + + // Optional. Reservation Affinity for consuming Zonal reservation. + ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; } -// Optional. The config settings for Compute Engine resources in +// The config settings for Compute Engine resources in // an instance group, such as a master or worker group. message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. @@ -438,7 +442,10 @@ message NodeInitializationAction { string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is - // 10 minutes. Cluster creation fails with an explanatory error message (the + // 10 minutes (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). 
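Pulling the new v1 surfaces together: a ``create_cluster`` request can now carry both an auto-delete schedule (``LifecycleConfig`` above) and a reservation pin (``ReservationAffinity``, added just below). A sketch with placeholder IDs; the reservation label key is an assumption borrowed from Compute Engine conventions, not defined in this patch:

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()
    cluster = {
        "project_id": "my-project",    # placeholder
        "cluster_name": "my-cluster",  # placeholder
        "config": {
            # Auto-delete after 30 idle minutes (minimum allowed is 10 minutes).
            "lifecycle_config": {"idle_delete_ttl": {"seconds": 1800}},
            "gce_cluster_config": {
                "reservation_affinity": {
                    "consume_reservation_type": "SPECIFIC_RESERVATION",
                    # Assumed Compute Engine reservation label key.
                    "key": "compute.googleapis.com/reservation-name",
                    "values": ["my-reservation"],
                }
            },
        },
    }
    operation = client.create_cluster("my-project", "us-central1", cluster)
    result = operation.result()  # blocks until the long-running operation completes
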
+  //
+  // Cluster creation fails with an explanatory error message (the
   // name of the executable that caused the error and the exceeded timeout
   // period) if the executable is not completed at end of the timeout period.
   google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL];
@@ -495,7 +502,8 @@ message ClusterStatus {
     (google.api.field_behavior) = OPTIONAL
   ];

-  // Output only. Time when this state was entered.
+  // Output only. Time when this state was entered (see JSON representation of
+  // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
   google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY];

   // Output only. Additional state information that includes
@@ -613,6 +621,36 @@ message SoftwareConfig {
   repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL];
 }

+// Specifies the cluster auto-delete schedule configuration.
+message LifecycleConfig {
+  // Optional. The duration to keep the cluster alive while idling (when no jobs
+  // are running). Passing this threshold will cause the cluster to be
+  // deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON
+  // representation of
+  // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
+  google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL];
+
+  // Either the exact time the cluster should be deleted at or
+  // the cluster maximum age.
+  oneof ttl {
+    // Optional. The time when the cluster will be auto-deleted (see JSON representation of
+    // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
+    google.protobuf.Timestamp auto_delete_time = 2 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. The lifetime duration of the cluster. The cluster will be
+    // auto-deleted at the end of this period. Minimum value is 10 minutes;
+    // maximum value is 14 days (see JSON representation of
+    // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
+    google.protobuf.Duration auto_delete_ttl = 3 [(google.api.field_behavior) = OPTIONAL];
+  }
+
+  // Output only. The time when the cluster became idle (most recent job finished)
+  // and became eligible for deletion due to idleness (see JSON representation
+  // of
+  // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
+  google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
 // Contains cluster daemon metrics, such as HDFS and YARN stats.
 //
 // **Beta Feature**: This report is available for testing purposes only. It may
@@ -671,7 +709,8 @@ message UpdateClusterRequest {
   // interrupting jobs in progress. Timeout specifies how long to wait for jobs
   // in progress to finish before forcefully removing nodes (and potentially
   // interrupting jobs). Default timeout is 0 (for forceful decommission), and
-  // the maximum allowed timeout is 1 day.
+  // the maximum allowed timeout is 1 day (see JSON representation of
+  // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)).
   //
   // Only supported on Dataproc image versions 1.2 and higher.
   google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL];
@@ -854,3 +893,30 @@ message DiagnoseClusterResults {
   // diagnostics.
   string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
 }
+
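As a usage sketch for the new LifecycleConfig message above (hedged: the
project, region, cluster name, and TTL values are placeholders, and the call
shape follows the existing ClusterControllerClient.create_cluster):

    from google.cloud import dataproc_v1

    client = dataproc_v1.ClusterControllerClient()

    cluster = {
        "project_id": "my-project",  # placeholder
        "cluster_name": "ephemeral-cluster",
        "config": {
            "lifecycle_config": {
                # Delete the cluster after one idle hour (minimum is 10 minutes).
                "idle_delete_ttl": {"seconds": 3600},
                # One member of the "ttl" oneof: a hard maximum cluster age.
                "auto_delete_ttl": {"seconds": 7 * 24 * 3600},
            }
        },
    }

    operation = client.create_cluster("my-project", "us-central1", cluster)
    operation.result()  # Block until cluster creation completes.

+// Reservation Affinity for consuming Zonal reservation.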
+message ReservationAffinity { + // Indicates whether to consume capacity from an reservation or not. + enum Type { + TYPE_UNSPECIFIED = 0; + + // Do not consume from any allocated capacity. + NO_RESERVATION = 1; + + // Consume any reservation available. + ANY_RESERVATION = 2; + + // Must consume from a specific reservation. Must specify key value fields + // for specifying the reservations. + SPECIFIC_RESERVATION = 3; + } + + // Optional. Type of reservation to consume + Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Corresponds to the label key of reservation resource. + string key = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Corresponds to the label values of reservation resource. + repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2.py b/google/cloud/dataproc_v1/proto/clusters_pb2.py index b4c0aa2b..3c4ebdae 100644 --- a/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ b/google/cloud/dataproc_v1/proto/clusters_pb2.py @@ -18,9 +18,6 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.dataproc_v1.proto import ( - operations_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2, -) from google.cloud.dataproc_v1.proto import ( shared_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2, ) @@ -40,13 +37,12 @@ "\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r 
\x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 
\x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 
\x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03\x32\xe3\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + 
'\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb0\x06\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01\x12H\n\x10lifecycle_config\x18\x11 \x01(\x0b\x32).google.cloud.dataproc.v1.LifecycleConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\x9f\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x12P\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32-.google.cloud.dataproc.v1.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 
\x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x83\x02\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12;\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00\x12\x39\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01H\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xf8\x01\n\x13ReservationAffinity\x12Y\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 
\x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe3\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, @@ -83,8 +79,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3087, - serialized_end=3173, + serialized_start=3194, + serialized_end=3280, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -106,11 +102,49 @@ ], containing_type=None, serialized_options=None, - serialized_start=3175, - serialized_end=3235, + serialized_start=3282, + serialized_end=3342, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) +_RESERVATIONAFFINITY_TYPE = _descriptor.EnumDescriptor( + name="Type", + full_name="google.cloud.dataproc.v1.ReservationAffinity.Type", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="TYPE_UNSPECIFIED", + index=0, + number=0, + 
serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="NO_RESERVATION", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ANY_RESERVATION", + index=2, + number=2, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="SPECIFIC_RESERVATION", + index=3, + number=3, + serialized_options=None, + type=None, + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=6017, + serialized_end=6112, +) +_sym_db.RegisterEnumDescriptor(_RESERVATIONAFFINITY_TYPE) + _CLUSTER_LABELSENTRY = _descriptor.Descriptor( name="LabelsEntry", @@ -164,8 +198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=850, + serialized_start=756, + serialized_end=801, ) _CLUSTER = _descriptor.Descriptor( @@ -328,8 +362,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=394, - serialized_end=850, + serialized_start=345, + serialized_end=801, ) @@ -520,6 +554,24 @@ serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="lifecycle_config", + full_name="google.cloud.dataproc.v1.ClusterConfig.lifecycle_config", + index=10, + number=17, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -529,8 +581,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=853, - serialized_end=1595, + serialized_start=804, + serialized_end=1620, ) @@ -568,8 +620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1597, - serialized_end=1641, + serialized_start=1622, + serialized_end=1666, ) @@ -607,8 +659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1643, - serialized_end=1695, + serialized_start=1668, + serialized_end=1720, ) @@ -664,8 +716,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1984, - serialized_end=2031, + serialized_start=2091, + serialized_end=2138, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -819,6 +871,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="reservation_affinity", + full_name="google.cloud.dataproc.v1.GceClusterConfig.reservation_affinity", + index=8, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_GCECLUSTERCONFIG_METADATAENTRY], @@ -828,8 +898,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1698, - serialized_end=2031, + serialized_start=1723, + serialized_end=2138, ) @@ -1011,8 +1081,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2034, - serialized_end=2444, + serialized_start=2141, + serialized_end=2551, ) @@ -1068,8 +1138,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2446, - serialized_end=2545, + serialized_start=2553, + serialized_end=2652, ) @@ -1125,8 +1195,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2547, - serialized_end=2623, + serialized_start=2654, + serialized_end=2730, ) @@ -1200,8 +1270,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=2625, - serialized_end=2727, + serialized_start=2732, + serialized_end=2834, ) @@ -1257,8 +1327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2729, - serialized_end=2844, + serialized_start=2836, + serialized_end=2951, ) @@ -1350,8 +1420,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2847, - serialized_end=3235, + serialized_start=2954, + serialized_end=3342, ) @@ -1389,8 +1459,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3237, - serialized_end=3320, + serialized_start=3344, + serialized_end=3427, ) @@ -1680,8 +1750,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3323, - serialized_end=3851, + serialized_start=3430, + serialized_end=3958, ) @@ -1737,8 +1807,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4054, - serialized_end=4103, + serialized_start=4161, + serialized_end=4210, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -1811,8 +1881,109 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3854, - serialized_end=4103, + serialized_start=3961, + serialized_end=4210, +) + + +_LIFECYCLECONFIG = _descriptor.Descriptor( + name="LifecycleConfig", + full_name="google.cloud.dataproc.v1.LifecycleConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="idle_delete_ttl", + full_name="google.cloud.dataproc.v1.LifecycleConfig.idle_delete_ttl", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="auto_delete_time", + full_name="google.cloud.dataproc.v1.LifecycleConfig.auto_delete_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="auto_delete_ttl", + full_name="google.cloud.dataproc.v1.LifecycleConfig.auto_delete_ttl", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="idle_start_time", + full_name="google.cloud.dataproc.v1.LifecycleConfig.idle_start_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="ttl", + full_name="google.cloud.dataproc.v1.LifecycleConfig.ttl", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=4213, + serialized_end=4472, ) @@ -1868,8 +2039,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4286, - serialized_end=4336, + serialized_start=4655, + serialized_end=4705, ) 
_CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -1924,8 +2095,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4338, - serialized_end=4388, + serialized_start=4707, + serialized_end=4757, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -1980,8 +2151,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4106, - serialized_end=4388, + serialized_start=4475, + serialized_end=4757, ) @@ -2073,8 +2244,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4391, - serialized_end=4541, + serialized_start=4760, + serialized_end=4910, ) @@ -2220,8 +2391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4544, - serialized_end=4846, + serialized_start=4913, + serialized_end=5215, ) @@ -2331,8 +2502,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4849, - serialized_end=4996, + serialized_start=5218, + serialized_end=5365, ) @@ -2406,8 +2577,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4998, - serialized_end=5090, + serialized_start=5367, + serialized_end=5459, ) @@ -2517,8 +2688,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5093, - serialized_end=5230, + serialized_start=5462, + serialized_end=5599, ) @@ -2574,8 +2745,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5232, - serialized_end=5342, + serialized_start=5601, + serialized_end=5711, ) @@ -2649,8 +2820,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5344, - serialized_end=5441, + serialized_start=5713, + serialized_end=5810, ) @@ -2688,8 +2859,83 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5443, - serialized_end=5492, + serialized_start=5812, + serialized_end=5861, +) + + +_RESERVATIONAFFINITY = _descriptor.Descriptor( + name="ReservationAffinity", + full_name="google.cloud.dataproc.v1.ReservationAffinity", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="consume_reservation_type", + full_name="google.cloud.dataproc.v1.ReservationAffinity.consume_reservation_type", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.dataproc.v1.ReservationAffinity.key", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="values", + full_name="google.cloud.dataproc.v1.ReservationAffinity.values", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_RESERVATIONAFFINITY_TYPE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5864, + serialized_end=6112, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -2711,10 +2957,14 @@ 
_CLUSTERCONFIG.fields_by_name["encryption_config"].message_type = _ENCRYPTIONCONFIG _CLUSTERCONFIG.fields_by_name["autoscaling_config"].message_type = _AUTOSCALINGCONFIG _CLUSTERCONFIG.fields_by_name["security_config"].message_type = _SECURITYCONFIG +_CLUSTERCONFIG.fields_by_name["lifecycle_config"].message_type = _LIFECYCLECONFIG _GCECLUSTERCONFIG_METADATAENTRY.containing_type = _GCECLUSTERCONFIG _GCECLUSTERCONFIG.fields_by_name[ "metadata" ].message_type = _GCECLUSTERCONFIG_METADATAENTRY +_GCECLUSTERCONFIG.fields_by_name[ + "reservation_affinity" +].message_type = _RESERVATIONAFFINITY _INSTANCEGROUPCONFIG.fields_by_name["disk_config"].message_type = _DISKCONFIG _INSTANCEGROUPCONFIG.fields_by_name[ "managed_group_config" @@ -2738,6 +2988,30 @@ _SOFTWARECONFIG.fields_by_name[ "optional_components" ].enum_type = google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2._COMPONENT +_LIFECYCLECONFIG.fields_by_name[ + "idle_delete_ttl" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_LIFECYCLECONFIG.fields_by_name[ + "auto_delete_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LIFECYCLECONFIG.fields_by_name[ + "auto_delete_ttl" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_LIFECYCLECONFIG.fields_by_name[ + "idle_start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( + _LIFECYCLECONFIG.fields_by_name["auto_delete_time"] +) +_LIFECYCLECONFIG.fields_by_name[ + "auto_delete_time" +].containing_oneof = _LIFECYCLECONFIG.oneofs_by_name["ttl"] +_LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( + _LIFECYCLECONFIG.fields_by_name["auto_delete_ttl"] +) +_LIFECYCLECONFIG.fields_by_name[ + "auto_delete_ttl" +].containing_oneof = _LIFECYCLECONFIG.oneofs_by_name["ttl"] _CLUSTERMETRICS_HDFSMETRICSENTRY.containing_type = _CLUSTERMETRICS _CLUSTERMETRICS_YARNMETRICSENTRY.containing_type = _CLUSTERMETRICS _CLUSTERMETRICS.fields_by_name[ @@ -2755,6 +3029,10 @@ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK _LISTCLUSTERSRESPONSE.fields_by_name["clusters"].message_type = _CLUSTER +_RESERVATIONAFFINITY.fields_by_name[ + "consume_reservation_type" +].enum_type = _RESERVATIONAFFINITY_TYPE +_RESERVATIONAFFINITY_TYPE.containing_type = _RESERVATIONAFFINITY DESCRIPTOR.message_types_by_name["Cluster"] = _CLUSTER DESCRIPTOR.message_types_by_name["ClusterConfig"] = _CLUSTERCONFIG DESCRIPTOR.message_types_by_name["AutoscalingConfig"] = _AUTOSCALINGCONFIG @@ -2769,6 +3047,7 @@ DESCRIPTOR.message_types_by_name["SecurityConfig"] = _SECURITYCONFIG DESCRIPTOR.message_types_by_name["KerberosConfig"] = _KERBEROSCONFIG DESCRIPTOR.message_types_by_name["SoftwareConfig"] = _SOFTWARECONFIG +DESCRIPTOR.message_types_by_name["LifecycleConfig"] = _LIFECYCLECONFIG DESCRIPTOR.message_types_by_name["ClusterMetrics"] = _CLUSTERMETRICS DESCRIPTOR.message_types_by_name["CreateClusterRequest"] = _CREATECLUSTERREQUEST DESCRIPTOR.message_types_by_name["UpdateClusterRequest"] = _UPDATECLUSTERREQUEST @@ -2778,6 +3057,7 @@ DESCRIPTOR.message_types_by_name["ListClustersResponse"] = _LISTCLUSTERSRESPONSE DESCRIPTOR.message_types_by_name["DiagnoseClusterRequest"] = _DIAGNOSECLUSTERREQUEST DESCRIPTOR.message_types_by_name["DiagnoseClusterResults"] = _DIAGNOSECLUSTERRESULTS +DESCRIPTOR.message_types_by_name["ReservationAffinity"] = _RESERVATIONAFFINITY _sym_db.RegisterFileDescriptor(DESCRIPTOR) Cluster = _reflection.GeneratedProtocolMessageType( @@ -2888,6 +3168,8 @@ 
 unset.
 security_config:
     Optional. Security settings for the cluster.
+lifecycle_config:
+    Optional. Lifecycle setting for the cluster.
 """,
 # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterConfig)
 ),
@@ -3025,6 +3307,9 @@
 (see `Project and instance metadata
 <https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata>`__).
+reservation_affinity:
+    Optional. Reservation Affinity for consuming Zonal
+    reservation.
 """,
 # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.GceClusterConfig)
 ),
@@ -3038,8 +3323,8 @@
     dict(
         DESCRIPTOR=_INSTANCEGROUPCONFIG,
         __module__="google.cloud.dataproc_v1.proto.clusters_pb2",
-        __doc__="""Optional. The config settings for Compute Engine resources
-        in an instance group, such as a master or worker group.
+        __doc__="""The config settings for Compute Engine resources in an
+        instance group, such as a master or worker group.


         Attributes:
@@ -3193,10 +3478,12 @@
     Required. Cloud Storage URI of executable file.
 execution_timeout:
     Optional. Amount of time executable has to complete. Default
-    is 10 minutes. Cluster creation fails with an explanatory
-    error message (the name of the executable that caused the
-    error and the exceeded timeout period) if the executable is
-    not completed at end of the timeout period.
+    is 10 minutes (see JSON representation of `Duration
+    <https://developers.google.com/protocol-buffers/docs/proto3#json>`__). Cluster creation fails with an
+    explanatory error message (the name of the executable that
+    caused the error and the exceeded timeout period) if the
+    executable is not completed at end of the timeout period.
 """,
 # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.NodeInitializationAction)
 ),
@@ -3218,7 +3505,10 @@
 detail:
     Optional. Output only. Details of cluster's state.
 state_start_time:
-    Output only. Time when this state was entered.
+    Output only. Time when this state was entered (see JSON
+    representation of `Timestamp
+    <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
 substate:
     Output only. Additional state information that includes
     status reported by the agent.
@@ -3368,6 +3658,49 @@
 _sym_db.RegisterMessage(SoftwareConfig)
 _sym_db.RegisterMessage(SoftwareConfig.PropertiesEntry)

+LifecycleConfig = _reflection.GeneratedProtocolMessageType(
+    "LifecycleConfig",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_LIFECYCLECONFIG,
+        __module__="google.cloud.dataproc_v1.proto.clusters_pb2",
+        __doc__="""Specifies the cluster auto-delete schedule configuration.
+
+
+        Attributes:
+            idle_delete_ttl:
+                Optional. The duration to keep the cluster alive while idling
+                (when no jobs are running). Passing this threshold will cause
+                the cluster to be deleted. Minimum value is 10 minutes;
+                maximum value is 14 days (see JSON representation of `Duration
+                <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
+            ttl:
+                Either the exact time the cluster should be deleted at or the
+                cluster maximum age.
+            auto_delete_time:
+                Optional. The time when the cluster will be auto-deleted (see
+                JSON representation of `Timestamp
+                <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
+            auto_delete_ttl:
+                Optional. The lifetime duration of the cluster. The cluster
+                will be auto-deleted at the end of this period. Minimum value
+                is 10 minutes; maximum value is 14 days (see JSON
+                representation of `Duration
+                <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
+            idle_start_time:
+                Output only. The time when the cluster became idle (most recent
+                job finished) and became eligible for deletion due to idleness
+                (see JSON representation of `Timestamp
+                <https://developers.google.com/protocol-buffers/docs/proto3#json>`__).
+        """,
+        # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.LifecycleConfig)
+    ),
+)
+_sym_db.RegisterMessage(LifecycleConfig)
+
 ClusterMetrics = _reflection.GeneratedProtocolMessageType(
     "ClusterMetrics",
     (_message.Message,),
@@ -3473,8 +3806,10 @@
     wait for jobs in progress to finish before forcefully removing
     nodes (and potentially interrupting jobs). Default timeout is
     0 (for forceful decommission), and the maximum allowed timeout
-    is 1 day. Only supported on Dataproc image versions 1.2 and
-    higher.
+    is 1 day (see JSON representation of `Duration
+    <https://developers.google.com/protocol-buffers/docs/proto3#json>`__). Only supported on Dataproc
+    image versions 1.2 and higher.
 update_mask:
     Required. Specifies the path, relative to ``Cluster``, of the
     field to update. For example, to change the number of workers
@@ -3698,6 +4033,30 @@
 )
 _sym_db.RegisterMessage(DiagnoseClusterResults)

+ReservationAffinity = _reflection.GeneratedProtocolMessageType(
+    "ReservationAffinity",
+    (_message.Message,),
+    dict(
+        DESCRIPTOR=_RESERVATIONAFFINITY,
+        __module__="google.cloud.dataproc_v1.proto.clusters_pb2",
+        __doc__="""Reservation Affinity for consuming Zonal reservation.
+
+
+        Attributes:
+            consume_reservation_type:
+                Optional. Type of reservation to consume
+            key:
+                Optional. Corresponds to the label key of reservation
+                resource.
+            values:
+                Optional. Corresponds to the label values of reservation
+                resource.
+        """,
+        # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ReservationAffinity)
+    ),
+)
+_sym_db.RegisterMessage(ReservationAffinity)
+
 DESCRIPTOR._options = None
 _CLUSTER_LABELSENTRY._options = None
@@ -3718,6 +4077,7 @@
 _CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None
 _CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None
 _CLUSTERCONFIG.fields_by_name["security_config"]._options = None
+_CLUSTERCONFIG.fields_by_name["lifecycle_config"]._options = None
 _AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None
 _ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None
 _GCECLUSTERCONFIG_METADATAENTRY._options = None
@@ -3727,6 +4087,7 @@
 _GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None
 _GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None
 _GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None
+_GCECLUSTERCONFIG.fields_by_name["reservation_affinity"]._options = None
 _INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None
 _INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None
 _INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None
@@ -3766,6 +4127,10 @@
 _SOFTWARECONFIG.fields_by_name["image_version"]._options = None
 _SOFTWARECONFIG.fields_by_name["properties"]._options = None
 _SOFTWARECONFIG.fields_by_name["optional_components"]._options = None
+_LIFECYCLECONFIG.fields_by_name["idle_delete_ttl"]._options = None
+_LIFECYCLECONFIG.fields_by_name["auto_delete_time"]._options = None
+_LIFECYCLECONFIG.fields_by_name["auto_delete_ttl"]._options = None
+_LIFECYCLECONFIG.fields_by_name["idle_start_time"]._options = None
 _CLUSTERMETRICS_HDFSMETRICSENTRY._options = None
 _CLUSTERMETRICS_YARNMETRICSENTRY._options = None
 _CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None
@@ -3798,6 +4163,9 @@
 _DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None
 _DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None
 _DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None
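With the generated ReservationAffinity message registered above, a brief
construction sketch (hedged: the reservation label key and reservation name
below are illustrative assumptions, not values mandated by this change):

    from google.cloud.dataproc_v1.proto import clusters_pb2

    # Target a specific zonal reservation; ANY_RESERVATION and
    # NO_RESERVATION require no key/values labels.
    affinity = clusters_pb2.ReservationAffinity(
        consume_reservation_type=clusters_pb2.ReservationAffinity.SPECIFIC_RESERVATION,
        key="compute.googleapis.com/reservation-name",  # assumed label key
        values=["my-reservation"],
    )
    gce_config = clusters_pb2.GceClusterConfig(reservation_affinity=affinity)

+_RESERVATIONAFFINITY.fields_by_name["consume_reservation_type"]._options = None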
+_RESERVATIONAFFINITY.fields_by_name["key"]._options = None +_RESERVATIONAFFINITY.fields_by_name["values"]._options = None _CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( name="ClusterController", @@ -3807,8 +4175,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=5495, - serialized_end=7130, + serialized_start=6115, + serialized_end=7750, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -3829,7 +4197,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata\332A2project_id,region,cluster_name,cluster,update_mask" + "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\332A2project_id,region,cluster_name,cluster,update_mask\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py b/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py index def69f14..8d5bbde7 100644 --- a/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py +++ b/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py @@ -60,7 +60,7 @@ class ClusterControllerServicer(object): def CreateCluster(self, request, context): """Creates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -69,7 +69,7 @@ def CreateCluster(self, request, context): def UpdateCluster(self, request, context): """Updates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -78,7 +78,7 @@ def UpdateCluster(self, request, context): def DeleteCluster(self, request, context): """Deletes a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -101,11 +101,11 @@ def ListClusters(self, request, context): def DiagnoseCluster(self, request, context): """Gets cluster diagnostic information. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). After the operation completes, [Operation.response][google.longrunning.Operation.response] contains - [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). + [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/google/cloud/dataproc_v1/proto/jobs.proto b/google/cloud/dataproc_v1/proto/jobs.proto index bcb68fed..85921dc4 100644 --- a/google/cloud/dataproc_v1/proto/jobs.proto +++ b/google/cloud/dataproc_v1/proto/jobs.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -19,6 +19,7 @@ package google.cloud.dataproc.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; +import "google/longrunning/operations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -69,9 +70,9 @@ service JobController { // Starts a job cancellation request. To access the job resource // after cancellation, call - // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + // [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) // or - // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). + // [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). rpc CancelJob(CancelJobRequest) returns (Job) { option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" @@ -387,6 +388,71 @@ message PigJob { LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } +// A Dataproc job for running +// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html) +// applications on YARN. +message SparkRJob { + // Required. The HCFS URI of the main R file to use as the driver. + // Must be a .R file. + string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The arguments to pass to the driver. Do not include arguments, + // such as `--conf`, that can be set as job properties, since a collision may + // occur that causes an incorrect job submission. + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of files to be copied to the working directory of + // R drivers and distributed tasks. Useful for naively parallel tasks. + repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of archives to be extracted in the working directory of + // Spark drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, and .zip. + repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A mapping of property names to values, used to configure SparkR. + // Properties that conflict with values set by the Dataproc API may be + // overwritten. Can include properties set in + // /etc/spark/conf/spark-defaults.conf and classes in user code. 
+  map<string, string> properties = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running [Presto](https://prestosql.io/) queries.
+message PrestoJob {
+  // Required. The sequence of Presto queries to execute, specified as
+  // either an HCFS file URI or as a list of queries.
+  oneof queries {
+    // The HCFS URI of the script that contains SQL queries.
+    string query_file_uri = 1;
+
+    // A list of queries.
+    QueryList query_list = 2;
+  }
+
+  // Optional. Whether to continue executing queries if a query fails.
+  // The default value is `false`. Setting to `true` can be useful when
+  // executing independent parallel queries.
+  bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The format in which query output will be displayed. See the
+  // Presto documentation for supported output formats.
+  string output_format = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Presto client tags to attach to this query.
+  repeated string client_tags = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values. Used to set Presto
+  // [session properties](https://prestodb.io/docs/current/sql/set-session.html).
+  // Equivalent to using the --session flag in the Presto CLI.
+  map<string, string> properties = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL];
+}
+
 // Dataproc job config.
 message JobPlacement {
   // Required. The name of the cluster where the job will be submitted.
@@ -562,23 +628,29 @@
   // Required. The application/framework-specific portion of the job.
   oneof type_job {
-    // Job is a Hadoop job.
-    HadoopJob hadoop_job = 3;
+    // Optional. Job is a Hadoop job.
+    HadoopJob hadoop_job = 3 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. Job is a Spark job.
+    SparkJob spark_job = 4 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. Job is a PySpark job.
+    PySparkJob pyspark_job = 5 [(google.api.field_behavior) = OPTIONAL];
 
-    // Job is a Spark job.
-    SparkJob spark_job = 4;
+    // Optional. Job is a Hive job.
+    HiveJob hive_job = 6 [(google.api.field_behavior) = OPTIONAL];
 
-    // Job is a Pyspark job.
-    PySparkJob pyspark_job = 5;
+    // Optional. Job is a Pig job.
+    PigJob pig_job = 7 [(google.api.field_behavior) = OPTIONAL];
 
-    // Job is a Hive job.
-    HiveJob hive_job = 6;
+    // Optional. Job is a SparkR job.
+    SparkRJob spark_r_job = 21 [(google.api.field_behavior) = OPTIONAL];
 
-    // Job is a Pig job.
-    PigJob pig_job = 7;
+    // Optional. Job is a SparkSql job.
+    SparkSqlJob spark_sql_job = 12 [(google.api.field_behavior) = OPTIONAL];
 
-    // Job is a SparkSql job.
-    SparkSqlJob spark_sql_job = 12;
+    // Optional. Job is a Presto job.
+    PrestoJob presto_job = 23 [(google.api.field_behavior) = OPTIONAL];
   }
 
   // Output only. The job status.
Additional application-specific diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2.py b/google/cloud/dataproc_v1/proto/jobs_pb2.py index 68b7392f..b7f6ef45 100644 --- a/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ b/google/cloud/dataproc_v1/proto/jobs_pb2.py @@ -18,6 +18,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -31,12 +34,13 @@ "\n\034com.google.cloud.dataproc.v1B\tJobsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xf1\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xef\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf8\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 
\x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb5\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf8\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xd9\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.StateB\x03\xe0\x41\x03\x12\x17\n\x07\x64\x65tails\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x43\n\x08substate\x18\x07 
\x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xa5\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.StateB\x03\xe0\x41\x02\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x02\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xcd\x07\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReferenceB\x03\xe0\x41\x01\x12>\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacementB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplicationB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12>\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xb3\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 \x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xbc\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"b\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\x9b\t\n\rJobController\x12\xb1\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"Y\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x15project_id,region,job\x12\xad\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"[\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x12\xc9\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"f\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xbd\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"e\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x18project_id,region,job_id\x12\xac\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xf1\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xef\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf8\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb5\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 
\x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf8\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xb6\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.SparkRJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x8a\x03\n\tPrestoJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1a\n\routput_format\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0b\x63lient_tags\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x06 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.PrestoJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xd9\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.StateB\x03\xe0\x41\x03\x12\x17\n\x07\x64\x65tails\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x43\n\x08substate\x18\x07 
\x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xa5\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.StateB\x03\xe0\x41\x02\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x02\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xec\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReferenceB\x03\xe0\x41\x01\x12>\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobB\x03\xe0\x41\x01H\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobB\x03\xe0\x41\x01H\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobB\x03\xe0\x41\x01H\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobB\x03\xe0\x41\x01H\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobB\x03\xe0\x41\x01H\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32#.google.cloud.dataproc.v1.SparkRJobB\x03\xe0\x41\x01H\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobB\x03\xe0\x41\x01H\x00\x12>\n\npresto_job\x18\x17 \x01(\x0b\x32#.google.cloud.dataproc.v1.PrestoJobB\x03\xe0\x41\x01H\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplicationB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12>\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 
\x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xb3\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 \x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xbc\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"b\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\x9b\t\n\rJobController\x12\xb1\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"Y\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x15project_id,region,job\x12\xad\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"[\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x12\xc9\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"f\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xbd\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"e\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x18project_id,region,job_id\x12\xac\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=465, - serialized_end=577, + serialized_start=502, + serialized_end=614, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -136,8 +140,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3471, - serialized_end=3640, + serialized_start=4218, + serialized_end=4387, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -162,8 +166,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3642, - serialized_end=3714, + serialized_start=4389, + serialized_end=4461, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -207,8 +211,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3937, - serialized_end=4072, + serialized_start=4684, + serialized_end=4819, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -230,8 +234,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5582, - serialized_end=5636, + serialized_start=6488, + serialized_end=6542, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -288,8 +292,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=362, - serialized_end=463, + serialized_start=399, + serialized_end=500, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -326,8 +330,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=256, - serialized_end=577, + serialized_start=293, + serialized_end=614, ) @@ -383,8 +387,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _HADOOPJOB = _descriptor.Descriptor( @@ -555,8 +559,8 @@ fields=[], ) ], - serialized_start=580, - serialized_end=949, + serialized_start=617, + serialized_end=986, ) @@ -612,8 +616,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _SPARKJOB = _descriptor.Descriptor( @@ -784,8 +788,8 @@ fields=[], ) ], - serialized_start=952, - serialized_end=1319, + serialized_start=989, + serialized_end=1356, ) @@ -841,8 +845,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -1005,8 +1009,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1322, - serialized_end=1698, + serialized_start=1359, + serialized_end=1735, ) @@ -1044,8 +1048,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1700, - serialized_end=1733, + serialized_start=1737, + serialized_end=1770, ) @@ -1101,8 +1105,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2057, - serialized_end=2111, + serialized_start=2094, + serialized_end=2148, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1157,8 +1161,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _HIVEJOB = _descriptor.Descriptor( @@ -1293,8 +1297,8 @@ fields=[], ) ], - serialized_start=1736, - serialized_end=2173, + serialized_start=1773, + serialized_end=2210, ) @@ -1350,8 +1354,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2057, - serialized_end=2111, + serialized_start=2094, + serialized_end=2148, ) 
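Reviewer note on the serialized_start/serialized_end churn in these hunks: the
offsets index into the file's serialized FileDescriptorProto, so adding the
google.longrunning import and the new SparkRJob/PrestoJob messages shifts every
later descriptor's byte range by a fixed amount. Nothing resolves these values
positionally at runtime; descriptors are looked up by name, as in this
hypothetical check (not part of the patch):

    # Hypothetical sanity check: lookups go by name, so the shifted
    # byte offsets above are invisible to client code.
    from google.cloud.dataproc_v1.proto import jobs_pb2

    spark_r = jobs_pb2.DESCRIPTOR.message_types_by_name["SparkRJob"]
    print([f.name for f in spark_r.fields])
    # -> ['main_r_file_uri', 'args', 'file_uris', 'archive_uris',
    #     'properties', 'logging_config']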
_SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1406,8 +1410,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1542,8 +1546,8 @@ fields=[], ) ], - serialized_start=2176, - serialized_end=2661, + serialized_start=2213, + serialized_end=2698, ) @@ -1599,8 +1603,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2057, - serialized_end=2111, + serialized_start=2094, + serialized_end=2148, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1655,8 +1659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=927, + serialized_end=976, ) _PIGJOB = _descriptor.Descriptor( @@ -1809,8 +1813,404 @@ fields=[], ) ], - serialized_start=2664, - serialized_end=3168, + serialized_start=2701, + serialized_end=3205, +) + + +_SPARKRJOB_PROPERTIESENTRY = _descriptor.Descriptor( + name="PropertiesEntry", + full_name="google.cloud.dataproc.v1.SparkRJob.PropertiesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.dataproc.v1.SparkRJob.PropertiesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.dataproc.v1.SparkRJob.PropertiesEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=927, + serialized_end=976, +) + +_SPARKRJOB = _descriptor.Descriptor( + name="SparkRJob", + full_name="google.cloud.dataproc.v1.SparkRJob", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="main_r_file_uri", + full_name="google.cloud.dataproc.v1.SparkRJob.main_r_file_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="args", + full_name="google.cloud.dataproc.v1.SparkRJob.args", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="file_uris", + full_name="google.cloud.dataproc.v1.SparkRJob.file_uris", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, 
+ ), + _descriptor.FieldDescriptor( + name="archive_uris", + full_name="google.cloud.dataproc.v1.SparkRJob.archive_uris", + index=3, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="properties", + full_name="google.cloud.dataproc.v1.SparkRJob.properties", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="logging_config", + full_name="google.cloud.dataproc.v1.SparkRJob.logging_config", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_SPARKRJOB_PROPERTIESENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3208, + serialized_end=3518, +) + + +_PRESTOJOB_PROPERTIESENTRY = _descriptor.Descriptor( + name="PropertiesEntry", + full_name="google.cloud.dataproc.v1.PrestoJob.PropertiesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.dataproc.v1.PrestoJob.PropertiesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.dataproc.v1.PrestoJob.PropertiesEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=927, + serialized_end=976, +) + +_PRESTOJOB = _descriptor.Descriptor( + name="PrestoJob", + full_name="google.cloud.dataproc.v1.PrestoJob", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="query_file_uri", + full_name="google.cloud.dataproc.v1.PrestoJob.query_file_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="query_list", + full_name="google.cloud.dataproc.v1.PrestoJob.query_list", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="continue_on_failure", + full_name="google.cloud.dataproc.v1.PrestoJob.continue_on_failure", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_format", + full_name="google.cloud.dataproc.v1.PrestoJob.output_format", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="client_tags", + full_name="google.cloud.dataproc.v1.PrestoJob.client_tags", + index=4, + number=5, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="properties", + full_name="google.cloud.dataproc.v1.PrestoJob.properties", + index=5, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="logging_config", + full_name="google.cloud.dataproc.v1.PrestoJob.logging_config", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_PRESTOJOB_PROPERTIESENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="queries", + full_name="google.cloud.dataproc.v1.PrestoJob.queries", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3521, + serialized_end=3915, ) @@ -1866,8 +2266,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3170, - serialized_end=3238, + serialized_start=3917, + serialized_end=3985, ) @@ -1959,8 +2359,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3241, - serialized_end=3714, + serialized_start=3988, + serialized_end=4461, ) @@ -2016,8 +2416,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3716, - serialized_end=3776, + serialized_start=4463, + serialized_end=4523, ) @@ -2109,8 +2509,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3779, - serialized_end=4072, + serialized_start=4526, + serialized_end=4819, ) @@ -2166,8 +2566,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4991, - serialized_end=5036, + serialized_start=5897, + serialized_end=5942, ) _JOB = _descriptor.Descriptor( @@ -2228,7 +2628,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2246,7 +2646,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2264,7 +2664,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2282,7 +2682,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2300,13 +2700,31 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="spark_r_job", + full_name="google.cloud.dataproc.v1.Job.spark_r_job", + index=7, + number=21, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="spark_sql_job", full_name="google.cloud.dataproc.v1.Job.spark_sql_job", - index=7, + index=8, number=12, type=11, cpp_type=10, @@ -2318,13 +2736,31 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="presto_job", + full_name="google.cloud.dataproc.v1.Job.presto_job", + index=9, + number=23, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="status", full_name="google.cloud.dataproc.v1.Job.status", - index=8, + index=10, number=8, type=11, cpp_type=10, @@ -2342,7 +2778,7 @@ _descriptor.FieldDescriptor( name="status_history", full_name="google.cloud.dataproc.v1.Job.status_history", - index=9, + index=11, number=13, type=11, cpp_type=10, @@ -2360,7 +2796,7 @@ _descriptor.FieldDescriptor( name="yarn_applications", full_name="google.cloud.dataproc.v1.Job.yarn_applications", - index=10, + index=12, number=9, type=11, cpp_type=10, @@ -2378,7 +2814,7 @@ _descriptor.FieldDescriptor( name="driver_output_resource_uri", full_name="google.cloud.dataproc.v1.Job.driver_output_resource_uri", - index=11, + index=13, number=17, type=9, cpp_type=9, @@ -2396,7 +2832,7 @@ _descriptor.FieldDescriptor( name="driver_control_files_uri", full_name="google.cloud.dataproc.v1.Job.driver_control_files_uri", - index=12, + index=14, number=15, type=9, cpp_type=9, @@ -2414,7 +2850,7 @@ _descriptor.FieldDescriptor( name="labels", full_name="google.cloud.dataproc.v1.Job.labels", - index=13, + index=15, number=18, type=11, cpp_type=10, @@ -2432,7 +2868,7 @@ _descriptor.FieldDescriptor( name="scheduling", full_name="google.cloud.dataproc.v1.Job.scheduling", - index=14, + index=16, number=20, type=11, cpp_type=10, @@ -2450,7 +2886,7 @@ _descriptor.FieldDescriptor( name="job_uuid", full_name="google.cloud.dataproc.v1.Job.job_uuid", - index=15, + index=17, number=22, type=9, cpp_type=9, @@ -2482,8 +2918,8 @@ fields=[], ) ], - serialized_start=4075, - serialized_end=5048, + serialized_start=4822, + serialized_end=5954, ) @@ -2521,8 +2957,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5050, - serialized_end=5101, + serialized_start=5956, + serialized_end=6007, ) @@ -2614,8 +3050,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5104, - serialized_end=5242, + serialized_start=6010, + serialized_end=6148, ) @@ -2689,8 +3125,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5244, - serialized_end=5326, + serialized_start=6150, + serialized_end=6232, ) @@ -2836,8 +3272,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5329, - serialized_end=5636, + serialized_start=6235, + serialized_end=6542, ) @@ -2947,8 +3383,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5639, - serialized_end=5827, + serialized_start=6545, + serialized_end=6733, ) @@ -3004,8 +3440,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5829, - serialized_end=5927, + serialized_start=6735, + serialized_end=6833, ) @@ -3079,8 +3515,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5929, - serialized_end=6014, + serialized_start=6835, + serialized_end=6920, ) @@ -3154,8 +3590,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6016, - serialized_end=6101, + serialized_start=6922, + serialized_end=7007, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3248,6 +3684,25 @@ _PIGJOB.fields_by_name["query_list"].containing_oneof = _PIGJOB.oneofs_by_name[ "queries" ] +_SPARKRJOB_PROPERTIESENTRY.containing_type = _SPARKRJOB +_SPARKRJOB.fields_by_name["properties"].message_type = _SPARKRJOB_PROPERTIESENTRY +_SPARKRJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG +_PRESTOJOB_PROPERTIESENTRY.containing_type = _PRESTOJOB +_PRESTOJOB.fields_by_name["query_list"].message_type = _QUERYLIST +_PRESTOJOB.fields_by_name["properties"].message_type = _PRESTOJOB_PROPERTIESENTRY +_PRESTOJOB.fields_by_name["logging_config"].message_type = _LOGGINGCONFIG +_PRESTOJOB.oneofs_by_name["queries"].fields.append( + _PRESTOJOB.fields_by_name["query_file_uri"] +) +_PRESTOJOB.fields_by_name[ + "query_file_uri" +].containing_oneof = _PRESTOJOB.oneofs_by_name["queries"] +_PRESTOJOB.oneofs_by_name["queries"].fields.append( + _PRESTOJOB.fields_by_name["query_list"] +) +_PRESTOJOB.fields_by_name["query_list"].containing_oneof = _PRESTOJOB.oneofs_by_name[ + "queries" +] _JOBSTATUS.fields_by_name["state"].enum_type = _JOBSTATUS_STATE _JOBSTATUS.fields_by_name[ "state_start_time" @@ -3265,7 +3720,9 @@ _JOB.fields_by_name["pyspark_job"].message_type = _PYSPARKJOB _JOB.fields_by_name["hive_job"].message_type = _HIVEJOB _JOB.fields_by_name["pig_job"].message_type = _PIGJOB +_JOB.fields_by_name["spark_r_job"].message_type = _SPARKRJOB _JOB.fields_by_name["spark_sql_job"].message_type = _SPARKSQLJOB +_JOB.fields_by_name["presto_job"].message_type = _PRESTOJOB _JOB.fields_by_name["status"].message_type = _JOBSTATUS _JOB.fields_by_name["status_history"].message_type = _JOBSTATUS _JOB.fields_by_name["yarn_applications"].message_type = _YARNAPPLICATION @@ -3281,8 +3738,12 @@ _JOB.fields_by_name["hive_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] _JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["pig_job"]) _JOB.fields_by_name["pig_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] +_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_r_job"]) +_JOB.fields_by_name["spark_r_job"].containing_oneof = _JOB.oneofs_by_name["type_job"] 
_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["spark_sql_job"])
 _JOB.fields_by_name["spark_sql_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
+_JOB.oneofs_by_name["type_job"].fields.append(_JOB.fields_by_name["presto_job"])
+_JOB.fields_by_name["presto_job"].containing_oneof = _JOB.oneofs_by_name["type_job"]
 _SUBMITJOBREQUEST.fields_by_name["job"].message_type = _JOB
 _LISTJOBSREQUEST.fields_by_name[
     "job_state_matcher"
@@ -3301,6 +3762,8 @@
 DESCRIPTOR.message_types_by_name["HiveJob"] = _HIVEJOB
 DESCRIPTOR.message_types_by_name["SparkSqlJob"] = _SPARKSQLJOB
 DESCRIPTOR.message_types_by_name["PigJob"] = _PIGJOB
+DESCRIPTOR.message_types_by_name["SparkRJob"] = _SPARKRJOB
+DESCRIPTOR.message_types_by_name["PrestoJob"] = _PRESTOJOB
 DESCRIPTOR.message_types_by_name["JobPlacement"] = _JOBPLACEMENT
 DESCRIPTOR.message_types_by_name["JobStatus"] = _JOBSTATUS
 DESCRIPTOR.message_types_by_name["JobReference"] = _JOBREFERENCE
@@ -3739,6 +4202,108 @@
 _sym_db.RegisterMessage(PigJob.ScriptVariablesEntry)
 _sym_db.RegisterMessage(PigJob.PropertiesEntry)
 
+SparkRJob = _reflection.GeneratedProtocolMessageType(
+    "SparkRJob",
+    (_message.Message,),
+    dict(
+        PropertiesEntry=_reflection.GeneratedProtocolMessageType(
+            "PropertiesEntry",
+            (_message.Message,),
+            dict(
+                DESCRIPTOR=_SPARKRJOB_PROPERTIESENTRY,
+                __module__="google.cloud.dataproc_v1.proto.jobs_pb2"
+                # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkRJob.PropertiesEntry)
+            ),
+        ),
+        DESCRIPTOR=_SPARKRJOB,
+        __module__="google.cloud.dataproc_v1.proto.jobs_pb2",
+        __doc__="""A Dataproc job for running `Apache
+  SparkR <https://spark.apache.org/docs/latest/sparkr.html>`__
+  applications on YARN.
+
+
+  Attributes:
+      main_r_file_uri:
+          Required. The HCFS URI of the main R file to use as the
+          driver. Must be a .R file.
+      args:
+          Optional. The arguments to pass to the driver. Do not include
+          arguments, such as ``--conf``, that can be set as job
+          properties, since a collision may occur that causes an
+          incorrect job submission.
+      file_uris:
+          Optional. HCFS URIs of files to be copied to the working
+          directory of R drivers and distributed tasks. Useful for
+          naively parallel tasks.
+      archive_uris:
+          Optional. HCFS URIs of archives to be extracted in the working
+          directory of Spark drivers and tasks. Supported file types:
+          .jar, .tar, .tar.gz, .tgz, and .zip.
+      properties:
+          Optional. A mapping of property names to values, used to
+          configure SparkR. Properties that conflict with values set by
+          the Dataproc API may be overwritten. Can include properties
+          set in /etc/spark/conf/spark-defaults.conf and classes in user
+          code.
+      logging_config:
+          Optional. The runtime log config for job execution.
+      """,
+        # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SparkRJob)
+    ),
+)
+_sym_db.RegisterMessage(SparkRJob)
+_sym_db.RegisterMessage(SparkRJob.PropertiesEntry)
+
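Reviewer note: a minimal, hypothetical sketch of how the SparkRJob and
PrestoJob messages registered above might be exercised through this library's
JobControllerClient. The project, region, cluster, and GCS paths are
placeholders, not part of this patch:

    # Illustrative only; assumes a running cluster and this library's
    # submit_job(project_id, region, job) surface.
    from google.cloud import dataproc_v1

    client = dataproc_v1.JobControllerClient()

    # SparkR: the new spark_r_job member of the Job.type_job oneof,
    # passed as a dict of the same form as the protobuf message.
    spark_r_job = {
        "placement": {"cluster_name": "example-cluster"},
        "spark_r_job": {
            "main_r_file_uri": "gs://example-bucket/analysis.R",
            "args": ["--input", "gs://example-bucket/data.csv"],
        },
    }
    client.submit_job("example-project", "example-region", spark_r_job)

    # Presto: map<string, string> fields such as `properties` behave
    # like plain dicts on the message object.
    presto_job = dataproc_v1.types.Job()
    presto_job.placement.cluster_name = "example-cluster"
    presto_job.presto_job.query_list.queries.append("SELECT 1")
    presto_job.presto_job.properties["query.max-memory"] = "1GB"
    assert presto_job.WhichOneof("type_job") == "presto_job"  # oneof keeps one member
    client.submit_job("example-project", "example-region", presto_job)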
+PrestoJob = _reflection.GeneratedProtocolMessageType(
+    "PrestoJob",
+    (_message.Message,),
+    dict(
+        PropertiesEntry=_reflection.GeneratedProtocolMessageType(
+            "PropertiesEntry",
+            (_message.Message,),
+            dict(
+                DESCRIPTOR=_PRESTOJOB_PROPERTIESENTRY,
+                __module__="google.cloud.dataproc_v1.proto.jobs_pb2"
+                # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PrestoJob.PropertiesEntry)
+            ),
+        ),
+        DESCRIPTOR=_PRESTOJOB,
+        __module__="google.cloud.dataproc_v1.proto.jobs_pb2",
+        __doc__="""A Dataproc job for running
+  `Presto <https://prestosql.io/>`__ queries.
+
+
+  Attributes:
+      queries:
+          Required. The sequence of Presto queries to execute, specified
+          as either an HCFS file URI or as a list of queries.
+      query_file_uri:
+          The HCFS URI of the script that contains SQL queries.
+      query_list:
+          A list of queries.
+      continue_on_failure:
+          Optional. Whether to continue executing queries if a query
+          fails. The default value is ``false``. Setting to ``true`` can
+          be useful when executing independent parallel queries.
+      output_format:
+          Optional. The format in which query output will be displayed.
+          See the Presto documentation for supported output formats.
+      client_tags:
+          Optional. Presto client tags to attach to this query.
+      properties:
+          Optional. A mapping of property names to values. Used to set
+          Presto `session properties
+          <https://prestodb.io/docs/current/sql/set-session.html>`__.
+          Equivalent to using the --session flag in the Presto CLI.
+      logging_config:
+          Optional. The runtime log config for job execution.
+      """,
+        # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.PrestoJob)
+    ),
+)
+_sym_db.RegisterMessage(PrestoJob)
+_sym_db.RegisterMessage(PrestoJob.PropertiesEntry)
+
 JobPlacement = _reflection.GeneratedProtocolMessageType(
     "JobPlacement",
     (_message.Message,),
@@ -3876,17 +4441,21 @@
           Required. The application/framework-specific portion of the
           job.
         hadoop_job:
-          Job is a Hadoop job.
+          Optional. Job is a Hadoop job.
         spark_job:
-          Job is a Spark job.
+          Optional. Job is a Spark job.
         pyspark_job:
-          Job is a Pyspark job.
+          Optional. Job is a PySpark job.
         hive_job:
-          Job is a Hive job.
+          Optional. Job is a Hive job.
         pig_job:
-          Job is a Pig job.
+          Optional. Job is a Pig job.
+        spark_r_job:
+          Optional. Job is a SparkR job.
         spark_sql_job:
-          Job is a SparkSql job.
+          Optional. Job is a SparkSql job.
+        presto_job:
+          Optional. Job is a Presto job.
         status:
           Output only. The job status. Additional application-specific
           status information may be contained in the type\_job and
@@ -4197,6 +4766,19 @@
 _PIGJOB.fields_by_name["properties"]._options = None
 _PIGJOB.fields_by_name["jar_file_uris"]._options = None
 _PIGJOB.fields_by_name["logging_config"]._options = None
+_SPARKRJOB_PROPERTIESENTRY._options = None
+_SPARKRJOB.fields_by_name["main_r_file_uri"]._options = None
+_SPARKRJOB.fields_by_name["args"]._options = None
+_SPARKRJOB.fields_by_name["file_uris"]._options = None
+_SPARKRJOB.fields_by_name["archive_uris"]._options = None
+_SPARKRJOB.fields_by_name["properties"]._options = None
+_SPARKRJOB.fields_by_name["logging_config"]._options = None
+_PRESTOJOB_PROPERTIESENTRY._options = None
+_PRESTOJOB.fields_by_name["continue_on_failure"]._options = None
+_PRESTOJOB.fields_by_name["output_format"]._options = None
+_PRESTOJOB.fields_by_name["client_tags"]._options = None
+_PRESTOJOB.fields_by_name["properties"]._options = None
+_PRESTOJOB.fields_by_name["logging_config"]._options = None
 _JOBPLACEMENT.fields_by_name["cluster_name"]._options = None
 _JOBPLACEMENT.fields_by_name["cluster_uuid"]._options = None
 _JOBSTATUS.fields_by_name["state"]._options = None
@@ -4212,6 +4794,14 @@
 _JOB_LABELSENTRY._options = None
 _JOB.fields_by_name["reference"]._options = None
 _JOB.fields_by_name["placement"]._options = None
+_JOB.fields_by_name["hadoop_job"]._options = None
+_JOB.fields_by_name["spark_job"]._options = None
+_JOB.fields_by_name["pyspark_job"]._options = None
+_JOB.fields_by_name["hive_job"]._options = None
+_JOB.fields_by_name["pig_job"]._options = None
+_JOB.fields_by_name["spark_r_job"]._options = None
+_JOB.fields_by_name["spark_sql_job"]._options = None
+_JOB.fields_by_name["presto_job"]._options = None
_JOB.fields_by_name["status"]._options = None _JOB.fields_by_name["status_history"]._options = None _JOB.fields_by_name["yarn_applications"]._options = None @@ -4257,8 +4847,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=6104, - serialized_end=7283, + serialized_start=7010, + serialized_end=8189, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py b/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py index d2706382..f10f0252 100644 --- a/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py +++ b/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py @@ -84,9 +84,9 @@ def UpdateJob(self, request, context): def CancelJob(self, request, context): """Starts a job cancellation request. To access the job resource after cancellation, call - [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or - [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). + [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/google/cloud/dataproc_v1/proto/operations.proto b/google/cloud/dataproc_v1/proto/operations.proto index 4af2a5f8..724d2a89 100644 --- a/google/cloud/dataproc_v1/proto/operations.proto +++ b/google/cloud/dataproc_v1/proto/operations.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/google/cloud/dataproc_v1/proto/shared.proto b/google/cloud/dataproc_v1/proto/shared.proto index 74bd56a8..c6ff8f28 100644 --- a/google/cloud/dataproc_v1/proto/shared.proto +++ b/google/cloud/dataproc_v1/proto/shared.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/google/cloud/dataproc_v1/proto/workflow_templates.proto b/google/cloud/dataproc_v1/proto/workflow_templates.proto index 30b5ced4..2db55798 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates.proto +++ b/google/cloud/dataproc_v1/proto/workflow_templates.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; @@ -78,9 +77,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -119,9 +118,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -320,22 +319,16 @@ message OrderedJob { // Required. The job definition. oneof job_type { - // Job is a Hadoop job. HadoopJob hadoop_job = 2; - // Job is a Spark job. SparkJob spark_job = 3; - // Job is a Pyspark job. PySparkJob pyspark_job = 4; - // Job is a Hive job. HiveJob hive_job = 5; - // Job is a Pig job. PigJob pig_job = 6; - // Job is a SparkSql job. SparkSqlJob spark_sql_job = 7; } @@ -708,9 +701,7 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. // // The `template.version` field must match the current version. - WorkflowTemplate template = 1 [ - (google.api.field_behavior) = REQUIRED - ]; + WorkflowTemplate template = 1 [(google.api.field_behavior) = REQUIRED]; } // A request to list workflow templates in a project. diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py index 0c3125b1..10d4ade0 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py @@ -2641,18 +2641,6 @@ hyphen. Must consist of between 3 and 50 characters. job_type: Required. The job definition. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_sql_job: - Job is a SparkSql job. labels: Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long. Label values must be between diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py index 3d9079ac..f766e9ea 100644 --- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py +++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py @@ -94,9 +94,9 @@ def InstantiateWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). 
+ [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be @@ -124,9 +124,9 @@ def InstantiateInlineWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be diff --git a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py index 9f7b4695..e2d19713 100644 --- a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py @@ -47,7 +47,7 @@ class AutoscalingPolicyServiceClient(object): """ The API interface for managing autoscaling policies in the - Google Cloud Dataproc API. + Cloud Dataproc API. """ SERVICE_ADDRESS = "dataproc.googleapis.com:443" diff --git a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py index e64fc0d7..267dee2a 100644 --- a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py @@ -239,7 +239,7 @@ def create_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster (Union[dict, ~google.cloud.dataproc_v1beta2.types.Cluster]): Required. The cluster to create. If a dict is provided, it must be of the same form as the protobuf @@ -350,7 +350,7 @@ def update_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. cluster (Union[dict, ~google.cloud.dataproc_v1beta2.types.Cluster]): Required. The changes to the cluster. @@ -428,10 +428,12 @@ def update_cluster( message :class:`~google.cloud.dataproc_v1beta2.types.FieldMask` graceful_decommission_timeout (Union[dict, ~google.cloud.dataproc_v1beta2.types.Duration]): Optional. Timeout for graceful YARN decomissioning. Graceful decommissioning allows removing nodes from the cluster without - interrupting jobs in progress. Timeout specifies how long to wait for jobs - in progress to finish before forcefully removing nodes (and potentially - interrupting jobs). 
Default timeout is 0 (for forceful decommission), and - the maximum allowed timeout is 1 day. + interrupting jobs in progress. Timeout specifies how long to wait for + jobs in progress to finish before forcefully removing nodes (and + potentially interrupting jobs). Default timeout is 0 (for forceful + decommission), and the maximum allowed timeout is 1 day (see JSON + representation of + `Duration `__). Only supported on Dataproc image versions 1.2 and higher. @@ -541,7 +543,7 @@ def delete_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. cluster_uuid (str): Optional. Specifying the ``cluster_uuid`` means the RPC should fail (with error NOT\_FOUND) if cluster with specified UUID does not exist. @@ -634,7 +636,7 @@ def get_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -714,7 +716,7 @@ def list_clusters( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. filter_ (str): Optional. A filter constraining the clusters to list. Filters are case-sensitive and have the following syntax: @@ -805,7 +807,7 @@ def diagnose_cluster( will be `ClusterOperationMetadata `__. After the operation completes, ``Operation.response`` contains - `Empty `__. + ``Empty``. Example: >>> from google.cloud import dataproc_v1beta2 @@ -835,7 +837,7 @@ def diagnose_cluster( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the cluster belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. cluster_name (str): Required. The cluster name. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/google/cloud/dataproc_v1beta2/gapic/enums.py b/google/cloud/dataproc_v1beta2/gapic/enums.py index 8c3b0980..d29e1992 100644 --- a/google/cloud/dataproc_v1beta2/gapic/enums.py +++ b/google/cloud/dataproc_v1beta2/gapic/enums.py @@ -97,7 +97,7 @@ class Substate(enum.IntEnum): Applies to RUNNING state. STALE_STATUS (int): The agent-reported status is out of date (may occur if - Cloud Dataproc loses communication with Agent). + Dataproc loses communication with Agent). Applies to RUNNING state. """ @@ -156,7 +156,7 @@ class Substate(enum.IntEnum): Applies to RUNNING state. STALE_STATUS (int): The agent-reported status is out of date, which may be caused by a - loss of communication between the agent and Cloud Dataproc. If the + loss of communication between the agent and Dataproc. If the agent does not send a timely update, the job will fail. Applies to RUNNING state. 
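The reworked `graceful_decommission_timeout` documentation above spells out the zero default, the one-day maximum, and the JSON representation of Duration. A sketch of a graceful downscale under those constraints (all names are placeholders; the worker count and timeout are illustrative):

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.ClusterControllerClient()

    # Shrink the primary worker group, giving YARN up to one hour to let
    # jobs finish on nodes slated for removal before they are deleted.
    operation = client.update_cluster(
        project_id="my-project",
        region="us-central1",
        cluster_name="my-cluster",
        cluster={"config": {"worker_config": {"num_instances": 2}}},
        update_mask={"paths": ["config.worker_config.num_instances"]},
        graceful_decommission_timeout={"seconds": 3600},  # capped at 1 day (86400s)
    )
    cluster = operation.result()  # block on the long-running operation

The Duration and FieldMask arguments are passed in their dict (JSON) form, matching the protobuf messages referenced in the docstring.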
diff --git a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py index 21b6ca49..65adbc7e 100644 --- a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py @@ -223,7 +223,7 @@ def submit_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The job resource. If a dict is provided, it must be of the same form as the protobuf @@ -306,7 +306,7 @@ def get_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -388,7 +388,7 @@ def list_jobs( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -509,7 +509,7 @@ def update_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The changes to the job. @@ -599,7 +599,7 @@ def cancel_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -670,7 +670,7 @@ def delete_job( Args: project_id (str): Required. The ID of the Google Cloud Platform project that the job belongs to. - region (str): Required. The Cloud Dataproc region in which to handle the request. + region (str): Required. The Dataproc region in which to handle the request. job_id (str): Required. The job ID. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py index 767268e5..25cd8277 100644 --- a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py +++ b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py @@ -194,7 +194,7 @@ def diagnose_cluster(self): will be `ClusterOperationMetadata `__. 
After the operation completes, ``Operation.response`` contains - `Empty `__. + ``Empty``. Returns: Callable: A callable which accepts the appropriate diff --git a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py index 5319e2f1..c39f0267 100644 --- a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py +++ b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py @@ -59,7 +59,7 @@ class WorkflowTemplateServiceClient(object): """ The API interface for managing Workflow Templates in the - Cloud Dataproc API. + Dataproc API. """ SERVICE_ADDRESS = "dataproc.googleapis.com:443" diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto index 36d507c8..e5d16fd9 100644 --- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto +++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -30,7 +29,7 @@ option java_outer_classname = "AutoscalingPoliciesProto"; option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for managing autoscaling policies in the -// Google Cloud Dataproc API. +// Cloud Dataproc API. service AutoscalingPolicyService { option (google.api.default_host) = "dataproc.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py index e10b0b75..7c3be028 100644 --- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py @@ -792,7 +792,8 @@ dict( DESCRIPTOR=_AUTOSCALINGPOLICY, __module__="google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2", - __doc__="""Describes an autoscaling policy for Dataproc cluster autoscaler. + __doc__="""Describes an autoscaling policy for Dataproc cluster + autoscaler. Attributes: diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py index 1e6910a5..0163633a 100644 --- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py +++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py @@ -9,7 +9,7 @@ class AutoscalingPolicyServiceStub(object): """The API interface for managing autoscaling policies in the - Google Cloud Dataproc API. + Cloud Dataproc API. """ def __init__(self, channel): @@ -47,7 +47,7 @@ def __init__(self, channel): class AutoscalingPolicyServiceServicer(object): """The API interface for managing autoscaling policies in the - Google Cloud Dataproc API. + Cloud Dataproc API. 
""" def CreateAutoscalingPolicy(self, request, context): diff --git a/google/cloud/dataproc_v1beta2/proto/clusters.proto b/google/cloud/dataproc_v1beta2/proto/clusters.proto index 4b2ee649..2e9e648c 100644 --- a/google/cloud/dataproc_v1beta2/proto/clusters.proto +++ b/google/cloud/dataproc_v1beta2/proto/clusters.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -20,7 +19,7 @@ package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; -import "google/cloud/dataproc/v1beta2/operations.proto"; +import "google/api/resource.proto"; import "google/cloud/dataproc/v1beta2/shared.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; @@ -40,7 +39,7 @@ service ClusterController { // Creates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/clusters" @@ -55,7 +54,7 @@ service ClusterController { // Updates a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { patch: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" @@ -70,7 +69,7 @@ service ClusterController { // Deletes a cluster in a project. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { delete: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" @@ -101,11 +100,11 @@ service ClusterController { // Gets cluster diagnostic information. The returned // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). 
// After the operation completes, // [Operation.response][google.longrunning.Operation.response] // contains - // [Empty](google.protobuf.Empty). + // [Empty][google.protobuf.Empty]. rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" @@ -129,7 +128,7 @@ message Cluster { // unique. Names of deleted clusters can be reused. string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - // Required. The cluster config. Note that Cloud Dataproc may set + // Required. The cluster config. Note that Dataproc may set // default values, and values may change when clusters are updated. ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; @@ -148,7 +147,7 @@ message Cluster { // Output only. The previous cluster status. repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc + // Output only. A cluster UUID (Unique Universal Identifier). Dataproc // generates this value when it creates the cluster. string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -161,14 +160,14 @@ message Cluster { // The cluster config. message ClusterConfig { - // Optional. A Google Cloud Storage bucket used to stage job + // Optional. A Cloud Storage bucket used to stage job // dependencies, config files, and job driver console output. // If you do not specify a staging bucket, Cloud // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the Google + // ASIA, or EU) for your cluster's staging bucket according to the // Compute Engine zone where your cluster is deployed, and then create // and manage this project-level, per-location bucket (see - // [Cloud Dataproc staging + // [Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; @@ -244,7 +243,7 @@ message AutoscalingConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // - // Note that the policy must be in the same project and Cloud Dataproc region. + // Note that the policy must be in the same project and Dataproc region. string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } @@ -260,7 +259,7 @@ message EncryptionConfig { message GceClusterConfig { // Optional. The zone where the Compute Engine cluster will be located. // On a create request, it is required in the "global" region. If omitted - // in a non-global Cloud Dataproc region, the service will pick a zone in the + // in a non-global Dataproc region, the service will pick a zone in the // corresponding Compute Engine region. On a get request, zone will always be // present. // @@ -302,17 +301,17 @@ message GceClusterConfig { // configured to be accessible without external IP addresses. bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; - // Optional. The service account of the instances. Defaults to the default - // Compute Engine service account. 
Custom service accounts need - // permissions equivalent to the following IAM roles: - // - // * roles/logging.logWriter - // * roles/storage.objectAdmin - // - // (see - // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts - // for more information). - // Example: `[account_id]@[project_id].iam.gserviceaccount.com` + // Optional. The [Dataproc service + // account](/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_cloud_dataproc) + // (also see [VM Data Plane + // identity](/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) + // used by Dataproc cluster VM instances to access Google Cloud Platform + // services. + // + // If not specified, the + // [Compute Engine default service + // account](/compute/docs/access/service-accounts#default_service_account) + // is used. string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in @@ -351,7 +350,7 @@ message InstanceGroupConfig { // For master instance groups, must be set to 1. int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - // Output only. The list of instance names. Cloud Dataproc derives the names + // Output only. The list of instance names. Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -368,7 +367,7 @@ message InstanceGroupConfig { // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` // * `n1-standard-2` // - // **Auto Zone Exception**: If you are using the Cloud Dataproc + // **Auto Zone Exception**: If you are using the Dataproc // [Auto Zone // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type @@ -392,7 +391,7 @@ message InstanceGroupConfig { repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; // Specifies the minimum cpu platform for the Instance Group. - // See [Cloud Dataproc→Minimum CPU Platform] + // See [Dataproc→Minimum CPU Platform] // (/dataproc/docs/concepts/compute/dataproc-min-cpu). string min_cpu_platform = 9; } @@ -420,7 +419,7 @@ message AcceleratorConfig { // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` // * `nvidia-tesla-k80` // - // **Auto Zone Exception**: If you are using the Cloud Dataproc + // **Auto Zone Exception**: If you are using the Dataproc // [Auto Zone // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the accelerator type @@ -452,29 +451,31 @@ message DiskConfig { // Specifies the cluster auto-delete schedule configuration. message LifecycleConfig { - // Optional. The duration to keep the cluster alive while idling. - // Passing this threshold will cause the cluster to be - // deleted. Valid range: **[10m, 14d]**. - // - // Example: **"10m"**, the minimum value, to delete the - // cluster when it has had no jobs running for 10 minutes. + // Optional. The duration to keep the cluster alive while idling (when no jobs + // are running). Passing this threshold will cause the cluster to be + // deleted. Minimum value is 10 minutes; maximum value is 14 days (see JSON + // representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json). 
google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; // Either the exact time the cluster should be deleted at or // the cluster maximum age. oneof ttl { - // Optional. The time when cluster will be auto-deleted. - google.protobuf.Timestamp auto_delete_time = 2; + // Optional. The time when cluster will be auto-deleted. (see JSON representation of + // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Timestamp auto_delete_time = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The lifetime duration of cluster. The cluster will be - // auto-deleted at the end of this period. Valid range: **[10m, 14d]**. - // - // Example: **"1d"**, to delete the cluster 1 day after its creation.. - google.protobuf.Duration auto_delete_ttl = 3; + // auto-deleted at the end of this period. Minimum value is 10 minutes; + // maximum value is 14 days (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Duration auto_delete_ttl = 3 [(google.api.field_behavior) = OPTIONAL]; } // Output only. The time when cluster became idle (most recent job finished) - // and became eligible for deletion due to idleness. + // and became eligible for deletion due to idleness (see JSON representation + // of + // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } @@ -560,7 +561,10 @@ message NodeInitializationAction { string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is - // 10 minutes. Cluster creation fails with an explanatory error message (the + // 10 minutes (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + // + // Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; @@ -602,7 +606,7 @@ message ClusterStatus { UNHEALTHY = 1; // The agent-reported status is out of date (may occur if - // Cloud Dataproc loses communication with Agent). + // Dataproc loses communication with Agent). // // Applies to RUNNING state. STALE_STATUS = 2; @@ -614,7 +618,8 @@ message ClusterStatus { // Output only. Optional details of cluster's state. string detail = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Time when this state was entered. + // Output only. Time when this state was entered (see JSON representation of + // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes @@ -625,7 +630,7 @@ message ClusterStatus { // Specifies the selection and config of software inside the cluster. message SoftwareConfig { // Optional. The version of software inside the cluster. 
It must be one of the - // supported [Cloud Dataproc + // supported [Dataproc // Versions](/dataproc/docs/concepts/versioning/dataproc-versions#supported_cloud_dataproc_versions), // such as "1.2" (including a subminor version, such as "1.2.29"), or the // ["preview" @@ -675,7 +680,7 @@ message CreateClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. @@ -701,7 +706,7 @@ message UpdateClusterRequest { // cluster belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -715,7 +720,8 @@ message UpdateClusterRequest { // interrupting jobs in progress. Timeout specifies how long to wait for jobs // in progress to finish before forcefully removing nodes (and potentially // interrupting jobs). Default timeout is 0 (for forceful decommission), and - // the maximum allowed timeout is 1 day. + // the maximum allowed timeout is 1 day (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). // // Only supported on Dataproc image versions 1.2 and higher. google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; @@ -802,7 +808,7 @@ message DeleteClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -832,7 +838,7 @@ message GetClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. @@ -845,7 +851,7 @@ message ListClustersRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A filter constraining the clusters to list. Filters are @@ -893,7 +899,7 @@ message DiagnoseClusterRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. 
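The cluster-side comments rewritten above pin down two Duration-valued knobs: LifecycleConfig.idle_delete_ttl (valid between 10 minutes and 14 days) and NodeInitializationAction.execution_timeout (10 minutes by default). A sketch combining both at cluster-creation time, with placeholder names throughout and an assumed gs://my-bucket/bootstrap.sh startup script:

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.ClusterControllerClient()

    cluster = {
        "project_id": "my-project",
        "cluster_name": "my-cluster",
        "config": {
            # Auto-delete the cluster after 30 idle minutes; the value must
            # lie in [10m, 14d] and is given as a JSON-form Duration.
            "lifecycle_config": {"idle_delete_ttl": {"seconds": 1800}},
            "initialization_actions": [
                {
                    "executable_file": "gs://my-bucket/bootstrap.sh",
                    # Fail cluster creation if bootstrap runs over 10 minutes
                    # (also the documented default when omitted).
                    "execution_timeout": {"seconds": 600},
                }
            ],
        },
    }

    operation = client.create_cluster("my-project", "us-central1", cluster)
    print(operation.result().cluster_name)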
diff --git a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py index d043480d..33bd6924 100644 --- a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py @@ -19,9 +19,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.dataproc_v1beta2.proto import ( - operations_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2, -) +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.dataproc_v1beta2.proto import ( shared_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2, ) @@ -41,13 +39,13 @@ "\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe6\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x06\x63onfig\x18\x03 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfigB\x03\xe0\x41\x02\x12G\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12\x41\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12I\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaf\x07\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12P\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfigB\x03\xe0\x41\x01\x12N\n\rmaster_config\x18\t \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12N\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12X\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12K\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfigB\x03\xe0\x41\x01\x12M\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfigB\x03\xe0\x41\x01\x12\\\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationActionB\x03\xe0\x41\x01\x12O\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfigB\x03\xe0\x41\x01\x12Q\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfigB\x03\xe0\x41\x01\x12K\n\x0f\x65ndpoint_config\x18\x11 
\x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfigB\x03\xe0\x41\x01\x12K\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfigB\x03\xe0\x41\x01"\xbf\x01\n\x0e\x45ndpointConfig\x12U\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntryB\x03\xe0\x41\x03\x12$\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xa9\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12U\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x43\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfigB\x03\xe0\x41\x03\x12K\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfigB\x03\xe0\x41\x01\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"a\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xf9\x01\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 
\x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x8b\x03\n\rClusterStatus\x12\x46\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x13\n\x06\x64\x65tail\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12L\n\x08substate\x18\x04 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xfe\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12V\n\nproperties\x18\x02 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x9b\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xb3\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"s\n\x14ListClustersResponse\x12=\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xfd\x01\n\x13ReservationAffinity\x12^\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 \x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe7\r\n\x11\x43lusterController\x12\x91\x02\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x1bproject_id, region, cluster\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xbb\x02\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xd5\x01\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x36project_id, region, cluster_name, cluster, update_mask\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xaa\x02\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xda\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"r\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\x12\xeb\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"r\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, 
filter\x12\xba\x02\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xd0\x01\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe6\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x06\x63onfig\x18\x03 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfigB\x03\xe0\x41\x02\x12G\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12\x41\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12I\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaf\x07\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12P\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfigB\x03\xe0\x41\x01\x12N\n\rmaster_config\x18\t \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12N\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12X\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12K\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfigB\x03\xe0\x41\x01\x12M\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfigB\x03\xe0\x41\x01\x12\\\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationActionB\x03\xe0\x41\x01\x12O\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfigB\x03\xe0\x41\x01\x12Q\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfigB\x03\xe0\x41\x01\x12K\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfigB\x03\xe0\x41\x01\x12K\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfigB\x03\xe0\x41\x01"\xbf\x01\n\x0e\x45ndpointConfig\x12U\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntryB\x03\xe0\x41\x03\x12$\n\x17\x65nable_http_port_access\x18\x02 
\x01(\x08\x42\x03\xe0\x41\x01\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xa9\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12U\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x43\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfigB\x03\xe0\x41\x03\x12K\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfigB\x03\xe0\x41\x01\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"a\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\x83\x02\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12;\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00\x12\x39\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01H\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t 
\x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x8b\x03\n\rClusterStatus\x12\x46\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x13\n\x06\x64\x65tail\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12L\n\x08substate\x18\x04 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xfe\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12V\n\nproperties\x18\x02 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x9b\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xb3\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"s\n\x14ListClustersResponse\x12=\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xfd\x01\n\x13ReservationAffinity\x12^\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 \x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe7\r\n\x11\x43lusterController\x12\x91\x02\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x1bproject_id, region, cluster\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xbb\x02\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xd5\x01\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x36project_id, region, cluster_name, cluster, update_mask\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xaa\x02\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xda\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"r\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\x12\xeb\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"r\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, filter\x12\xba\x02\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xd0\x01\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41 project_id, region, 
cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, @@ -84,8 +82,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4509, - serialized_end=4595, + serialized_start=4492, + serialized_end=4578, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -107,8 +105,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4597, - serialized_end=4657, + serialized_start=4580, + serialized_end=4640, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -145,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6489, - serialized_end=6584, + serialized_start=6472, + serialized_end=6567, ) _sym_db.RegisterEnumDescriptor(_RESERVATIONAFFINITY_TYPE) @@ -203,8 +201,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=855, - serialized_end=900, + serialized_start=828, + serialized_end=873, ) _CLUSTER = _descriptor.Descriptor( @@ -367,8 +365,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=414, - serialized_end=900, + serialized_start=387, + serialized_end=873, ) @@ -604,8 +602,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=903, - serialized_end=1846, + serialized_start=876, + serialized_end=1819, ) @@ -661,8 +659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1992, - serialized_end=2040, + serialized_start=1965, + serialized_end=2013, ) _ENDPOINTCONFIG = _descriptor.Descriptor( @@ -717,8 +715,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1849, - serialized_end=2040, + serialized_start=1822, + serialized_end=2013, ) @@ -756,8 +754,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2042, - serialized_end=2086, + serialized_start=2015, + serialized_end=2059, ) @@ -795,8 +793,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2088, - serialized_end=2140, + serialized_start=2061, + serialized_end=2113, ) @@ -852,8 +850,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2521, - serialized_end=2568, + serialized_start=2494, + serialized_end=2541, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -1034,8 +1032,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2143, - serialized_end=2568, + serialized_start=2116, + serialized_end=2541, ) @@ -1217,8 +1215,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2571, - serialized_end=2991, + serialized_start=2544, + serialized_end=2964, ) @@ -1274,8 +1272,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2993, - serialized_end=3092, + serialized_start=2966, + serialized_end=3065, ) @@ -1331,8 +1329,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3094, - 
serialized_end=3170, + serialized_start=3067, + serialized_end=3143, ) @@ -1406,8 +1404,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3172, - serialized_end=3269, + serialized_start=3145, + serialized_end=3242, ) @@ -1451,7 +1449,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1469,7 +1467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1507,8 +1505,8 @@ fields=[], ) ], - serialized_start=3272, - serialized_end=3521, + serialized_start=3245, + serialized_end=3504, ) @@ -1546,8 +1544,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3523, - serialized_end=3611, + serialized_start=3506, + serialized_end=3594, ) @@ -1837,8 +1835,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3614, - serialized_end=4142, + serialized_start=3597, + serialized_end=4125, ) @@ -1894,8 +1892,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4144, - serialized_end=4259, + serialized_start=4127, + serialized_end=4242, ) @@ -1987,8 +1985,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4262, - serialized_end=4657, + serialized_start=4245, + serialized_end=4640, ) @@ -2044,8 +2042,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4865, - serialized_end=4914, + serialized_start=4848, + serialized_end=4897, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -2118,8 +2116,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4660, - serialized_end=4914, + serialized_start=4643, + serialized_end=4897, ) @@ -2175,8 +2173,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5107, - serialized_end=5157, + serialized_start=5090, + serialized_end=5140, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -2231,8 +2229,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5159, - serialized_end=5209, + serialized_start=5142, + serialized_end=5192, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -2287,8 +2285,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4917, - serialized_end=5209, + serialized_start=4900, + serialized_end=5192, ) @@ -2380,8 +2378,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5212, - serialized_end=5367, + serialized_start=5195, + serialized_end=5350, ) @@ -2527,8 +2525,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5370, - serialized_end=5677, + serialized_start=5353, + serialized_end=5660, ) @@ -2638,8 +2636,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5680, - serialized_end=5827, + serialized_start=5663, + serialized_end=5810, ) @@ -2713,8 +2711,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5829, - serialized_end=5921, + serialized_start=5812, + serialized_end=5904, ) @@ -2824,8 +2822,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5924, - serialized_end=6061, + serialized_start=5907, + serialized_end=6044, ) @@ -2881,8 +2879,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6063, - serialized_end=6178, + serialized_start=6046, + serialized_end=6161, ) @@ -2956,8 +2954,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6180, - 
serialized_end=6277, + serialized_start=6163, + serialized_end=6260, ) @@ -2995,8 +2993,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6279, - serialized_end=6328, + serialized_start=6262, + serialized_end=6311, ) @@ -3070,8 +3068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6331, - serialized_end=6584, + serialized_start=6314, + serialized_end=6567, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -3231,7 +3229,7 @@ Required. The cluster name. Cluster names within a project must be unique. Names of deleted clusters can be reused. config: - Required. The cluster config. Note that Cloud Dataproc may set + Required. The cluster config. Note that Dataproc may set default values, and values may change when clusters are updated. labels: @@ -3248,8 +3246,7 @@ Output only. The previous cluster status. cluster_uuid: Output only. A cluster UUID (Unique Universal Identifier). - Cloud Dataproc generates this value when it creates the - cluster. + Dataproc generates this value when it creates the cluster. metrics: Output only. Contains cluster daemon metrics such as HDFS and YARN stats. **Beta Feature**: This report is available for @@ -3272,15 +3269,15 @@ Attributes: config_bucket: - Optional. A Google Cloud Storage bucket used to stage job + Optional. A Cloud Storage bucket used to stage job dependencies, config files, and job driver console output. If you do not specify a staging bucket, Cloud Dataproc will determine a Cloud Storage location (US, ASIA, or EU) for your - cluster's staging bucket according to the Google Compute - Engine zone where your cluster is deployed, and then create - and manage this project-level, per-location bucket (see `Cloud - Dataproc staging bucket `__). + cluster's staging bucket according to the Compute Engine zone + where your cluster is deployed, and then create and manage + this project-level, per-location bucket (see `Dataproc staging + bucket `__). gce_cluster_config: Optional. The shared Compute Engine config settings for all instances in a cluster. @@ -3372,7 +3369,7 @@ rojects/[project_id]/locations/[dataproc_region]/autoscalingPo licies/[policy_id]`` - ``projects/[project_id]/locations/[dat aproc_region]/autoscalingPolicies/[policy_id]`` Note that the - policy must be in the same project and Cloud Dataproc region. + policy must be in the same project and Dataproc region. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.AutoscalingConfig) ), @@ -3421,7 +3418,7 @@ zone_uri: Optional. The zone where the Compute Engine cluster will be located. On a create request, it is required in the "global" - region. If omitted in a non-global Cloud Dataproc region, the + region. If omitted in a non-global Dataproc region, the service will pick a zone in the corresponding Compute Engine region. On a get request, zone will always be present. A full URL, partial URI, or short name are valid. Examples: - ``htt @@ -3457,14 +3454,15 @@ networks, and all off-cluster dependencies must be configured to be accessible without external IP addresses. service_account: - Optional. The service account of the instances. Defaults to - the default Compute Engine service account. Custom service - accounts need permissions equivalent to the following IAM - roles: - roles/logging.logWriter - - roles/storage.objectAdmin (see - https://cloud.google.com/compute/docs/access/service- - accounts#custom\_service\_accounts for more information). - Example: ``[account_id]@[project_id].iam.gserviceaccount.com`` + Optional. 
The `Dataproc service account + `__ (also see `VM + Data Plane identity `__) used by + Dataproc cluster VM instances to access Google Cloud Platform + services. If not specified, the `Compute Engine default + service account `__ is used. service_account_scopes: Optional. The URIs of service account scopes to be included in Compute Engine instances. The following base set of scopes is @@ -3510,9 +3508,9 @@ Optional. The number of VM instances in the instance group. For master instance groups, must be set to 1. instance_names: - Output only. The list of instance names. Cloud Dataproc - derives the names from ``cluster_name``, ``num_instances``, - and the instance group. + Output only. The list of instance names. Dataproc derives the + names from ``cluster_name``, ``num_instances``, and the + instance group. image_uri: Optional. The Compute Engine image resource used for cluster instances. It can be specified or may be inferred from @@ -3524,8 +3522,8 @@ /[project_id]/zones/us-east1-a/machineTypes/n1-standard-2`` - ``projects/[project_id]/zones/us- east1-a/machineTypes/n1-standard-2`` - ``n1-standard-2`` - **Auto Zone Exception**: If you are using the Cloud Dataproc - `Auto Zone Placement `__ feature, you must use the short name of the machine type resource, for example, ``n1-standard-2``. @@ -3543,7 +3541,7 @@ these instances. min_cpu_platform: Specifies the minimum cpu platform for the Instance Group. See - [Cloud Dataproc→Minimum CPU Platform] + [Dataproc→Minimum CPU Platform] (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstanceGroupConfig) @@ -3596,7 +3594,7 @@ ``projects/[project_id]/zones/us- east1-a/acceleratorTypes/nvidia-tesla-k80`` \* ``nvidia- tesla-k80`` **Auto Zone Exception**: If you are using the - Cloud Dataproc `Auto Zone Placement + Dataproc `Auto Zone Placement `__ feature, you must use the short name of the accelerator type resource, for example, @@ -3651,25 +3649,32 @@ Attributes: idle_delete_ttl: - Optional. The duration to keep the cluster alive while idling. - Passing this threshold will cause the cluster to be deleted. - Valid range: **[10m, 14d]**. Example: **"10m"**, the minimum - value, to delete the cluster when it has had no jobs running - for 10 minutes. + Optional. The duration to keep the cluster alive while idling + (when no jobs are running). Passing this threshold will cause + the cluster to be deleted. Minimum value is 10 minutes; + maximum value is 14 days (see JSON representation of `Duration + `__. ttl: Either the exact time the cluster should be deleted at or the cluster maximum age. auto_delete_time: - Optional. The time when cluster will be auto-deleted. + Optional. The time when cluster will be auto-deleted. (see + JSON representation of `Timestamp + `__). auto_delete_ttl: Optional. The lifetime duration of cluster. The cluster will - be auto-deleted at the end of this period. Valid range: - **[10m, 14d]**. Example: **"1d"**, to delete the cluster 1 - day after its creation.. + be auto-deleted at the end of this period. Minimum value is 10 + minutes; maximum value is 14 days (see JSON representation of + `Duration `__). idle_start_time: Output only. The time when cluster became idle (most recent - job finished) and became eligible for deletion due to - idleness. + job finished) and became eligible for deletion due to idleness + (see JSON representation of `Timestamp + `__). 
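The LifecycleConfig TTL fields documented above take protobuf Duration/Timestamp values, which are easy to get wrong when passed as dicts. Below is a minimal sketch (not part of this patch) of wiring them up with the v1beta2 client; the project, region, cluster name, and regional endpoint override are all placeholder assumptions.

```python
# Sketch only: LifecycleConfig TTLs via a plain dict. All identifiers below
# (project, region, cluster name, endpoint) are placeholder assumptions.
from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.ClusterControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

cluster = {
    "project_id": "my-project",
    "cluster_name": "ttl-demo",
    "config": {
        "lifecycle_config": {
            # Durations use their JSON form: seconds plus optional nanos.
            "idle_delete_ttl": {"seconds": 600},         # 10m, the documented minimum
            "auto_delete_ttl": {"seconds": 14 * 86400},  # 14d, the documented maximum
        }
    },
}

operation = client.create_cluster("my-project", "us-central1", cluster)
operation.result()  # blocks until cluster creation completes
```

The `create_cluster(project_id, region, cluster)` calling convention matches the method signature this patch declares for CreateCluster; the dict is converted to the `Cluster` proto by the generated client.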
""", # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LifecycleConfig) ), @@ -3782,10 +3787,12 @@ Required. Cloud Storage URI of executable file. execution_timeout: Optional. Amount of time executable has to complete. Default - is 10 minutes. Cluster creation fails with an explanatory - error message (the name of the executable that caused the - error and the exceeded timeout period) if the executable is - not completed at end of the timeout period. + is 10 minutes (see JSON representation of `Duration + `__). Cluster creation fails with an + explanatory error message (the name of the executable that + caused the error and the exceeded timeout period) if the + executable is not completed at end of the timeout period. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.NodeInitializationAction) ), @@ -3807,7 +3814,10 @@ detail: Output only. Optional details of cluster's state. state_start_time: - Output only. Time when this state was entered. + Output only. Time when this state was entered (see JSON + representation of `Timestamp + `__). substate: Output only. Additional state information that includes status reported by the agent. @@ -3839,7 +3849,7 @@ Attributes: image_version: Optional. The version of software inside the cluster. It must - be one of the supported `Cloud Dataproc Versions + be one of the supported `Dataproc Versions `__, such as "1.2" (including a subminor version, such as "1.2.29"), or the @@ -3925,8 +3935,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster: Required. The cluster to create. request_id: @@ -3961,8 +3970,7 @@ Required. The ID of the Google Cloud Platform project the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. cluster: @@ -3974,8 +3982,10 @@ wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum allowed timeout - is 1 day. Only supported on Dataproc image versions 1.2 and - higher. + is 1 day (see JSON representation of `Duration + `__). Only supported on Dataproc + image versions 1.2 and higher. update_mask: Required. Specifies the path, relative to ``Cluster``, of the field to update. For example, to change the number of workers @@ -4053,8 +4063,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. cluster_uuid: @@ -4094,8 +4103,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. """, @@ -4118,8 +4126,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. filter: Optional. A filter constraining the clusters to list. 
Filters are case-sensitive and have the following syntax: field = @@ -4184,8 +4191,7 @@ Required. The ID of the Google Cloud Platform project that the cluster belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. cluster_name: Required. The cluster name. """, @@ -4287,6 +4293,8 @@ _DISKCONFIG.fields_by_name["boot_disk_type"]._options = None _DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None _LIFECYCLECONFIG.fields_by_name["idle_delete_ttl"]._options = None +_LIFECYCLECONFIG.fields_by_name["auto_delete_time"]._options = None +_LIFECYCLECONFIG.fields_by_name["auto_delete_ttl"]._options = None _LIFECYCLECONFIG.fields_by_name["idle_start_time"]._options = None _KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None _KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None @@ -4356,8 +4364,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=6587, - serialized_end=8354, + serialized_start=6570, + serialized_end=8337, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", diff --git a/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py index de982140..7623f9d7 100644 --- a/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py +++ b/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py @@ -60,7 +60,7 @@ class ClusterControllerServicer(object): def CreateCluster(self, request, context): """Creates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -69,7 +69,7 @@ def CreateCluster(self, request, context): def UpdateCluster(self, request, context): """Updates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -78,7 +78,7 @@ def UpdateCluster(self, request, context): def DeleteCluster(self, request, context): """Deletes a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -101,11 +101,11 @@ def ListClusters(self, request, context): def DiagnoseCluster(self, request, context): """Gets cluster diagnostic information. 
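Because DiagnoseCluster is a long-running operation whose v1beta2 response type is `google.protobuf.Empty` (per the service options in this patch), callers wait on the returned operation rather than inspecting its return value. A hedged sketch, with placeholder identifiers:

```python
# Sketch only: trigger diagnostics and wait. Identifiers are placeholders.
from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.ClusterControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

operation = client.diagnose_cluster("my-project", "us-central1", "my-cluster")
operation.result()  # v1beta2 resolves to google.protobuf.Empty
# Progress and cluster details are surfaced via ClusterOperationMetadata.
```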
The returned [Operation.metadata][google.longrunning.Operation.metadata] will be - [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). + [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). After the operation completes, [Operation.response][google.longrunning.Operation.response] contains - [Empty](google.protobuf.Empty). + [Empty][google.protobuf.Empty]. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/google/cloud/dataproc_v1beta2/proto/jobs.proto b/google/cloud/dataproc_v1beta2/proto/jobs.proto index c1e643c9..3208822f 100644 --- a/google/cloud/dataproc_v1beta2/proto/jobs.proto +++ b/google/cloud/dataproc_v1beta2/proto/jobs.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -20,6 +19,7 @@ package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; +import "google/longrunning/operations.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -70,9 +70,9 @@ service JobController { // Starts a job cancellation request. To access the job resource // after cancellation, call - // [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) + // [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) // or - // [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). + // [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). rpc CancelJob(CancelJobRequest) returns (Job) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" @@ -132,7 +132,7 @@ message LoggingConfig { map driver_log_levels = 2; } -// A Cloud Dataproc job for running +// A Dataproc job for running // [Apache Hadoop // MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) // jobs on [Apache Hadoop @@ -159,33 +159,33 @@ message HadoopJob { // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as // job properties, since a collision may occur that causes an incorrect job // submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Jar file URIs to add to the CLASSPATHs of the // Hadoop driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied // to the working directory of Hadoop drivers and distributed tasks. Useful // for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
HCFS URIs of archives to be extracted in the working directory of // Hadoop drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Hadoop. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site and // classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) +// A Dataproc job for running [Apache Spark](http://spark.apache.org/) // applications on YARN. // The specification of the main method to call to drive the job. // Specify either the jar file that contains the main class or the main class @@ -205,32 +205,32 @@ message SparkJob { // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Spark driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Spark drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory // of Spark drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Spark. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running +// A Dataproc job for running // [Apache // PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) // applications on YARN. @@ -242,32 +242,32 @@ message PySparkJob { // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 2; + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS file URIs of Python files to pass to the PySpark // framework. Supported file types: .py, .egg, and .zip. 
- repeated string python_file_uris = 3; + repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Python driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Python drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure PySpark. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A list of queries to run on a cluster. @@ -289,7 +289,7 @@ message QueryList { repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } -// A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) +// A Dataproc job for running [Apache Hive](https://hive.apache.org/) // queries on YARN. message HiveJob { // Required. The sequence of Hive queries to execute, specified as either @@ -305,25 +305,25 @@ message HiveJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the // Hive command: `SET name="value";`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names and values, used to configure Hive. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/hive/conf/hive-site.xml, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of the // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes // and UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Spark +// A Dataproc job for running [Apache Spark // SQL](http://spark.apache.org/sql/) queries. message SparkSqlJob { // Required. The sequence of Spark SQL queries to execute, specified as @@ -338,21 +338,21 @@ message SparkSqlJob { // Optional. Mapping of query variable names to values (equivalent to the // Spark SQL command: SET `name="value";`). 
- map script_variables = 3; + map script_variables = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure // Spark SQL's SparkConf. Properties that conflict with values set by the - // Cloud Dataproc API may be overwritten. - map properties = 4; + // Dataproc API may be overwritten. + map properties = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 56; + repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6; + LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) +// A Dataproc job for running [Apache Pig](https://pig.apache.org/) // queries on YARN. message PigJob { // Required. The sequence of Pig queries to execute, specified as an HCFS @@ -368,27 +368,27 @@ message PigJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the Pig // command: `name=[value]`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Pig. - // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/pig/conf/pig.properties, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 7; + LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } -// A Cloud Dataproc job for running +// A Dataproc job for running // [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html) // applications on YARN. message SparkRJob { @@ -399,38 +399,38 @@ message SparkRJob { // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 2; + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // R drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 3; + repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // Spark drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 4; + repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure SparkR. 
- // Properties that conflict with values set by the Cloud Dataproc API may be + // Properties that conflict with values set by the Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6; + LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; } -// Cloud Dataproc job config. +// Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; - // Output only. A cluster UUID generated by the Cloud Dataproc service when + // Output only. A cluster UUID generated by the Dataproc service when // the job is submitted. - string cluster_uuid = 2; + string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } -// Cloud Dataproc job status. +// Dataproc job status. message JobStatus { // The job state. enum State { @@ -488,7 +488,7 @@ message JobStatus { QUEUED = 2; // The agent-reported status is out of date, which may be caused by a - // loss of communication between the agent and Cloud Dataproc. If the + // loss of communication between the agent and Dataproc. If the // agent does not send a timely update, the job will fail. // // Applies to RUNNING state. @@ -496,18 +496,18 @@ message JobStatus { } // Output only. A state message specifying the overall job state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Optional job state details, such as an error + // Output only. Optional Job state details, such as an error // description if the state is ERROR. - string details = 2; + string details = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time when this state was entered. - google.protobuf.Timestamp state_start_time = 6; + google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information, which includes // status reported by the agent. - Substate substate = 7; + Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Encapsulates the full scoping used to reference a job. @@ -517,12 +517,11 @@ message JobReference { string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The job ID, which must be unique within the project. - // // The ID must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), or hyphens (-). The maximum length is 100 characters. // // If not specified by the caller, the job ID will be provided by the server. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; } // A YARN application created by a job. Application information is a subset of @@ -571,20 +570,20 @@ message YarnApplication { // Output only. The numerical progress of the application, from 1 to 100. float progress = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or + // Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or // TimelineServer that provides application-specific information. The URL uses // the internal hostname, and requires a proxy server for resolution and, // possibly, access. string tracking_url = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } -// A Cloud Dataproc job resource. 
+// A Dataproc job resource. message Job { // Optional. The fully qualified reference to the job, which can be used to // obtain the equivalent REST path of the job resource. If this property // is not specified when a job is created, the server generates a // job_id. - JobReference reference = 1; + JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. Job information, including how, when, and where to // run the job. @@ -592,54 +591,47 @@ message Job { // Required. The application/framework-specific portion of the job. oneof type_job { - // Job is a Hadoop job. HadoopJob hadoop_job = 3; - // Job is a Spark job. SparkJob spark_job = 4; - // Job is a Pyspark job. PySparkJob pyspark_job = 5; - // Job is a Hive job. HiveJob hive_job = 6; - // Job is a Pig job. PigJob pig_job = 7; - // Job is a SparkR job. SparkRJob spark_r_job = 21; - // Job is a SparkSql job. SparkSqlJob spark_sql_job = 12; } // Output only. The job status. Additional application-specific // status information may be contained in the type_job // and yarn_applications fields. - JobStatus status = 8; + JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous job status. - repeated JobStatus status_history = 13; + repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The collection of YARN applications spun up by this job. // // **Beta** Feature: This report is available for testing purposes only. It // may be changed before final release. - repeated YarnApplication yarn_applications = 9; + repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The email address of the user submitting the job. For jobs // submitted on the cluster, the address is username@hostname. - string submitted_by = 10; + string submitted_by = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A URI pointing to the location of the stdout of the job's // driver program. - string driver_output_resource_uri = 17; + string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. If present, the location of miscellaneous control files // which may be used as part of job setup and handling. If not present, // control files may be placed in the same location as `driver_output_uri`. - string driver_control_files_uri = 15; + string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this job. // Label **keys** must contain 1 to 63 characters, and must conform to @@ -648,15 +640,15 @@ message Job { // characters, and must conform to [RFC // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be // associated with a job. - map labels = 18; + map labels = 18 [(google.api.field_behavior) = OPTIONAL]; // Optional. Job scheduling configuration. - JobScheduling scheduling = 20; + JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; // Output only. A UUID that uniquely identifies a job within the project // over time. This is in contrast to a user-settable reference.job_id that // may be reused over time. - string job_uuid = 22; + string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Job scheduling options. @@ -669,7 +661,7 @@ message JobScheduling { // 4 times within 10 minute window. // // Maximum value is 10. 
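To make the `max_failures_per_hour` scheduling semantics above concrete, here is a hedged sketch of submitting a restartable PySpark job with the v1beta2 job client; all names, the region, and the Cloud Storage URI are placeholder assumptions.

```python
# Sketch only: a restartable PySpark job. Names, region, and the GCS URI
# are placeholder assumptions.
from google.cloud import dataproc_v1beta2

job_client = dataproc_v1beta2.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

job = {
    "placement": {"cluster_name": "my-cluster"},
    "pyspark_job": {"main_python_file_uri": "gs://my-bucket/word_count.py"},
    # Driver may be restarted up to 5 times per hour before the job fails
    # (10 is the documented maximum).
    "scheduling": {"max_failures_per_hour": 5},
}

submitted = job_client.submit_job("my-project", "us-central1", job)
print(submitted.reference.job_id, submitted.status.state)
```

The `submit_job(project_id, region, job)` calling convention matches the SubmitJob method signature declared in this patch.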
- int32 max_failures_per_hour = 1; + int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; } // A request to submit a job. @@ -678,7 +670,7 @@ message SubmitJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. @@ -695,7 +687,7 @@ message SubmitJobRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to get the resource representation for a job in a project. @@ -704,7 +696,7 @@ message GetJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -730,25 +722,25 @@ message ListJobsRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. If set, the returned jobs list includes only jobs that were // submitted to the named cluster. - string cluster_name = 4; + string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies enumerated categories of jobs to list. // (default = match ALL jobs). // // If `filter` is provided, `jobStateMatcher` will be ignored. - JobStateMatcher job_state_matcher = 5; + JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. A filter constraining the jobs to list. Filters are // case-sensitive and have the following syntax: @@ -764,7 +756,7 @@ message ListJobsRequest { // Example filter: // // status.state = ACTIVE AND labels.env = staging AND labels.starred = * - string filter = 7; + string filter = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a job. @@ -773,7 +765,7 @@ message UpdateJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -794,12 +786,12 @@ message UpdateJobRequest { // A list of jobs in a project. message ListJobsResponse { // Output only. Jobs list. - repeated Job jobs = 1; + repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. This token is included in the response if there are more results // to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListJobsRequest. 
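Callers rarely need to thread `next_page_token` by hand: the generated client wraps ListJobs in a pager that fetches follow-up pages transparently. A sketch, assuming placeholder identifiers and the job filter syntax documented earlier in this patch:

```python
# Sketch only: iterate jobs across pages; the pager consumes next_page_token
# internally. Identifiers are placeholder assumptions.
from google.cloud import dataproc_v1beta2

job_client = dataproc_v1beta2.JobControllerClient(
    client_options={"api_endpoint": "us-central1-dataproc.googleapis.com:443"}
)

for job in job_client.list_jobs(
    "my-project",
    "us-central1",
    filter_="status.state = ACTIVE AND labels.starred = *",
):
    print(job.reference.job_id)
```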
- string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; } // A request to cancel a job. @@ -808,7 +800,7 @@ message CancelJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. @@ -821,7 +813,7 @@ message DeleteJobRequest { // belongs to. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The Cloud Dataproc region in which to handle the request. + // Required. The Dataproc region in which to handle the request. string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. diff --git a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py index c40e358b..37e4a1bb 100644 --- a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py @@ -18,6 +18,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -31,12 +34,13 @@ "\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 
\x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdf\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa7\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 
\x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"?\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"7\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\xaa\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.StateB\x03\xe0\x41\x03\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x03\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x03"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb8\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12\x43\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x12\n\nrequest_id\x18\x04 \x01(\t"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x9f\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xc1\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\xfb\t\n\rJobController\x12\xc2\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"`\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x17project_id, region, job\x12\xbe\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"b\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x12\xdb\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"n\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, filter\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xce\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"l\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x1aproject_id, region, job_id\x12\xb8\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, 
job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xfb\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf9\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12P\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\x82\x03\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12R\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xc4\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12 
\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12Z\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12O\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf9\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12^\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12S\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\x8c\x04\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12Y\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xc0\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntryB\x03\xe0\x41\x01\x12I\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe0\x03\n\tJobStatus\x12\x42\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.StateB\x03\xe0\x41\x03\x12\x14\n\x07\x64\x65tails\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x08substate\x18\x07 
\x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xaa\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.StateB\x03\xe0\x41\x03\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x03\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x03"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xea\x08\n\x03Job\x12\x43\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReferenceB\x03\xe0\x41\x01\x12\x43\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12=\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatusB\x03\xe0\x41\x03\x12\x45\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatusB\x03\xe0\x41\x03\x12N\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplicationB\x03\xe0\x41\x03\x12\x19\n\x0csubmitted_by\x18\n \x01(\tB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntryB\x03\xe0\x41\x01\x12\x45\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8f\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xb8\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12^\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 \x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xc1\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"g\n\x10ListJobsResponse\x12\x35\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\xfb\t\n\rJobController\x12\xc2\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"`\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x17project_id, region, job\x12\xbe\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"b\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x12\xdb\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"n\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, filter\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xce\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"l\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x1aproject_id, region, job_id\x12\xb8\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=485, - serialized_end=597, + serialized_start=522, + serialized_end=634, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -136,8 +140,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3696, - serialized_end=3865, + serialized_start=3943, + serialized_end=4112, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -162,8 +166,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3867, - serialized_end=3939, + serialized_start=4114, + serialized_end=4186, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -207,8 +211,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4162, - serialized_end=4297, + serialized_start=4414, + serialized_end=4549, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -230,8 +234,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5889, - serialized_end=5943, + serialized_start=6226, + serialized_end=6280, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -288,8 +292,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=377, - serialized_end=483, + serialized_start=414, + serialized_end=520, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -326,8 +330,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=266, - serialized_end=597, + serialized_start=303, + serialized_end=634, ) @@ -383,8 +387,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _HADOOPJOB = _descriptor.Descriptor( @@ -445,7 +449,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -463,7 +467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -481,7 +485,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -499,7 +503,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -517,7 +521,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -535,7 +539,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -555,8 +559,8 @@ fields=[], ) ], - serialized_start=600, - serialized_end=949, + serialized_start=637, + serialized_end=1016, ) @@ -612,8 +616,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _SPARKJOB = _descriptor.Descriptor( @@ -674,7 +678,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -692,7 +696,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -710,7 +714,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -728,7 +732,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -746,7 +750,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -764,7 +768,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -784,8 +788,8 @@ fields=[], ) ], - serialized_start=952, - serialized_end=1299, + serialized_start=1019, + serialized_end=1396, ) @@ -841,8 +845,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -885,7 +889,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -903,7 +907,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -921,7 +925,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -939,7 +943,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -957,7 +961,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -975,7 +979,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -993,7 +997,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1005,8 +1009,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1302, - serialized_end=1653, + serialized_start=1399, + serialized_end=1785, ) @@ -1044,8 +1048,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1655, - serialized_end=1688, + serialized_start=1787, + serialized_end=1820, ) @@ -1101,8 +1105,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2007, - serialized_end=2061, + serialized_start=2159, + serialized_end=2213, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1157,8 +1161,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _HIVEJOB = _descriptor.Descriptor( @@ -1219,7 +1223,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1237,7 +1241,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1255,7 +1259,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1273,7 +1277,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1293,8 +1297,8 @@ fields=[], ) ], - serialized_start=1691, - serialized_end=2123, + serialized_start=1823, + serialized_end=2275, ) @@ -1350,8 +1354,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2007, - serialized_end=2061, + serialized_start=2159, + serialized_end=2213, ) _SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1406,8 +1410,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1468,7 +1472,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1486,7 +1490,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1504,7 +1508,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1522,7 +1526,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1542,8 +1546,8 @@ fields=[], ) ], - serialized_start=2126, - serialized_end=2611, + serialized_start=2278, + serialized_end=2783, ) @@ -1599,8 +1603,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2007, - serialized_end=2061, + serialized_start=2159, + serialized_end=2213, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1655,8 +1659,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _PIGJOB = _descriptor.Descriptor( @@ -1717,7 +1721,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1735,7 +1739,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1753,7 +1757,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1771,7 +1775,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1789,7 +1793,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], 
@@ -1809,8 +1813,8 @@ fields=[], ) ], - serialized_start=2614, - serialized_end=3113, + serialized_start=2786, + serialized_end=3310, ) @@ -1866,8 +1870,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=939, + serialized_start=957, + serialized_end=1006, ) _SPARKRJOB = _descriptor.Descriptor( @@ -1910,7 +1914,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1928,7 +1932,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1946,7 +1950,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1964,7 +1968,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1982,7 +1986,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1994,8 +1998,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3116, - serialized_end=3411, + serialized_start=3313, + serialized_end=3633, ) @@ -2039,7 +2043,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2051,8 +2055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3413, - serialized_end=3476, + serialized_start=3635, + serialized_end=3703, ) @@ -2078,7 +2082,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2096,7 +2100,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2114,7 +2118,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2132,7 +2136,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2144,8 +2148,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3479, - serialized_end=3939, + serialized_start=3706, + serialized_end=4186, ) @@ -2189,7 +2193,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2201,8 +2205,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3941, - serialized_end=3996, + serialized_start=4188, + serialized_end=4248, ) @@ -2294,8 +2298,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3999, - serialized_end=4297, + serialized_start=4251, + serialized_end=4549, ) @@ -2351,8 +2355,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5323, - serialized_end=5368, + serialized_start=5625, + serialized_end=5670, ) _JOB = _descriptor.Descriptor( @@ -2377,7 +2381,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2539,7 +2543,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2557,7 +2561,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2575,7 +2579,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2593,7 +2597,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2611,7 +2615,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2629,7 +2633,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2647,7 +2651,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2665,7 +2669,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2683,7 +2687,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2703,8 +2707,8 @@ fields=[], ) ], - serialized_start=4300, - serialized_end=5380, + serialized_start=4552, + serialized_end=5682, ) @@ -2730,7 +2734,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2742,8 +2746,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5382, - serialized_end=5428, + serialized_start=5684, + serialized_end=5735, ) @@ -2823,7 +2827,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2835,8 +2839,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5431, - serialized_end=5569, + serialized_start=5738, + serialized_end=5881, ) @@ -2910,8 +2914,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5571, - serialized_end=5653, + serialized_start=5883, + serialized_end=5965, ) @@ -2973,7 +2977,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2991,7 +2995,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3009,7 +3013,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3027,7 +3031,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3045,7 +3049,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3057,8 +3061,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5656, - serialized_end=5943, + serialized_start=5968, + serialized_end=6280, ) @@ -3168,8 +3172,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5946, - serialized_end=6139, + serialized_start=6283, + serialized_end=6476, ) @@ -3195,7 +3199,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3213,7 +3217,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3225,8 +3229,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6141, - serialized_end=6234, + serialized_start=6478, + serialized_end=6581, ) @@ -3300,8 +3304,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6236, - serialized_end=6321, + serialized_start=6583, + serialized_end=6668, ) @@ -3375,8 +3379,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6323, - serialized_end=6408, + serialized_start=6670, + serialized_end=6755, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3589,7 +3593,7 @@ ), DESCRIPTOR=_HADOOPJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Hadoop + __doc__="""A Dataproc job for running `Apache Hadoop MapReduce `__ jobs on `Apache Hadoop YARN `__. @@ -3631,9 +3635,8 @@ properties: Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/hadoop/conf/\*-site and classes in user - code. + the Dataproc API may be overwritten. Can include properties + set in /etc/hadoop/conf/\*-site and classes in user code. logging_config: Optional. The runtime log config for job execution. """, @@ -3658,7 +3661,7 @@ ), DESCRIPTOR=_SPARKJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache Spark `__ applications on YARN. The specification of the main method to call to drive the job. Specify either the jar file that contains the main class or the main class name. @@ -3693,9 +3696,9 @@ properties: Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/spark/conf/spark-defaults.conf and classes in user + code. logging_config: Optional. The runtime log config for job execution. """, @@ -3720,7 +3723,7 @@ ), DESCRIPTOR=_PYSPARKJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache PySpark `__ applications on YARN. @@ -3750,9 +3753,9 @@ properties: Optional. 
A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/spark/conf/spark-defaults.conf and classes in user + code. logging_config: Optional. The runtime log config for job execution. """, @@ -3810,7 +3813,7 @@ ), DESCRIPTOR=_HIVEJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache Hive `__ queries on YARN. @@ -3832,9 +3835,9 @@ properties: Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/hadoop/conf/\*-site.xml, - /etc/hive/conf/hive-site.xml, and classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/hadoop/conf/\*-site.xml, /etc/hive/conf/hive- + site.xml, and classes in user code. jar_file_uris: Optional. HCFS URIs of jar files to add to the CLASSPATH of the Hive server and Hadoop MapReduce (MR) tasks. Can contain @@ -3871,7 +3874,7 @@ ), DESCRIPTOR=_SPARKSQLJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache Spark + __doc__="""A Dataproc job for running `Apache Spark SQL `__ queries. @@ -3889,7 +3892,7 @@ properties: Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with - values set by the Cloud Dataproc API may be overwritten. + values set by the Dataproc API may be overwritten. jar_file_uris: Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. @@ -3927,7 +3930,7 @@ ), DESCRIPTOR=_PIGJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache Pig `__ queries on YARN. @@ -3949,9 +3952,9 @@ properties: Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the - Cloud Dataproc API may be overwritten. Can include properties - set in /etc/hadoop/conf/\*-site.xml, - /etc/pig/conf/pig.properties, and classes in user code. + Dataproc API may be overwritten. Can include properties set in + /etc/hadoop/conf/\*-site.xml, /etc/pig/conf/pig.properties, + and classes in user code. jar_file_uris: Optional. HCFS URIs of jar files to add to the CLASSPATH of the Pig Client and Hadoop MapReduce (MR) tasks. Can contain @@ -3981,7 +3984,7 @@ ), DESCRIPTOR=_SPARKRJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job for running `Apache + __doc__="""A Dataproc job for running `Apache SparkR `__ applications on YARN. @@ -4006,9 +4009,9 @@ properties: Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by - the Cloud Dataproc API may be overwritten. Can include - properties set in /etc/spark/conf/spark-defaults.conf and - classes in user code. + the Dataproc API may be overwritten. Can include properties + set in /etc/spark/conf/spark-defaults.conf and classes in user + code. logging_config: Optional. The runtime log config for job execution. 
""", @@ -4024,7 +4027,7 @@ dict( DESCRIPTOR=_JOBPLACEMENT, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Cloud Dataproc job config. + __doc__="""Dataproc job config. Attributes: @@ -4032,8 +4035,8 @@ Required. The name of the cluster where the job will be submitted. cluster_uuid: - Output only. A cluster UUID generated by the Cloud Dataproc - service when the job is submitted. + Output only. A cluster UUID generated by the Dataproc service + when the job is submitted. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.JobPlacement) ), @@ -4046,14 +4049,14 @@ dict( DESCRIPTOR=_JOBSTATUS, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""Cloud Dataproc job status. + __doc__="""Dataproc job status. Attributes: state: Output only. A state message specifying the overall job state. details: - Output only. Optional job state details, such as an error + Output only. Optional Job state details, such as an error description if the state is ERROR. state_start_time: Output only. The time when this state was entered. @@ -4114,7 +4117,7 @@ Output only. The numerical progress of the application, from 1 to 100. tracking_url: - Optional. Output only. The HTTP URL of the ApplicationMaster, + Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or TimelineServer that provides application- specific information. The URL uses the internal hostname, and requires a proxy server for resolution and, possibly, access. @@ -4139,7 +4142,7 @@ ), DESCRIPTOR=_JOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", - __doc__="""A Cloud Dataproc job resource. + __doc__="""A Dataproc job resource. Attributes: @@ -4154,20 +4157,6 @@ type_job: Required. The application/framework-specific portion of the job. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_r_job: - Job is a SparkR job. - spark_sql_job: - Job is a SparkSql job. status: Output only. The job status. Additional application-specific status information may be contained in the type\_job and @@ -4247,8 +4236,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job: Required. The job resource. request_id: @@ -4283,8 +4271,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. """, @@ -4307,8 +4294,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. page_size: Optional. The number of results to return in each response. page_token: @@ -4352,8 +4338,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. job: @@ -4407,8 +4392,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. 
The Dataproc region in which to handle the request. job_id: Required. The job ID. """, @@ -4431,8 +4415,7 @@ Required. The ID of the Google Cloud Platform project that the job belongs to. region: - Required. The Cloud Dataproc region in which to handle the - request. + Required. The Dataproc region in which to handle the request. job_id: Required. The job ID. """, @@ -4445,39 +4428,101 @@ DESCRIPTOR._options = None _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None _HADOOPJOB_PROPERTIESENTRY._options = None +_HADOOPJOB.fields_by_name["args"]._options = None +_HADOOPJOB.fields_by_name["jar_file_uris"]._options = None +_HADOOPJOB.fields_by_name["file_uris"]._options = None +_HADOOPJOB.fields_by_name["archive_uris"]._options = None +_HADOOPJOB.fields_by_name["properties"]._options = None +_HADOOPJOB.fields_by_name["logging_config"]._options = None _SPARKJOB_PROPERTIESENTRY._options = None +_SPARKJOB.fields_by_name["args"]._options = None +_SPARKJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKJOB.fields_by_name["file_uris"]._options = None +_SPARKJOB.fields_by_name["archive_uris"]._options = None +_SPARKJOB.fields_by_name["properties"]._options = None +_SPARKJOB.fields_by_name["logging_config"]._options = None _PYSPARKJOB_PROPERTIESENTRY._options = None _PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None +_PYSPARKJOB.fields_by_name["args"]._options = None +_PYSPARKJOB.fields_by_name["python_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["jar_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["file_uris"]._options = None +_PYSPARKJOB.fields_by_name["archive_uris"]._options = None +_PYSPARKJOB.fields_by_name["properties"]._options = None +_PYSPARKJOB.fields_by_name["logging_config"]._options = None _QUERYLIST.fields_by_name["queries"]._options = None _HIVEJOB_SCRIPTVARIABLESENTRY._options = None _HIVEJOB_PROPERTIESENTRY._options = None +_HIVEJOB.fields_by_name["continue_on_failure"]._options = None +_HIVEJOB.fields_by_name["script_variables"]._options = None +_HIVEJOB.fields_by_name["properties"]._options = None +_HIVEJOB.fields_by_name["jar_file_uris"]._options = None _SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None _SPARKSQLJOB_PROPERTIESENTRY._options = None +_SPARKSQLJOB.fields_by_name["script_variables"]._options = None +_SPARKSQLJOB.fields_by_name["properties"]._options = None +_SPARKSQLJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKSQLJOB.fields_by_name["logging_config"]._options = None _PIGJOB_SCRIPTVARIABLESENTRY._options = None _PIGJOB_PROPERTIESENTRY._options = None +_PIGJOB.fields_by_name["continue_on_failure"]._options = None +_PIGJOB.fields_by_name["script_variables"]._options = None +_PIGJOB.fields_by_name["properties"]._options = None +_PIGJOB.fields_by_name["jar_file_uris"]._options = None +_PIGJOB.fields_by_name["logging_config"]._options = None _SPARKRJOB_PROPERTIESENTRY._options = None _SPARKRJOB.fields_by_name["main_r_file_uri"]._options = None +_SPARKRJOB.fields_by_name["args"]._options = None +_SPARKRJOB.fields_by_name["file_uris"]._options = None +_SPARKRJOB.fields_by_name["archive_uris"]._options = None +_SPARKRJOB.fields_by_name["properties"]._options = None +_SPARKRJOB.fields_by_name["logging_config"]._options = None _JOBPLACEMENT.fields_by_name["cluster_name"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_uuid"]._options = None +_JOBSTATUS.fields_by_name["state"]._options = None +_JOBSTATUS.fields_by_name["details"]._options = None +_JOBSTATUS.fields_by_name["state_start_time"]._options = None 
+_JOBSTATUS.fields_by_name["substate"]._options = None _JOBREFERENCE.fields_by_name["project_id"]._options = None +_JOBREFERENCE.fields_by_name["job_id"]._options = None _YARNAPPLICATION.fields_by_name["name"]._options = None _YARNAPPLICATION.fields_by_name["state"]._options = None _YARNAPPLICATION.fields_by_name["progress"]._options = None _YARNAPPLICATION.fields_by_name["tracking_url"]._options = None _JOB_LABELSENTRY._options = None +_JOB.fields_by_name["reference"]._options = None _JOB.fields_by_name["placement"]._options = None +_JOB.fields_by_name["status"]._options = None +_JOB.fields_by_name["status_history"]._options = None +_JOB.fields_by_name["yarn_applications"]._options = None +_JOB.fields_by_name["submitted_by"]._options = None +_JOB.fields_by_name["driver_output_resource_uri"]._options = None +_JOB.fields_by_name["driver_control_files_uri"]._options = None +_JOB.fields_by_name["labels"]._options = None +_JOB.fields_by_name["scheduling"]._options = None +_JOB.fields_by_name["job_uuid"]._options = None +_JOBSCHEDULING.fields_by_name["max_failures_per_hour"]._options = None _SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None _SUBMITJOBREQUEST.fields_by_name["region"]._options = None _SUBMITJOBREQUEST.fields_by_name["job"]._options = None +_SUBMITJOBREQUEST.fields_by_name["request_id"]._options = None _GETJOBREQUEST.fields_by_name["project_id"]._options = None _GETJOBREQUEST.fields_by_name["region"]._options = None _GETJOBREQUEST.fields_by_name["job_id"]._options = None _LISTJOBSREQUEST.fields_by_name["project_id"]._options = None _LISTJOBSREQUEST.fields_by_name["region"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_size"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_token"]._options = None +_LISTJOBSREQUEST.fields_by_name["cluster_name"]._options = None +_LISTJOBSREQUEST.fields_by_name["job_state_matcher"]._options = None +_LISTJOBSREQUEST.fields_by_name["filter"]._options = None _UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None _UPDATEJOBREQUEST.fields_by_name["region"]._options = None _UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None _UPDATEJOBREQUEST.fields_by_name["job"]._options = None _UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_LISTJOBSRESPONSE.fields_by_name["jobs"]._options = None +_LISTJOBSRESPONSE.fields_by_name["next_page_token"]._options = None _CANCELJOBREQUEST.fields_by_name["project_id"]._options = None _CANCELJOBREQUEST.fields_by_name["region"]._options = None _CANCELJOBREQUEST.fields_by_name["job_id"]._options = None @@ -4493,8 +4538,8 @@ serialized_options=_b( "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=6411, - serialized_end=7686, + serialized_start=6758, + serialized_end=8033, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", diff --git a/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py index 9a07fdbb..e9a36ead 100644 --- a/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py +++ b/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py @@ -84,9 +84,9 @@ def UpdateJob(self, request, context): def CancelJob(self, request, context): """Starts a job cancellation request. 
To access the job resource after cancellation, call - [regions/{region}/jobs.list](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) + [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list) or - [regions/{region}/jobs.get](/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). + [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/google/cloud/dataproc_v1beta2/proto/operations.proto b/google/cloud/dataproc_v1beta2/proto/operations.proto index 74cbde3c..2e98fb82 100644 --- a/google/cloud/dataproc_v1beta2/proto/operations.proto +++ b/google/cloud/dataproc_v1beta2/proto/operations.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/google/cloud/dataproc_v1beta2/proto/shared.proto b/google/cloud/dataproc_v1beta2/proto/shared.proto index de1130d9..eba80918 100644 --- a/google/cloud/dataproc_v1beta2/proto/shared.proto +++ b/google/cloud/dataproc_v1beta2/proto/shared.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto b/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto index 2979593d..b8497e83 100644 --- a/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto +++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -33,7 +32,7 @@ option java_outer_classname = "WorkflowTemplatesProto"; option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for managing Workflow Templates in the -// Cloud Dataproc API. +// Dataproc API. service WorkflowTemplateService { option (google.api.default_host) = "dataproc.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; @@ -78,9 +77,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). 
// Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -119,9 +118,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). // Also see [Using - // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -179,7 +178,7 @@ service WorkflowTemplateService { } } -// A Cloud Dataproc workflow template resource. +// A Dataproc workflow template resource. message WorkflowTemplate { option (google.api.resource) = { type: "dataproc.googleapis.com/WorkflowTemplate" @@ -327,22 +326,16 @@ message OrderedJob { // Required. The job definition. oneof job_type { - // Job is a Hadoop job. HadoopJob hadoop_job = 2; - // Job is a Spark job. SparkJob spark_job = 3; - // Job is a Pyspark job. PySparkJob pyspark_job = 4; - // Job is a Hive job. HiveJob hive_job = 5; - // Job is a Pig job. PigJob pig_job = 6; - // Job is a SparkSql job. SparkSqlJob spark_sql_job = 7; } @@ -465,7 +458,7 @@ message ValueValidation { repeated string values = 1; } -// A Cloud Dataproc workflow template resource. +// A Dataproc workflow template resource. message WorkflowMetadata { // The operation state. enum State { @@ -721,9 +714,7 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. // // The `template.version` field must match the current version. - WorkflowTemplate template = 1 [ - (google.api.field_behavior) = REQUIRED - ]; + WorkflowTemplate template = 1 [(google.api.field_behavior) = REQUIRED]; } // A request to list workflow templates in a project. diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py index af679f35..750be3c2 100644 --- a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py +++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py @@ -2497,7 +2497,7 @@ ), DESCRIPTOR=_WORKFLOWTEMPLATE, __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A Cloud Dataproc workflow template resource. + __doc__="""A Dataproc workflow template resource. Attributes: @@ -2688,18 +2688,6 @@ hyphen. Must consist of between 3 and 50 characters. job_type: Required. The job definition. - hadoop_job: - Job is a Hadoop job. - spark_job: - Job is a Spark job. - pyspark_job: - Job is a Pyspark job. - hive_job: - Job is a Hive job. - pig_job: - Job is a Pig job. - spark_sql_job: - Job is a SparkSql job. labels: Optional. The labels to associate with this job. Label keys must be between 1 and 63 characters long. Label values must be between @@ -2859,7 +2847,7 @@ ), DESCRIPTOR=_WORKFLOWMETADATA, __module__="google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2", - __doc__="""A Cloud Dataproc workflow template resource. 
+ __doc__="""A Dataproc workflow template resource. Attributes: diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py index e05372f5..f9ea0bd6 100644 --- a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py +++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py @@ -12,7 +12,7 @@ class WorkflowTemplateServiceStub(object): """The API interface for managing Workflow Templates in the - Cloud Dataproc API. + Dataproc API. """ def __init__(self, channel): @@ -60,7 +60,7 @@ def __init__(self, channel): class WorkflowTemplateServiceServicer(object): """The API interface for managing Workflow Templates in the - Cloud Dataproc API. + Dataproc API. """ def CreateWorkflowTemplate(self, request, context): @@ -94,9 +94,9 @@ def InstantiateWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be @@ -124,9 +124,9 @@ def InstantiateInlineWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using - WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). 
On successful completion, [Operation.response][google.longrunning.Operation.response] will be diff --git a/synth.metadata b/synth.metadata index c746925d..f7c667d5 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,39 +1,25 @@ { - "updateTime": "2020-02-20T23:02:30.926314Z", + "updateTime": "2020-03-03T13:17:24.564852Z", "sources": [ { "generator": { "name": "artman", - "version": "0.45.0", - "dockerImage": "googleapis/artman@sha256:6aec9c34db0e4be221cdaf6faba27bdc07cfea846808b3d3b964dfce3a9a0f9b" - } - }, - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-dataproc.git", - "sha": "3f7fd5bef4ba959b9a6e153fb4ab6d8b6819948e" + "version": "0.47.0", + "dockerImage": "googleapis/artman@sha256:b3e50d6b8de03920b9f065bbc3d210e2ca93a043446f1fa16cdf567393c09678" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01", - "internalRef": "296274723", - "log": "3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\n" - } - }, - { - "git": { - "name": "synthtool", - "remote": "rpc://devrel/cloud/libraries/tools/autosynth", - "sha": "706a38c26db42299845396cdae55db635c38794a" + "sha": "4a180bfff8a21645b3a935c2756e8d6ab18a74e0", + "internalRef": "298484782", + "log": "4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n83c6f84035ee0f80eaa44d8b688a010461cc4080\nUpdate google/api/auth.proto to make AuthProvider to have JwtLocation\n\nPiperOrigin-RevId: 297918498\n\ne9e90a787703ec5d388902e2cb796aaed3a385b4\nDialogflow weekly v2/v2beta1 library update:\n - adding get validation result\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297671458\n\n1a2b05cc3541a5f7714529c665aecc3ea042c646\nAdding .yaml and .json config files.\n\nPiperOrigin-RevId: 297570622\n\ndfe1cf7be44dee31d78f78e485d8c95430981d6e\nPublish `QueryOptions` proto.\n\nIntroduced a `query_options` input in `ExecuteSqlRequest`.\n\nPiperOrigin-RevId: 297497710\n\ndafc905f71e5d46f500b41ed715aad585be062c3\npubsub: revert pull init_rpc_timeout & max_rpc_timeout back to 25 seconds and reset multiplier to 1.0\n\nPiperOrigin-RevId: 297486523\n\nf077632ba7fee588922d9e8717ee272039be126d\nfirestore: add update_transform\n\nPiperOrigin-RevId: 297405063\n\n0aba1900ffef672ec5f0da677cf590ee5686e13b\ncluster: use square brace for cross-reference\n\nPiperOrigin-RevId: 297204568\n\n5dac2da18f6325cbaed54603c43f0667ecd50247\nRestore retry params in gapic config 
because securitycenter has non-standard default retry params.\nRestore a few retry codes for some idempotent methods.\n\nPiperOrigin-RevId: 297196720\n\n1eb61455530252bba8b2c8d4bc9832960e5a56f6\npubsub: v1 replace IAM HTTP rules\n\nPiperOrigin-RevId: 297188590\n\n80b2d25f8d43d9d47024ff06ead7f7166548a7ba\nDialogflow weekly v2/v2beta1 library update:\n - updates to mega agent api\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297187629\n\n0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 296332968\n\n" } }, { "template": { - "name": "python_split_library", + "name": "python_library", "origin": "synthtool.gcp", "version": "2020.2.4" } From 9595d17afe91686dd4b45640f6a87586eb19f1f7 Mon Sep 17 00:00:00 2001 From: "gcf-merge-on-green[bot]" <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> Date: Thu, 5 Mar 2020 18:18:03 +0000 Subject: [PATCH 7/7] chore: release 0.7.0 (#11) :robot: I have created a release \*beep\* \*boop\* --- ## [0.7.0](https://www.github.com/googleapis/python-dataproc/compare/v0.6.1...v0.7.0) (2020-03-05) ### Features * add lifecycle config and reservation affinity support to v1 (via synth) ([#10](https://www.github.com/googleapis/python-dataproc/issues/10)) ([bb36194](https://www.github.com/googleapis/python-dataproc/commit/bb36194d4b0cfb6f2c5a0358625a17c629f71b21)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
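A minimal sketch of the lifecycle-config support this release adds to v1 (not part of the generated surface above; the project, region, and cluster names are placeholders and the 30-minute idle TTL is arbitrary — `idle_delete_ttl` follows the v1 `LifecycleConfig` proto):

```python
from google.cloud import dataproc_v1

# Sketch: create a cluster that Dataproc deletes after 30 idle minutes.
# Project, region, and cluster names below are placeholders.
client = dataproc_v1.ClusterControllerClient()

cluster = {
    "project_id": "my-project",
    "cluster_name": "ephemeral-cluster",
    "config": {
        # lifecycle_config is the v1 addition shipped in this release.
        "lifecycle_config": {"idle_delete_ttl": {"seconds": 1800}},
    },
}

operation = client.create_cluster("my-project", "us-central1", cluster)
operation.result()  # block until the create finishes (or raises)
```

Reservation affinity lands in the same release, surfaced under `config.gce_cluster_config.reservation_affinity`.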
--- CHANGELOG.md | 8 +++++++- setup.py | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 77f57b9c..ddac75c6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-dataproc/#history +## [0.7.0](https://www.github.com/googleapis/python-dataproc/compare/v0.6.1...v0.7.0) (2020-03-05) + + +### Features + +* add lifecycle config and reservation affinity support to v1 (via synth) ([#10](https://www.github.com/googleapis/python-dataproc/issues/10)) ([bb36194](https://www.github.com/googleapis/python-dataproc/commit/bb36194d4b0cfb6f2c5a0358625a17c629f71b21)) + ## 0.6.1 11-12-2019 08:24 PST @@ -155,4 +162,3 @@ - Re-enable lint for tests, remove usage of pylint (#4921) - Normalize all setup.py files (#4909) - diff --git a/setup.py b/setup.py index 74c90047..d17b495e 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-dataproc" description = "Google Cloud Dataproc API client library" -version = "0.6.1" +version = "0.7.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
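As a usage note, a sketch of the cancel-then-poll flow that the corrected `CancelJob` links earlier in this patch describe (the project, region, and job IDs are placeholders):

```python
from google.cloud import dataproc_v1beta2

# Placeholders for illustration only.
project_id, region, job_id = "my-project", "us-central1", "my-job-id"

client = dataproc_v1beta2.JobControllerClient()
client.cancel_job(project_id, region, job_id)  # starts the cancellation request

# Cancellation is asynchronous: re-fetch the job to observe its status,
# mirroring the regions/{region}/jobs.get reference in the docstring.
job = client.get_job(project_id, region, job_id)
print(job.status.state)
```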
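Likewise, a sketch against the `WorkflowTemplateService` documented in this patch, assuming the generated `workflow_template_path` helper; the template name is a placeholder. Per the docs above, the operation's `result()` is `Empty` on success and progress is reported through the `WorkflowMetadata` in `operation.metadata`:

```python
from google.cloud import dataproc_v1beta2

client = dataproc_v1beta2.WorkflowTemplateServiceClient()

# Assumed generated resource-path helper; arguments are placeholders.
name = client.workflow_template_path("my-project", "us-central1", "my-template")

operation = client.instantiate_workflow_template(name)
operation.result()             # Empty on success; raises if the workflow fails
metadata = operation.metadata  # WorkflowMetadata for the running workflow
print(metadata.state)
```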