diff --git a/.flake8 b/.flake8
index 20fe9bda..ed931638 100644
--- a/.flake8
+++ b/.flake8
@@ -21,6 +21,8 @@ exclude =
# Exclude generated code.
**/proto/**
**/gapic/**
+ **/services/**
+ **/types/**
*_pb2.py
# Standard linting exemptions.
diff --git a/.gitignore b/.gitignore
index 3fb06e09..b87e1ed5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
dist
build
eggs
+.eggs
parts
bin
var
@@ -49,6 +50,7 @@ bigquery/docs/generated
# Virtual environment
env/
coverage.xml
+sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 00000000..2a372d92
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-dataproc/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-dataproc/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 00000000..c04328ca
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-dataproc/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-dataproc/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 00000000..7218af14
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 00000000..fd45b1ae
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-dataproc/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-dataproc/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 00000000..30105713
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-dataproc/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-dataproc/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 00000000..50fec964
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 00000000..028210c0
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-dataproc
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the Build Cop Bot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index fd410de8..5a93e0de 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,13 @@
[1]: https://pypi.org/project/google-cloud-dataproc/#history
+### [1.0.1](https://www.github.com/googleapis/python-dataproc/compare/v1.0.0...v1.0.1) (2020-07-16)
+
+
+### Bug Fixes
+
+* correct protobuf type for diagnose_cluster, update retry configs ([#55](https://www.github.com/googleapis/python-dataproc/issues/55)) ([822315e](https://www.github.com/googleapis/python-dataproc/commit/822315ec3f2517ebb6ca199b72156ebd50e0518b))
+
## [1.0.0](https://www.github.com/googleapis/python-dataproc/compare/v0.8.1...v1.0.0) (2020-06-17)
diff --git a/MANIFEST.in b/MANIFEST.in
index 68855abc..e9e29d12 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -20,3 +20,6 @@ recursive-include google *.json *.proto
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 228529ef..6316a537 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index ac3f1331..01872827 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -38,21 +38,18 @@
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
+ "recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
-# Allow markdown includes (so releases.md can include CHANGLEOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
@@ -340,7 +337,7 @@
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
}
diff --git a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py
index 0e3dddd3..c1627925 100644
--- a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py
+++ b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client.py
@@ -41,7 +41,9 @@
from google.protobuf import empty_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class AutoscalingPolicyServiceClient(object):
@@ -100,7 +102,7 @@ def location_path(cls, project, location):
def region_path(cls, project, region):
"""Return a fully-qualified region string."""
return google.api_core.path_template.expand(
- "projects/{project}/regions/{region}", project=project, region=region
+ "projects/{project}/regions/{region}", project=project, region=region,
)
def __init__(
@@ -190,12 +192,12 @@ def __init__(
self.transport = transport
else:
self.transport = autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -206,7 +208,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -216,31 +218,41 @@ def __init__(
self._inner_api_calls = {}
# Service calls
- def update_autoscaling_policy(
+ def create_autoscaling_policy(
self,
+ parent,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
- Updates (replaces) autoscaling policy.
-
- Disabled check for update_mask, because all updates will be full
- replacements.
+ Creates new autoscaling policy.
Example:
>>> from google.cloud import dataproc_v1
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
+ >>> parent = client.region_path('[PROJECT]', '[REGION]')
+ >>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
- >>> response = client.update_autoscaling_policy(policy)
+ >>> response = client.create_autoscaling_policy(parent, policy)
Args:
- policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy.
+ parent (str): Required. The "resource name" of the region or location, as
+ described in https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.autoscalingPolicies.create``, the resource
+ name of the region has the following format:
+ ``projects/{project_id}/regions/{region}``
+
+ - For ``projects.locations.autoscalingPolicies.create``, the resource
+ name of the location has the following format:
+ ``projects/{project_id}/locations/{location}``
+ policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The autoscaling policy to create.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy`
@@ -264,22 +276,24 @@ def update_autoscaling_policy(
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
- if "update_autoscaling_policy" not in self._inner_api_calls:
+ if "create_autoscaling_policy" not in self._inner_api_calls:
self._inner_api_calls[
- "update_autoscaling_policy"
+ "create_autoscaling_policy"
] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.update_autoscaling_policy,
- default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry,
- default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout,
+ self.transport.create_autoscaling_policy,
+ default_retry=self._method_configs["CreateAutoscalingPolicy"].retry,
+ default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout,
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy)
+ request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
+ parent=parent, policy=policy,
+ )
if metadata is None:
metadata = []
metadata = list(metadata)
try:
- routing_header = [("policy.name", policy.name)]
+ routing_header = [("parent", parent)]
except AttributeError:
pass
else:
@@ -288,45 +302,35 @@ def update_autoscaling_policy(
)
metadata.append(routing_metadata)
- return self._inner_api_calls["update_autoscaling_policy"](
+ return self._inner_api_calls["create_autoscaling_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
- def create_autoscaling_policy(
+ def update_autoscaling_policy(
self,
- parent,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
- Creates new autoscaling policy.
+ Updates (replaces) autoscaling policy.
+
+ Disabled check for update_mask, because all updates will be full
+ replacements.
Example:
>>> from google.cloud import dataproc_v1
>>>
>>> client = dataproc_v1.AutoscalingPolicyServiceClient()
>>>
- >>> parent = client.region_path('[PROJECT]', '[REGION]')
- >>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
- >>> response = client.create_autoscaling_policy(parent, policy)
+ >>> response = client.update_autoscaling_policy(policy)
Args:
- parent (str): Required. The "resource name" of the region or location, as
- described in https://cloud.google.com/apis/design/resource_names.
-
- - For ``projects.regions.autoscalingPolicies.create``, the resource
- name of the region has the following format:
- ``projects/{project_id}/regions/{region}``
-
- - For ``projects.locations.autoscalingPolicies.create``, the resource
- name of the location has the following format:
- ``projects/{project_id}/locations/{location}``
- policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The autoscaling policy to create.
+ policy (Union[dict, ~google.cloud.dataproc_v1.types.AutoscalingPolicy]): Required. The updated autoscaling policy.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.dataproc_v1.types.AutoscalingPolicy`
@@ -350,24 +354,24 @@ def create_autoscaling_policy(
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
- if "create_autoscaling_policy" not in self._inner_api_calls:
+ if "update_autoscaling_policy" not in self._inner_api_calls:
self._inner_api_calls[
- "create_autoscaling_policy"
+ "update_autoscaling_policy"
] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_autoscaling_policy,
- default_retry=self._method_configs["CreateAutoscalingPolicy"].retry,
- default_timeout=self._method_configs["CreateAutoscalingPolicy"].timeout,
+ self.transport.update_autoscaling_policy,
+ default_retry=self._method_configs["UpdateAutoscalingPolicy"].retry,
+ default_timeout=self._method_configs["UpdateAutoscalingPolicy"].timeout,
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
- parent=parent, policy=policy
+ request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(
+ policy=policy,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
- routing_header = [("parent", parent)]
+ routing_header = [("policy.name", policy.name)]
except AttributeError:
pass
else:
@@ -376,7 +380,7 @@ def create_autoscaling_policy(
)
metadata.append(routing_metadata)
- return self._inner_api_calls["create_autoscaling_policy"](
+ return self._inner_api_calls["update_autoscaling_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
@@ -441,7 +445,7 @@ def get_autoscaling_policy(
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name)
+ request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name,)
if metadata is None:
metadata = []
metadata = list(metadata)
@@ -541,7 +545,7 @@ def list_autoscaling_policies(
)
request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest(
- parent=parent, page_size=page_size
+ parent=parent, page_size=page_size,
)
if metadata is None:
metadata = []
@@ -630,7 +634,7 @@ def delete_autoscaling_policy(
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name)
+ request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name,)
if metadata is None:
metadata = []
metadata = list(metadata)
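
The reordered create/update methods above keep their documented surface. A hedged usage sketch of create_autoscaling_policy with a concrete policy dict follows (placeholder project and region; the nested field names assume the v1 AutoscalingPolicy proto: basic_algorithm, yarn_config, worker_config):

```python
from google.cloud import dataproc_v1

# Build the parent resource name the same way the docstring example does.
client = dataproc_v1.AutoscalingPolicyServiceClient()
parent = client.region_path("my-project", "us-central1")

# Dict form of AutoscalingPolicy; the GAPIC layer converts it to the proto.
policy = {
    "id": "small-cluster-policy",
    "basic_algorithm": {
        "yarn_config": {
            "graceful_decommission_timeout": {"seconds": 600},
            "scale_up_factor": 0.5,
            "scale_down_factor": 0.5,
        }
    },
    "worker_config": {"max_instances": 10},
}

created = client.create_autoscaling_policy(parent, policy)
print(created.name)
```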
diff --git a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py
index dc98b7bd..7066450a 100644
--- a/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py
+++ b/google/cloud/dataproc_v1/gapic/autoscaling_policy_service_client_config.py
@@ -2,45 +2,136 @@
"interfaces": {
"google.cloud.dataproc.v1.AutoscalingPolicyService": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
+ "retry_policy_4_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "retry_policy_6_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": ["UNAVAILABLE"],
+ "retry_policy_2_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "no_retry_1_codes": [],
+ "retry_policy_5_codes": ["UNAVAILABLE"],
+ "retry_policy_7_codes": ["UNAVAILABLE"],
},
"retry_params": {
- "default": {
+ "retry_policy_1_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
+ "max_rpc_timeout_millis": 600000,
"total_timeout_millis": 600000,
- }
+ },
+ "retry_policy_3_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_2_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "retry_policy_6_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_7_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "retry_policy_5_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_4_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ "no_retry_1_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
},
"methods": {
- "UpdateAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
"CreateAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "no_retry_1_codes",
+ "retry_params_name": "no_retry_1_params",
+ },
+ "UpdateAutoscalingPolicy": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"GetAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"ListAutoscalingPolicies": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"DeleteAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "no_retry_1_codes",
+ "retry_params_name": "no_retry_1_params",
},
},
}
diff --git a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
index 1e703ed5..9b01fa67 100644
--- a/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
+++ b/google/cloud/dataproc_v1/gapic/cluster_controller_client.py
@@ -45,7 +45,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class ClusterControllerClient(object):
@@ -168,12 +170,12 @@ def __init__(
self.transport = transport
else:
self.transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -184,7 +186,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -284,7 +286,10 @@ def create_cluster(
)
request = clusters_pb2.CreateClusterRequest(
- project_id=project_id, region=region, cluster=cluster, request_id=request_id
+ project_id=project_id,
+ region=region,
+ cluster=cluster,
+ request_id=request_id,
)
operation = self._inner_api_calls["create_cluster"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -595,6 +600,95 @@ def delete_cluster(
metadata_type=proto_operations_pb2.ClusterOperationMetadata,
)
+ def diagnose_cluster(
+ self,
+ project_id,
+ region,
+ cluster_name,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets cluster diagnostic information. The returned
+ ``Operation.metadata`` will be
+        `ClusterOperationMetadata `__.
+ After the operation completes, ``Operation.response`` contains
+ `DiagnoseClusterResults `__.
+
+ Example:
+ >>> from google.cloud import dataproc_v1
+ >>>
+ >>> client = dataproc_v1.ClusterControllerClient()
+ >>>
+ >>> # TODO: Initialize `project_id`:
+ >>> project_id = ''
+ >>>
+ >>> # TODO: Initialize `region`:
+ >>> region = ''
+ >>>
+ >>> # TODO: Initialize `cluster_name`:
+ >>> cluster_name = ''
+ >>>
+ >>> response = client.diagnose_cluster(project_id, region, cluster_name)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
+ belongs to.
+ region (str): Required. The Dataproc region in which to handle the request.
+ cluster_name (str): Required. The cluster name.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "diagnose_cluster" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "diagnose_cluster"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.diagnose_cluster,
+ default_retry=self._method_configs["DiagnoseCluster"].retry,
+ default_timeout=self._method_configs["DiagnoseCluster"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = clusters_pb2.DiagnoseClusterRequest(
+ project_id=project_id, region=region, cluster_name=cluster_name,
+ )
+ operation = self._inner_api_calls["diagnose_cluster"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ clusters_pb2.DiagnoseClusterResults,
+ metadata_type=proto_operations_pb2.ClusterOperationMetadata,
+ )
+
def get_cluster(
self,
project_id,
@@ -659,7 +753,7 @@ def get_cluster(
)
request = clusters_pb2.GetClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
+ project_id=project_id, region=region, cluster_name=cluster_name,
)
return self._inner_api_calls["get_cluster"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -766,7 +860,7 @@ def list_clusters(
)
request = clusters_pb2.ListClustersRequest(
- project_id=project_id, region=region, filter=filter_, page_size=page_size
+ project_id=project_id, region=region, filter=filter_, page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
@@ -782,92 +876,3 @@ def list_clusters(
response_token_field="next_page_token",
)
return iterator
-
- def diagnose_cluster(
- self,
- project_id,
- region,
- cluster_name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets cluster diagnostic information. The returned
- ``Operation.metadata`` will be
- `ClusterOperationMetadata `__.
- After the operation completes, ``Operation.response`` contains
- `DiagnoseClusterResults `__.
-
- Example:
- >>> from google.cloud import dataproc_v1
- >>>
- >>> client = dataproc_v1.ClusterControllerClient()
- >>>
- >>> # TODO: Initialize `project_id`:
- >>> project_id = ''
- >>>
- >>> # TODO: Initialize `region`:
- >>> region = ''
- >>>
- >>> # TODO: Initialize `cluster_name`:
- >>> cluster_name = ''
- >>>
- >>> response = client.diagnose_cluster(project_id, region, cluster_name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
- belongs to.
- region (str): Required. The Dataproc region in which to handle the request.
- cluster_name (str): Required. The cluster name.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "diagnose_cluster" not in self._inner_api_calls:
- self._inner_api_calls[
- "diagnose_cluster"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.diagnose_cluster,
- default_retry=self._method_configs["DiagnoseCluster"].retry,
- default_timeout=self._method_configs["DiagnoseCluster"].timeout,
- client_info=self._client_info,
- )
-
- request = clusters_pb2.DiagnoseClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
- )
- operation = self._inner_api_calls["diagnose_cluster"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- empty_pb2.Empty,
- metadata_type=clusters_pb2.DiagnoseClusterResults,
- )
diff --git a/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py b/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py
index a1081c8d..51479bb1 100644
--- a/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py
+++ b/google/cloud/dataproc_v1/gapic/cluster_controller_client_config.py
@@ -2,50 +2,73 @@
"interfaces": {
"google.cloud.dataproc.v1.ClusterController": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": ["UNAVAILABLE"],
+ "retry_policy_6_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "no_retry_codes": [],
+ "retry_policy_5_codes": ["UNAVAILABLE"],
},
"retry_params": {
- "default": {
+ "retry_policy_6_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 30000,
+ "initial_rpc_timeout_millis": 300000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 30000,
+ "max_rpc_timeout_millis": 300000,
"total_timeout_millis": 300000,
- }
+ },
+ "retry_policy_5_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
},
"methods": {
"CreateCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"UpdateCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"DeleteCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
+ },
+ "DiagnoseCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"GetCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
},
"ListClusters": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "DiagnoseCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
},
},
}
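
The regenerated config replaces the idempotent/non_idempotent buckets with named retry_policy_* entries and raises the per-method timeouts to 300 s. Callers can still override a single call; here is a sketch mirroring retry_policy_6 (DEADLINE_EXCEEDED, INTERNAL, UNAVAILABLE; 100 ms initial delay, 1.3x multiplier, 60 s max delay, 300 s deadline), passed to get_cluster's retry parameter with placeholder arguments:

```python
from google.api_core import exceptions, retry
from google.cloud import dataproc_v1

# Equivalent of retry_policy_6 above, expressed as a Retry object.
custom_retry = retry.Retry(
    predicate=retry.if_exception_type(
        exceptions.DeadlineExceeded,
        exceptions.InternalServerError,
        exceptions.ServiceUnavailable,
    ),
    initial=0.1,     # 100 ms initial delay
    multiplier=1.3,
    maximum=60.0,    # 60 s max delay between attempts
    deadline=300.0,  # 300 s overall
)

client = dataproc_v1.ClusterControllerClient()
cluster = client.get_cluster(
    "my-project", "us-central1", "my-cluster", retry=custom_retry
)
```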
diff --git a/google/cloud/dataproc_v1/gapic/job_controller_client.py b/google/cloud/dataproc_v1/gapic/job_controller_client.py
index 2fb9cf48..25f12dfa 100644
--- a/google/cloud/dataproc_v1/gapic/job_controller_client.py
+++ b/google/cloud/dataproc_v1/gapic/job_controller_client.py
@@ -47,7 +47,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class JobControllerClient(object):
@@ -167,12 +169,12 @@ def __init__(
self.transport = transport
else:
self.transport = job_controller_grpc_transport.JobControllerGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -183,7 +185,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -271,12 +273,111 @@ def submit_job(
)
request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job, request_id=request_id
+ project_id=project_id, region=region, job=job, request_id=request_id,
)
return self._inner_api_calls["submit_job"](
request, retry=retry, timeout=timeout, metadata=metadata
)
+ def submit_job_as_operation(
+ self,
+ project_id,
+ region,
+ job,
+ request_id=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Submits job to a cluster.
+
+ Example:
+ >>> from google.cloud import dataproc_v1
+ >>>
+ >>> client = dataproc_v1.JobControllerClient()
+ >>>
+ >>> # TODO: Initialize `project_id`:
+ >>> project_id = ''
+ >>>
+ >>> # TODO: Initialize `region`:
+ >>> region = ''
+ >>>
+ >>> # TODO: Initialize `job`:
+ >>> job = {}
+ >>>
+ >>> response = client.submit_job_as_operation(project_id, region, job)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ project_id (str): Required. The ID of the Google Cloud Platform project that the job
+ belongs to.
+ region (str): Required. The Dataproc region in which to handle the request.
+ job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The job resource.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.dataproc_v1.types.Job`
+ request_id (str): Optional. A unique id used to identify the request. If the server
+ receives two ``SubmitJobRequest`` requests with the same id, then the
+ second request will be ignored and the first ``Job`` created and stored
+ in the backend is returned.
+
+ It is recommended to always set this value to a
+ `UUID `__.
+
+ The id must contain only letters (a-z, A-Z), numbers (0-9), underscores
+ (_), and hyphens (-). The maximum length is 40 characters.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "submit_job_as_operation" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "submit_job_as_operation"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.submit_job_as_operation,
+ default_retry=self._method_configs["SubmitJobAsOperation"].retry,
+ default_timeout=self._method_configs["SubmitJobAsOperation"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = jobs_pb2.SubmitJobRequest(
+ project_id=project_id, region=region, job=job, request_id=request_id,
+ )
+ operation = self._inner_api_calls["submit_job_as_operation"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ jobs_pb2.Job,
+ metadata_type=jobs_pb2.JobMetadata,
+ )
+
def get_job(
self,
project_id,
@@ -341,7 +442,7 @@ def get_job(
)
request = jobs_pb2.GetJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
return self._inner_api_calls["get_job"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -634,7 +735,7 @@ def cancel_job(
)
request = jobs_pb2.CancelJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
return self._inner_api_calls["cancel_job"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -702,107 +803,8 @@ def delete_job(
)
request = jobs_pb2.DeleteJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
self._inner_api_calls["delete_job"](
request, retry=retry, timeout=timeout, metadata=metadata
)
-
- def submit_job_as_operation(
- self,
- project_id,
- region,
- job,
- request_id=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Submits job to a cluster.
-
- Example:
- >>> from google.cloud import dataproc_v1
- >>>
- >>> client = dataproc_v1.JobControllerClient()
- >>>
- >>> # TODO: Initialize `project_id`:
- >>> project_id = ''
- >>>
- >>> # TODO: Initialize `region`:
- >>> region = ''
- >>>
- >>> # TODO: Initialize `job`:
- >>> job = {}
- >>>
- >>> response = client.submit_job_as_operation(project_id, region, job)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- project_id (str): Required. The ID of the Google Cloud Platform project that the job
- belongs to.
- region (str): Required. The Dataproc region in which to handle the request.
- job (Union[dict, ~google.cloud.dataproc_v1.types.Job]): Required. The job resource.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.dataproc_v1.types.Job`
- request_id (str): Optional. A unique id used to identify the request. If the server
- receives two ``SubmitJobRequest`` requests with the same id, then the
- second request will be ignored and the first ``Job`` created and stored
- in the backend is returned.
-
- It is recommended to always set this value to a
- `UUID `__.
-
- The id must contain only letters (a-z, A-Z), numbers (0-9), underscores
- (_), and hyphens (-). The maximum length is 40 characters.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "submit_job_as_operation" not in self._inner_api_calls:
- self._inner_api_calls[
- "submit_job_as_operation"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.submit_job_as_operation,
- default_retry=self._method_configs["SubmitJobAsOperation"].retry,
- default_timeout=self._method_configs["SubmitJobAsOperation"].timeout,
- client_info=self._client_info,
- )
-
- request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job, request_id=request_id
- )
- operation = self._inner_api_calls["submit_job_as_operation"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- jobs_pb2.Job,
- metadata_type=jobs_pb2.JobMetadata,
- )
diff --git a/google/cloud/dataproc_v1/gapic/job_controller_client_config.py b/google/cloud/dataproc_v1/gapic/job_controller_client_config.py
index 69ef2d50..c04bef57 100644
--- a/google/cloud/dataproc_v1/gapic/job_controller_client_config.py
+++ b/google/cloud/dataproc_v1/gapic/job_controller_client_config.py
@@ -2,56 +2,146 @@
"interfaces": {
"google.cloud.dataproc.v1.JobController": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent2": [],
- "non_idempotent": ["UNAVAILABLE"],
+ "retry_policy_4_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "retry_policy_6_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": ["UNAVAILABLE"],
+ "retry_policy_2_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "no_retry_1_codes": [],
+ "retry_policy_5_codes": ["UNAVAILABLE"],
+ "retry_policy_7_codes": ["UNAVAILABLE"],
},
"retry_params": {
- "default": {
+ "retry_policy_1_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 30000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 30000,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_3_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_2_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "retry_policy_6_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_7_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
"total_timeout_millis": 900000,
- }
+ },
+ "retry_policy_5_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_4_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ "no_retry_1_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
},
"methods": {
"SubmitJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
+ },
+ "SubmitJobAsOperation": {
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
},
"GetJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"ListJobs": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"UpdateJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
},
"CancelJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"DeleteJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "SubmitJobAsOperation": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent2",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
},
},
}
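
The SubmitJobAsOperation entries above pair with the new submit_job_as_operation client method earlier in this diff. A hedged sketch of its use (placeholder project, region, cluster, and script path; the job dict follows the Dataproc Job proto, and result() deserializes as a Job per the from_gapic wiring):

```python
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()

job = {
    "placement": {"cluster_name": "my-cluster"},
    "pyspark_job": {"main_python_file_uri": "gs://my-bucket/word_count.py"},
}

# Returns an operation future whose metadata is JobMetadata.
operation = client.submit_job_as_operation("my-project", "us-central1", job)

finished_job = operation.result()  # google.cloud.dataproc_v1.types.Job
print(finished_job.status.state)
```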
diff --git a/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py
index 740f094b..b2b4e646 100644
--- a/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py
+++ b/google/cloud/dataproc_v1/gapic/transports/autoscaling_policy_service_grpc_transport.py
@@ -53,7 +53,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -74,7 +74,7 @@ def __init__(
self._stubs = {
"autoscaling_policy_service_stub": autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub(
channel
- )
+ ),
}
@classmethod
@@ -110,33 +110,33 @@ def channel(self):
return self._channel
@property
- def update_autoscaling_policy(self):
- """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.update_autoscaling_policy`.
-
- Updates (replaces) autoscaling policy.
+ def create_autoscaling_policy(self):
+ """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.create_autoscaling_policy`.
- Disabled check for update_mask, because all updates will be full
- replacements.
+ Creates new autoscaling policy.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["autoscaling_policy_service_stub"].UpdateAutoscalingPolicy
+ return self._stubs["autoscaling_policy_service_stub"].CreateAutoscalingPolicy
@property
- def create_autoscaling_policy(self):
- """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.create_autoscaling_policy`.
+ def update_autoscaling_policy(self):
+ """Return the gRPC stub for :meth:`AutoscalingPolicyServiceClient.update_autoscaling_policy`.
- Creates new autoscaling policy.
+ Updates (replaces) autoscaling policy.
+
+ Disabled check for update_mask, because all updates will be full
+ replacements.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["autoscaling_policy_service_stub"].CreateAutoscalingPolicy
+ return self._stubs["autoscaling_policy_service_stub"].UpdateAutoscalingPolicy
@property
def get_autoscaling_policy(self):
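
Aside from added trailing commas, the changes in this and the other three transport files are mostly reorderings of the generator-emitted stub properties. Each property simply exposes the raw gRPC callable bound on the stub, and the client layer later wraps it with retry/timeout logic. A sketch of that pattern, using the names in this file (it needs application-default credentials to construct the channel):

```python
from google.cloud.dataproc_v1.gapic.transports import (
    autoscaling_policy_service_grpc_transport as transports,
)

# The transport owns the channel and a stub keyed by service name.
transport = transports.AutoscalingPolicyServiceGrpcTransport(
    address="dataproc.googleapis.com:443"
)

# Each property returns the bound gRPC callable for one RPC; the client
# wraps it via google.api_core.gapic_v1.method.wrap_method.
create_policy = transport.create_autoscaling_policy
print(create_policy)  # a grpc UnaryUnaryMultiCallable
```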
diff --git a/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py
index f1d053a0..5fc36d86 100644
--- a/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py
+++ b/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -73,7 +73,7 @@ def __init__(
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
- "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel)
+ "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel),
}
# Because this API includes a method that returns a
@@ -161,44 +161,44 @@ def delete_cluster(self):
return self._stubs["cluster_controller_stub"].DeleteCluster
@property
- def get_cluster(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`.
+ def diagnose_cluster(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`.
- Gets the resource representation for a cluster in a project.
+ Gets cluster diagnostic information. The returned
+ ``Operation.metadata`` will be
+ `ClusterOperationMetadata `__.
+ After the operation completes, ``Operation.response`` contains
+ `DiagnoseClusterResults `__.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].GetCluster
+ return self._stubs["cluster_controller_stub"].DiagnoseCluster
@property
- def list_clusters(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`.
+ def get_cluster(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`.
- Lists all regions/{region}/clusters in a project alphabetically.
+ Gets the resource representation for a cluster in a project.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].ListClusters
+ return self._stubs["cluster_controller_stub"].GetCluster
@property
- def diagnose_cluster(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`.
+ def list_clusters(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`.
- Gets cluster diagnostic information. The returned
- ``Operation.metadata`` will be
- `ClusterOperationMetadata `__.
- After the operation completes, ``Operation.response`` contains
- `DiagnoseClusterResults `__.
+ Lists all regions/{region}/clusters in a project alphabetically.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].DiagnoseCluster
+ return self._stubs["cluster_controller_stub"].ListClusters
diff --git a/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py
index ccbb17d1..54a30763 100644
--- a/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py
+++ b/google/cloud/dataproc_v1/gapic/transports/job_controller_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -72,7 +72,9 @@ def __init__(
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
- self._stubs = {"job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel)}
+ self._stubs = {
+ "job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel),
+ }
# Because this API includes a method that returns a
# long-running operation (proto: google.longrunning.Operation),
@@ -126,6 +128,19 @@ def submit_job(self):
"""
return self._stubs["job_controller_stub"].SubmitJob
+ @property
+ def submit_job_as_operation(self):
+ """Return the gRPC stub for :meth:`JobControllerClient.submit_job_as_operation`.
+
+ Submits job to a cluster.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["job_controller_stub"].SubmitJobAsOperation
+
@property
def get_job(self):
"""Return the gRPC stub for :meth:`JobControllerClient.get_job`.
@@ -195,16 +210,3 @@ def delete_job(self):
deserialized response object.
"""
return self._stubs["job_controller_stub"].DeleteJob
-
- @property
- def submit_job_as_operation(self):
- """Return the gRPC stub for :meth:`JobControllerClient.submit_job_as_operation`.
-
- Submits job to a cluster.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["job_controller_stub"].SubmitJobAsOperation
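
The job controller transport now exposes `submit_job_as_operation` next to `submit_job` instead of at the end of the file. Unlike `submit_job`, which returns the Job directly, the `*_as_operation` variant returns a `google.longrunning.Operation` that the client wraps in an operation future. A hedged usage sketch (argument order as in the gapic client of this diff; names are illustrative):

```python
from google.cloud import dataproc_v1

client = dataproc_v1.JobControllerClient()

job = {
    "placement": {"cluster_name": "my-cluster"},
    "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
}

# Returns an operation future; .result() blocks until the job reaches a
# terminal state and yields the final Job message.
operation = client.submit_job_as_operation("my-project", "my-region", job)
final_job = operation.result()
print(final_job.status.state)
```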
diff --git a/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py b/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py
index 06564b07..705ab40b 100644
--- a/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py
+++ b/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -75,7 +75,7 @@ def __init__(
self._stubs = {
"workflow_template_service_stub": workflow_templates_pb2_grpc.WorkflowTemplateServiceStub(
channel
- )
+ ),
}
# Because this API includes a method that returns a
@@ -117,35 +117,6 @@ def channel(self):
"""
return self._channel
- @property
- def create_workflow_template(self):
- """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`.
-
- Creates new workflow template.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate
-
- @property
- def get_workflow_template(self):
- """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`.
-
- Retrieves the latest workflow template.
-
- Can retrieve previously instantiated template by specifying optional
- version parameter.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate
-
@property
def instantiate_workflow_template(self):
"""Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_workflow_template`.
@@ -208,6 +179,35 @@ def instantiate_inline_workflow_template(self):
"workflow_template_service_stub"
].InstantiateInlineWorkflowTemplate
+ @property
+ def create_workflow_template(self):
+ """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`.
+
+ Creates new workflow template.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate
+
+ @property
+ def get_workflow_template(self):
+ """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`.
+
+ Retrieves the latest workflow template.
+
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate
+
@property
def update_workflow_template(self):
"""Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.update_workflow_template`.
diff --git a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py
index f17bf273..417a1dd0 100644
--- a/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py
+++ b/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py
@@ -53,7 +53,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class WorkflowTemplateServiceClient(object):
@@ -102,7 +104,7 @@ def location_path(cls, project, location):
def region_path(cls, project, region):
"""Return a fully-qualified region string."""
return google.api_core.path_template.expand(
- "projects/{project}/regions/{region}", project=project, region=region
+ "projects/{project}/regions/{region}", project=project, region=region,
)
@classmethod
@@ -202,12 +204,12 @@ def __init__(
self.transport = transport
else:
self.transport = workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -218,7 +220,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -228,183 +230,6 @@ def __init__(
self._inner_api_calls = {}
# Service calls
- def create_workflow_template(
- self,
- parent,
- template,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates new workflow template.
-
- Example:
- >>> from google.cloud import dataproc_v1
- >>>
- >>> client = dataproc_v1.WorkflowTemplateServiceClient()
- >>>
- >>> parent = client.region_path('[PROJECT]', '[REGION]')
- >>>
- >>> # TODO: Initialize `template`:
- >>> template = {}
- >>>
- >>> response = client.create_workflow_template(parent, template)
-
- Args:
- parent (str): Required. The resource name of the region or location, as described
- in https://cloud.google.com/apis/design/resource_names.
-
- - For ``projects.regions.workflowTemplates,create``, the resource name
- of the region has the following format:
- ``projects/{project_id}/regions/{region}``
-
- - For ``projects.locations.workflowTemplates.create``, the resource
- name of the location has the following format:
- ``projects/{project_id}/locations/{location}``
- template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The Dataproc workflow template to create.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_workflow_template" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_workflow_template"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_workflow_template,
- default_retry=self._method_configs["CreateWorkflowTemplate"].retry,
- default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout,
- client_info=self._client_info,
- )
-
- request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
- parent=parent, template=template
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["create_workflow_template"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def get_workflow_template(
- self,
- name,
- version=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Retrieves the latest workflow template.
-
- Can retrieve previously instantiated template by specifying optional
- version parameter.
-
- Example:
- >>> from google.cloud import dataproc_v1
- >>>
- >>> client = dataproc_v1.WorkflowTemplateServiceClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> response = client.get_workflow_template(name)
-
- Args:
- name (str): Required. The resource name of the workflow template, as described
- in https://cloud.google.com/apis/design/resource_names.
-
- - For ``projects.regions.workflowTemplates.get``, the resource name of
- the template has the following format:
- ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
-
- - For ``projects.locations.workflowTemplates.get``, the resource name
- of the template has the following format:
- ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
- version (int): Optional. The version of workflow template to retrieve. Only previously
- instantiated versions can be retrieved.
-
- If unspecified, retrieves the current version.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_workflow_template" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_workflow_template"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_workflow_template,
- default_retry=self._method_configs["GetWorkflowTemplate"].retry,
- default_timeout=self._method_configs["GetWorkflowTemplate"].timeout,
- client_info=self._client_info,
- )
-
- request = workflow_templates_pb2.GetWorkflowTemplateRequest(
- name=name, version=version
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_workflow_template"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
def instantiate_workflow_template(
self,
name,
@@ -513,7 +338,7 @@ def instantiate_workflow_template(
)
request = workflow_templates_pb2.InstantiateWorkflowTemplateRequest(
- name=name, version=version, request_id=request_id, parameters=parameters
+ name=name, version=version, request_id=request_id, parameters=parameters,
)
if metadata is None:
metadata = []
@@ -649,7 +474,7 @@ def instantiate_inline_workflow_template(
)
request = workflow_templates_pb2.InstantiateInlineWorkflowTemplateRequest(
- parent=parent, template=template, request_id=request_id
+ parent=parent, template=template, request_id=request_id,
)
if metadata is None:
metadata = []
@@ -674,6 +499,183 @@ def instantiate_inline_workflow_template(
metadata_type=workflow_templates_pb2.WorkflowMetadata,
)
+ def create_workflow_template(
+ self,
+ parent,
+ template,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Creates new workflow template.
+
+ Example:
+ >>> from google.cloud import dataproc_v1
+ >>>
+ >>> client = dataproc_v1.WorkflowTemplateServiceClient()
+ >>>
+ >>> parent = client.region_path('[PROJECT]', '[REGION]')
+ >>>
+ >>> # TODO: Initialize `template`:
+ >>> template = {}
+ >>>
+ >>> response = client.create_workflow_template(parent, template)
+
+ Args:
+ parent (str): Required. The resource name of the region or location, as described
+ in https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.workflowTemplates,create``, the resource name
+ of the region has the following format:
+ ``projects/{project_id}/regions/{region}``
+
+ - For ``projects.locations.workflowTemplates.create``, the resource
+ name of the location has the following format:
+ ``projects/{project_id}/locations/{location}``
+ template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The Dataproc workflow template to create.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "create_workflow_template" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "create_workflow_template"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.create_workflow_template,
+ default_retry=self._method_configs["CreateWorkflowTemplate"].retry,
+ default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
+ parent=parent, template=template,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["create_workflow_template"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def get_workflow_template(
+ self,
+ name,
+ version=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Retrieves the latest workflow template.
+
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+
+ Example:
+ >>> from google.cloud import dataproc_v1
+ >>>
+ >>> client = dataproc_v1.WorkflowTemplateServiceClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> response = client.get_workflow_template(name)
+
+ Args:
+ name (str): Required. The resource name of the workflow template, as described
+ in https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.workflowTemplates.get``, the resource name of
+ the template has the following format:
+ ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+ - For ``projects.locations.workflowTemplates.get``, the resource name
+ of the template has the following format:
+ ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+ version (int): Optional. The version of workflow template to retrieve. Only previously
+ instantiated versions can be retrieved.
+
+ If unspecified, retrieves the current version.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1.types.WorkflowTemplate` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_workflow_template" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_workflow_template"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_workflow_template,
+ default_retry=self._method_configs["GetWorkflowTemplate"].retry,
+ default_timeout=self._method_configs["GetWorkflowTemplate"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = workflow_templates_pb2.GetWorkflowTemplateRequest(
+ name=name, version=version,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_workflow_template"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
def update_workflow_template(
self,
template,
@@ -733,7 +735,7 @@ def update_workflow_template(
)
request = workflow_templates_pb2.UpdateWorkflowTemplateRequest(
- template=template
+ template=template,
)
if metadata is None:
metadata = []
@@ -834,7 +836,7 @@ def list_workflow_templates(
)
request = workflow_templates_pb2.ListWorkflowTemplatesRequest(
- parent=parent, page_size=page_size
+ parent=parent, page_size=page_size,
)
if metadata is None:
metadata = []
@@ -927,7 +929,7 @@ def delete_workflow_template(
)
request = workflow_templates_pb2.DeleteWorkflowTemplateRequest(
- name=name, version=version
+ name=name, version=version,
)
if metadata is None:
metadata = []
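
The relocated `create_workflow_template` and `get_workflow_template` bodies are unchanged apart from trailing commas; both still attach an `x-goog-request-params` routing header derived from the resource name so Dataproc can route the request to the right region. That helper is usable on its own; a small sketch of what the metadata roughly looks like (resource name is illustrative):

```python
from google.api_core.gapic_v1 import routing_header

parent = "projects/my-project/regions/us-central1"

# to_grpc_metadata URL-encodes the key/value pairs into the single
# x-goog-request-params header tuple appended to the call metadata.
md = routing_header.to_grpc_metadata([("parent", parent)])
print(md)
# ('x-goog-request-params', 'parent=projects%2Fmy-project%2Fregions%2Fus-central1')
```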
diff --git a/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py b/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py
index 8b6be43f..ec0aea38 100644
--- a/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py
+++ b/google/cloud/dataproc_v1/gapic/workflow_template_service_client_config.py
@@ -2,55 +2,78 @@
"interfaces": {
"google.cloud.dataproc.v1.WorkflowTemplateService": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": ["UNAVAILABLE"],
+ "retry_policy_4_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": ["UNAVAILABLE"],
},
"retry_params": {
- "default": {
+ "retry_policy_3_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 30000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 30000,
- "total_timeout_millis": 900000,
- }
- },
- "methods": {
- "CreateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
},
- "GetWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "retry_policy_4_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
},
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ },
+ "methods": {
"InstantiateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
},
"InstantiateInlineWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
+ },
+ "CreateWorkflowTemplate": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
+ },
+ "GetWorkflowTemplate": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
},
"UpdateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
},
"ListWorkflowTemplates": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
},
"DeleteWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
},
},
}
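
As with the job controller config above, the workflow template defaults move to numbered retry policies, here with 600 s timeouts throughout. The client turns this dictionary into per-method retry/timeout objects at construction time via `gapic_v1.config.parse_method_configs` (see the `__init__` hunk earlier in this diff). A quick inspection sketch, assuming the module-level dict is named `config` as in the other generated config modules:

```python
from google.api_core import gapic_v1
from google.cloud.dataproc_v1.gapic import (
    workflow_template_service_client_config as client_config,
)

interface = client_config.config["interfaces"][
    "google.cloud.dataproc.v1.WorkflowTemplateService"
]

# parse_method_configs returns a dict of MethodConfig(retry, timeout) keyed
# by RPC name; wrap_method later applies these as per-call defaults.
method_configs = gapic_v1.config.parse_method_configs(interface)
cfg = method_configs["GetWorkflowTemplate"]
print(cfg.retry)    # Retry built from retry_policy_4_codes / _params
print(cfg.timeout)  # Exponential timeout bounded by total_timeout_millis
```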
diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py
index 25b2e962..ae720579 100644
--- a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py
+++ b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py
@@ -157,7 +157,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=264,
serialized_end=816,
@@ -506,7 +506,7 @@
serialized_options=b"\340A\002\372A+\n)dataproc.googleapis.com/AutoscalingPolicy",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -547,7 +547,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -588,7 +588,7 @@
serialized_options=b"\340A\002\372A+\n)dataproc.googleapis.com/AutoscalingPolicy",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -811,6 +811,7 @@
"DESCRIPTOR": _AUTOSCALINGPOLICY,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """Describes an autoscaling policy for Dataproc cluster autoscaler.
+
Attributes:
id:
Required. The policy id. The id must contain only letters
@@ -848,6 +849,7 @@
"DESCRIPTOR": _BASICAUTOSCALINGALGORITHM,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """Basic algorithm for autoscaling.
+
Attributes:
yarn_config:
Required. YARN autoscaling configuration.
@@ -868,6 +870,7 @@
"DESCRIPTOR": _BASICYARNAUTOSCALINGCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """Basic autoscaling configurations for YARN.
+
Attributes:
graceful_decommission_timeout:
Required. Timeout for YARN graceful decommissioning of Node
@@ -919,6 +922,7 @@
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """Configuration for the size bounds of an instance group, including its
proportional size to other groups.
+
Attributes:
min_instances:
Optional. Minimum number of instances for this group. Primary
@@ -963,6 +967,7 @@
"DESCRIPTOR": _CREATEAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A request to create an autoscaling policy.
+
Attributes:
parent:
Required. The “resource name” of the region or location, as
@@ -989,6 +994,7 @@
"DESCRIPTOR": _GETAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A request to fetch an autoscaling policy.
+
Attributes:
name:
Required. The “resource name” of the autoscaling policy, as
@@ -1014,6 +1020,7 @@
"DESCRIPTOR": _UPDATEAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A request to update an autoscaling policy.
+
Attributes:
policy:
Required. The updated autoscaling policy.
@@ -1031,6 +1038,7 @@
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A request to delete an autoscaling policy. Autoscaling policies in
use by one or more clusters will not be deleted.
+
Attributes:
name:
Required. The “resource name” of the autoscaling policy, as
@@ -1056,6 +1064,7 @@
"DESCRIPTOR": _LISTAUTOSCALINGPOLICIESREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A request to list autoscaling policies in a project.
+
Attributes:
parent:
Required. The “resource name” of the region or location, as
@@ -1086,6 +1095,7 @@
"DESCRIPTOR": _LISTAUTOSCALINGPOLICIESRESPONSE,
"__module__": "google.cloud.dataproc_v1.proto.autoscaling_policies_pb2",
"__doc__": """A response to a request to list autoscaling policies in a project.
+
Attributes:
policies:
Output only. Autoscaling policies list.
diff --git a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py
index 172d1815..1d3e8259 100644
--- a/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1.proto import (
@@ -9,15 +10,15 @@
class AutoscalingPolicyServiceStub(object):
"""The API interface for managing autoscaling policies in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateAutoscalingPolicy = channel.unary_unary(
"/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy",
request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.CreateAutoscalingPolicyRequest.SerializeToString,
@@ -47,12 +48,12 @@ def __init__(self, channel):
class AutoscalingPolicyServiceServicer(object):
"""The API interface for managing autoscaling policies in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def CreateAutoscalingPolicy(self, request, context):
"""Creates new autoscaling policy.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -60,31 +61,31 @@ def CreateAutoscalingPolicy(self, request, context):
def UpdateAutoscalingPolicy(self, request, context):
"""Updates (replaces) autoscaling policy.
- Disabled check for update_mask, because all updates will be full
- replacements.
- """
+ Disabled check for update_mask, because all updates will be full
+ replacements.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetAutoscalingPolicy(self, request, context):
"""Retrieves autoscaling policy.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListAutoscalingPolicies(self, request, context):
"""Lists autoscaling policies in the project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteAutoscalingPolicy(self, request, context):
"""Deletes an autoscaling policy. It is an error to delete an autoscaling
- policy that is in use by one or more clusters.
- """
+ policy that is in use by one or more clusters.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -122,3 +123,145 @@ def add_AutoscalingPolicyServiceServicer_to_server(servicer, server):
"google.cloud.dataproc.v1.AutoscalingPolicyService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class AutoscalingPolicyService(object):
+ """The API interface for managing autoscaling policies in the
+ Dataproc API.
+ """
+
+ @staticmethod
+ def CreateAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.AutoscalingPolicyService/CreateAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.CreateAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.AutoscalingPolicyService/UpdateAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.UpdateAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.AutoscalingPolicyService/GetAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.GetAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListAutoscalingPolicies(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.AutoscalingPolicyService/ListAutoscalingPolicies",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.ListAutoscalingPoliciesRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.ListAutoscalingPoliciesResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.AutoscalingPolicyService/DeleteAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_autoscaling__policies__pb2.DeleteAutoscalingPolicyRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
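
The regenerated `*_pb2_grpc.py` modules also gain a connectionless calling surface: one static method per RPC that builds a channel on demand through `grpc.experimental.unary_unary`. A hedged sketch of calling it directly follows; gRPC marks this API experimental, and a real call to dataproc.googleapis.com also needs call credentials (an OAuth token), so this only shows the shape of the invocation with an illustrative policy name:

```python
import grpc
from google.cloud.dataproc_v1.proto import (
    autoscaling_policies_pb2,
    autoscaling_policies_pb2_grpc,
)

request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(
    name="projects/my-project/regions/us-central1/autoscalingPolicies/my-policy"
)

# The experimental API creates (and may cache) a channel to `target` per call.
policy = autoscaling_policies_pb2_grpc.AutoscalingPolicyService.GetAutoscalingPolicy(
    request,
    target="dataproc.googleapis.com:443",
    channel_credentials=grpc.ssl_channel_credentials(),
    timeout=30.0,
)
print(policy.id)
```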
diff --git a/google/cloud/dataproc_v1/proto/clusters.proto b/google/cloud/dataproc_v1/proto/clusters.proto
index fbaf9391..c66d35d3 100644
--- a/google/cloud/dataproc_v1/proto/clusters.proto
+++ b/google/cloud/dataproc_v1/proto/clusters.proto
@@ -111,8 +111,8 @@ service ClusterController {
};
option (google.api.method_signature) = "project_id,region,cluster_name";
option (google.longrunning.operation_info) = {
- response_type: "google.protobuf.Empty"
- metadata_type: "DiagnoseClusterResults"
+ response_type: "DiagnoseClusterResults"
+ metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata"
};
}
}
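
This proto change corrects the long-running operation annotation for `DiagnoseCluster`: the operation's `response` is now declared as `DiagnoseClusterResults` and its `metadata` as `ClusterOperationMetadata`, rather than the other way around. Client libraries read `operation_info` to pick the deserialization types when wrapping the LRO; a sketch of the equivalent client-side wrapping, assuming the message types as named in this package (`wrap_diagnose_operation` is a hypothetical helper, not part of the library):

```python
from google.api_core import operation
from google.cloud.dataproc_v1.proto import clusters_pb2
from google.cloud.dataproc_v1.proto import operations_pb2 as dataproc_operations_pb2


def wrap_diagnose_operation(raw_operation, operations_client):
    # Given a raw google.longrunning.Operation from DiagnoseCluster,
    # from_gapic unpacks response/metadata using the operation_info types.
    return operation.from_gapic(
        raw_operation,
        operations_client,
        clusters_pb2.DiagnoseClusterResults,  # Operation.response
        metadata_type=dataproc_operations_pb2.ClusterOperationMetadata,  # Operation.metadata
    )
```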
diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2.py b/google/cloud/dataproc_v1/proto/clusters_pb2.py
index cdfe0bc3..0b950767 100644
--- a/google/cloud/dataproc_v1/proto/clusters_pb2.py
+++ b/google/cloud/dataproc_v1/proto/clusters_pb2.py
@@ -32,7 +32,7 @@
syntax="proto3",
serialized_options=b"\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc",
create_key=_descriptor._internal_create_key,
- serialized_pb=b'\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcd\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12>\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb0\x06\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01\x12H\n\x10lifecycle_config\x18\x11 \x01(\x0b\x32).google.cloud.dataproc.v1.LifecycleConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\x9f\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x12P\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32-.google.cloud.dataproc.v1.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 
\x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x03\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x83\x02\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12;\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00\x12\x39\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01H\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xf8\x01\n\x13ReservationAffinity\x12Y\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 
\x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe3\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3',
+ serialized_pb=b'\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcd\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfigB\x03\xe0\x41\x02\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12>\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb0\x06\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01\x12H\n\x10lifecycle_config\x18\x11 \x01(\x0b\x32).google.cloud.dataproc.v1.LifecycleConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\x9f\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x12P\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32-.google.cloud.dataproc.v1.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 
\x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x03\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x83\x02\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12;\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x01H\x00\x12\x39\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01H\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xf8\x01\n\x13ReservationAffinity\x12Y\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 
\x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xff\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xa8\x02\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xc7\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x32project_id,region,cluster_name,cluster,update_mask\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\xaa\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xc5\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41K\n\x16\x44iagnoseClusterResults\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3',
dependencies=[
google_dot_api_dot_annotations__pb2.DESCRIPTOR,
google_dot_api_dot_client__pb2.DESCRIPTOR,
@@ -417,7 +417,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTER_LABELSENTRY],
+ nested_types=[_CLUSTER_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -686,7 +686,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -727,7 +727,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -982,7 +982,7 @@
),
],
extensions=[],
- nested_types=[_GCECLUSTERCONFIG_METADATAENTRY],
+ nested_types=[_GCECLUSTERCONFIG_METADATAENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1533,7 +1533,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE],
+ enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -1570,7 +1570,7 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -2018,7 +2018,7 @@
),
],
extensions=[],
- nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY],
+ nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2130,7 +2130,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=4218,
serialized_end=4477,
@@ -2303,7 +2303,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY],
+ nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -3046,7 +3046,7 @@
serialized_options=b"\340A\003",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -3129,7 +3129,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_RESERVATIONAFFINITY_TYPE],
+ enum_types=[_RESERVATIONAFFINITY_TYPE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -3278,6 +3278,7 @@
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Describes the identifying information, config, and status of a cluster
of Compute Engine instances.
+
Attributes:
project_id:
Required. The Google Cloud Platform project ID that the
@@ -3322,6 +3323,7 @@
"DESCRIPTOR": _CLUSTERCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """The cluster config.
+
Attributes:
config_bucket:
Optional. A Cloud Storage bucket used to stage job
@@ -3381,6 +3383,7 @@
"DESCRIPTOR": _AUTOSCALINGCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Autoscaling Policy config associated with the cluster.
+
Attributes:
policy_uri:
Optional. The autoscaling policy used by the cluster. Only
@@ -3403,6 +3406,7 @@
"DESCRIPTOR": _ENCRYPTIONCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Encryption settings for the cluster.
+
Attributes:
gce_pd_kms_key_name:
Optional. The Cloud KMS key name to use for PD disk encryption
@@ -3430,6 +3434,7 @@
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Common config settings for resources of Compute Engine cluster
instances, applicable to all instances in the cluster.
+
Attributes:
zone_uri:
Optional. The zone where the Compute Engine cluster will be
@@ -3521,6 +3526,7 @@
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """The config settings for Compute Engine resources in an instance group,
such as a master or worker group.
+
Attributes:
num_instances:
Optional. The number of VM instances in the instance group.
@@ -3584,6 +3590,7 @@
"DESCRIPTOR": _MANAGEDGROUPCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies the resources used to actively manage an instance group.
+
Attributes:
instance_template_name:
Output only. The name of the Instance Template used for the
@@ -3606,6 +3613,7 @@
"__doc__": """Specifies the type and number of accelerator cards attached to the
instances of an instance. See `GPUs on Compute Engine
`__.
+
Attributes:
accelerator_type_uri:
Full URL, partial URI, or short name of the accelerator type
@@ -3638,6 +3646,7 @@
"DESCRIPTOR": _DISKCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies the config of disk options for a group of VM instances.
+
Attributes:
boot_disk_type:
Optional. Type of the boot disk (default is “pd-standard”).
@@ -3667,6 +3676,7 @@
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies an executable to run on a fully configured node and a
timeout period for executable completion.
+
Attributes:
executable_file:
Required. Cloud Storage URI of executable file.
@@ -3691,6 +3701,7 @@
"DESCRIPTOR": _CLUSTERSTATUS,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """The status of a cluster and its instances.
+
Attributes:
state:
Output only. The cluster’s state.
@@ -3717,6 +3728,7 @@
"DESCRIPTOR": _SECURITYCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Security related configuration, including Kerberos.
+
Attributes:
kerberos_config:
Kerberos related configuration.
@@ -3733,6 +3745,7 @@
"DESCRIPTOR": _KERBEROSCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies Kerberos related configuration.
+
Attributes:
enable_kerberos:
Optional. Flag to indicate whether to Kerberize the cluster
@@ -3814,6 +3827,7 @@
"DESCRIPTOR": _SOFTWARECONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies the selection and config of software inside the cluster.
+
Attributes:
image_version:
Optional. The version of software inside the cluster. It must
@@ -3853,6 +3867,7 @@
"DESCRIPTOR": _LIFECYCLECONFIG,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Specifies the cluster auto-delete schedule configuration.
+
Attributes:
idle_delete_ttl:
Optional. The duration to keep the cluster alive while idling
@@ -3914,6 +3929,7 @@
"__doc__": """Contains cluster daemon metrics, such as HDFS and YARN stats. **Beta
Feature**: This report is available for testing purposes only. It may
be changed before final release.
+
Attributes:
hdfs_metrics:
The HDFS metrics.
@@ -3934,6 +3950,7 @@
"DESCRIPTOR": _CREATECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """A request to create a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -3967,6 +3984,7 @@
"DESCRIPTOR": _UPDATECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """A request to update a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project the
@@ -4047,6 +4065,7 @@
"DESCRIPTOR": _DELETECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """A request to delete a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4084,6 +4103,7 @@
"DESCRIPTOR": _GETCLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Request to get the resource representation for a cluster in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4105,6 +4125,7 @@
"DESCRIPTOR": _LISTCLUSTERSREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """A request to list the clusters in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4145,6 +4166,7 @@
"DESCRIPTOR": _LISTCLUSTERSRESPONSE,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """The list of all clusters in a project.
+
Attributes:
clusters:
Output only. The clusters in the project.
@@ -4166,6 +4188,7 @@
"DESCRIPTOR": _DIAGNOSECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """A request to collect cluster diagnostic information.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4187,6 +4210,7 @@
"DESCRIPTOR": _DIAGNOSECLUSTERRESULTS,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """The location of diagnostic output.
+
Attributes:
output_uri:
Output only. The Cloud Storage URI of the diagnostic output.
@@ -4205,6 +4229,7 @@
"DESCRIPTOR": _RESERVATIONAFFINITY,
"__module__": "google.cloud.dataproc_v1.proto.clusters_pb2",
"__doc__": """Reservation Affinity for consuming Zonal reservation.
+
Attributes:
consume_reservation_type:
Optional. Type of reservation to consume
@@ -4339,7 +4364,7 @@
serialized_options=b"\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform",
create_key=_descriptor._internal_create_key,
serialized_start=6120,
- serialized_end=7755,
+ serialized_end=7783,
methods=[
_descriptor.MethodDescriptor(
name="CreateCluster",
@@ -4398,7 +4423,7 @@
containing_service=None,
input_type=_DIAGNOSECLUSTERREQUEST,
output_type=google_dot_longrunning_dot_operations__pb2._OPERATION,
- serialized_options=b'\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A\036project_id,region,cluster_name\312A/\n\025google.protobuf.Empty\022\026DiagnoseClusterResults',
+ serialized_options=b'\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A\036project_id,region,cluster_name\312AK\n\026DiagnoseClusterResults\0221google.cloud.dataproc.v1.ClusterOperationMetadata',
create_key=_descriptor._internal_create_key,
),
],
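(Note: the hunk above changes the DiagnoseCluster long-running-operation annotation so the operation's response is DiagnoseClusterResults and its metadata is ClusterOperationMetadata, where the response was previously google.protobuf.Empty. A minimal sketch of unpacking a completed operation under the new annotation follows; the helper name and the already-finished Operation are assumptions for illustration, not part of the generated file.)

    from google.cloud.dataproc_v1.proto import clusters_pb2, operations_pb2
    from google.longrunning import operations_pb2 as longrunning_pb2

    def unpack_diagnose_operation(operation: longrunning_pb2.Operation):
        # Hypothetical helper: assumes `operation` is a finished DiagnoseCluster LRO.
        results = clusters_pb2.DiagnoseClusterResults()
        operation.response.Unpack(results)      # response now carries DiagnoseClusterResults
        metadata = operations_pb2.ClusterOperationMetadata()
        operation.metadata.Unpack(metadata)     # metadata carries ClusterOperationMetadata
        return results.output_uri, metadata.status.state
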
diff --git a/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py b/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py
index 391133b4..8eab4906 100644
--- a/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1.proto import (
@@ -11,15 +12,15 @@
class ClusterControllerStub(object):
"""The ClusterControllerService provides methods to manage clusters
- of Compute Engine instances.
- """
+ of Compute Engine instances.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateCluster = channel.unary_unary(
"/google.cloud.dataproc.v1.ClusterController/CreateCluster",
request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString,
@@ -54,59 +55,59 @@ def __init__(self, channel):
class ClusterControllerServicer(object):
"""The ClusterControllerService provides methods to manage clusters
- of Compute Engine instances.
- """
+ of Compute Engine instances.
+ """
def CreateCluster(self, request, context):
"""Creates a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCluster(self, request, context):
"""Updates a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteCluster(self, request, context):
"""Deletes a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCluster(self, request, context):
"""Gets the resource representation for a cluster in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListClusters(self, request, context):
"""Lists all regions/{region}/clusters in a project alphabetically.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DiagnoseCluster(self, request, context):
"""Gets cluster diagnostic information. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
- After the operation completes,
- [Operation.response][google.longrunning.Operation.response]
- contains
- [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).
+ After the operation completes,
+ [Operation.response][google.longrunning.Operation.response]
+ contains
+ [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -149,3 +150,172 @@ def add_ClusterControllerServicer_to_server(servicer, server):
"google.cloud.dataproc.v1.ClusterController", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class ClusterController(object):
+ """The ClusterControllerService provides methods to manage clusters
+ of Compute Engine instances.
+ """
+
+ @staticmethod
+ def CreateCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/CreateCluster",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/UpdateCluster",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/DeleteCluster",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/GetCluster",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.Cluster.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListClusters(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/ListClusters",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DiagnoseCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.ClusterController/DiagnoseCluster",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
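(The experimental ClusterController class added above wraps grpc.experimental.unary_unary so callers can invoke RPCs without building a stub or channel. A minimal sketch of one such call follows; the endpoint, project, region, and cluster name are placeholder assumptions, and a real call against Dataproc would also need call credentials for authentication.)

    import grpc
    from google.cloud.dataproc_v1.proto import clusters_pb2, clusters_pb2_grpc

    request = clusters_pb2.GetClusterRequest(
        project_id="example-project",    # placeholder
        region="us-central1",            # placeholder
        cluster_name="example-cluster",  # placeholder
    )
    # No stub or channel object is constructed by the caller; gRPC manages one internally.
    cluster = clusters_pb2_grpc.ClusterController.GetCluster(
        request,
        "dataproc.googleapis.com:443",
        channel_credentials=grpc.ssl_channel_credentials(),
        timeout=60,
    )
    print(cluster.cluster_name, cluster.status.state)
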
diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2.py b/google/cloud/dataproc_v1/proto/jobs_pb2.py
index 145d9007..6fd028e6 100644
--- a/google/cloud/dataproc_v1/proto/jobs_pb2.py
+++ b/google/cloud/dataproc_v1/proto/jobs_pb2.py
@@ -483,11 +483,11 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
- nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY],
- enum_types=[_LOGGINGCONFIG_LEVEL],
+ nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY,],
+ enum_types=[_LOGGINGCONFIG_LEVEL,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -719,7 +719,7 @@
),
],
extensions=[],
- nested_types=[_HADOOPJOB_PROPERTIESENTRY],
+ nested_types=[_HADOOPJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -733,7 +733,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=617,
serialized_end=986,
@@ -961,7 +961,7 @@
),
],
extensions=[],
- nested_types=[_SPARKJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -975,7 +975,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=989,
serialized_end=1356,
@@ -1203,7 +1203,7 @@
),
],
extensions=[],
- nested_types=[_PYSPARKJOB_PROPERTIESENTRY],
+ nested_types=[_PYSPARKJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1241,7 +1241,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1498,7 +1498,7 @@
),
],
extensions=[],
- nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY],
+ nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1512,7 +1512,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1773,
serialized_end=2210,
@@ -1761,7 +1761,7 @@
),
],
extensions=[],
- nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1775,7 +1775,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2213,
serialized_end=2698,
@@ -2043,7 +2043,7 @@
),
],
extensions=[],
- nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY],
+ nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2057,7 +2057,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2701,
serialized_end=3205,
@@ -2247,7 +2247,7 @@
),
],
extensions=[],
- nested_types=[_SPARKRJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKRJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2461,7 +2461,7 @@
),
],
extensions=[],
- nested_types=[_PRESTOJOB_PROPERTIESENTRY],
+ nested_types=[_PRESTOJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2475,7 +2475,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=3521,
serialized_end=3915,
@@ -2629,7 +2629,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE],
+ enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -2787,7 +2787,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_YARNAPPLICATION_STATE],
+ enum_types=[_YARNAPPLICATION_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -3228,7 +3228,7 @@
),
],
extensions=[],
- nested_types=[_JOB_LABELSENTRY],
+ nested_types=[_JOB_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -3242,7 +3242,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=4822,
serialized_end=5973,
@@ -3275,7 +3275,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -3709,7 +3709,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER],
+ enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -4260,6 +4260,7 @@
"DESCRIPTOR": _LOGGINGCONFIG,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """The runtime logging config of the job.
+
Attributes:
driver_log_levels:
The per-package log levels for the driver. This may include
@@ -4292,6 +4293,7 @@
client/hadoop-mapreduce-client-core/MapReduceTutorial.html>`__ jobs on
`Apache Hadoop YARN `__.
+
Attributes:
driver:
Required. Indicates the location of the driver’s main class.
@@ -4356,6 +4358,7 @@
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Spark `__
applications on YARN.
+
Attributes:
driver:
Required. The specification of the main method to call to
@@ -4419,6 +4422,7 @@
"__doc__": """A Dataproc job for running `Apache PySpark
`__
applications on YARN.
+
Attributes:
main_python_file_uri:
Required. The HCFS URI of the main Python file to use as the
@@ -4463,6 +4467,7 @@
"DESCRIPTOR": _QUERYLIST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A list of queries to run on a cluster.
+
Attributes:
queries:
Required. The queries to execute. You do not need to terminate
@@ -4503,6 +4508,7 @@
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Hive `__
queries on YARN.
+
Attributes:
queries:
Required. The sequence of Hive queries to execute, specified
@@ -4562,6 +4568,7 @@
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Spark SQL
`__ queries.
+
Attributes:
queries:
Required. The sequence of Spark SQL queries to execute,
@@ -4616,6 +4623,7 @@
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Pig `__
queries on YARN.
+
Attributes:
queries:
Required. The sequence of Pig queries to execute, specified as
@@ -4669,6 +4677,7 @@
"__doc__": """A Dataproc job for running `Apache SparkR
`__ applications on
YARN.
+
Attributes:
main_r_file_uri:
Required. The HCFS URI of the main R file to use as the
@@ -4721,6 +4730,7 @@
`__
must be enabled when the cluster is created to submit a Presto job to
the cluster.
+
Attributes:
queries:
Required. The sequence of Presto queries to execute, specified
@@ -4759,6 +4769,7 @@
"DESCRIPTOR": _JOBPLACEMENT,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """Dataproc job config.
+
Attributes:
cluster_name:
Required. The name of the cluster where the job will be
@@ -4779,6 +4790,7 @@
"DESCRIPTOR": _JOBSTATUS,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """Dataproc job status.
+
Attributes:
state:
Output only. A state message specifying the overall job state.
@@ -4803,6 +4815,7 @@
"DESCRIPTOR": _JOBREFERENCE,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """Encapsulates the full scoping used to reference a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4830,6 +4843,7 @@
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.
**Beta Feature**: This report is available for testing purposes only.
It may be changed before final release.
+
Attributes:
name:
Required. The application name.
@@ -4865,6 +4879,7 @@
"DESCRIPTOR": _JOB,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A Dataproc job resource.
+
Attributes:
reference:
Optional. The fully qualified reference to the job, which can
@@ -4944,6 +4959,7 @@
"DESCRIPTOR": _JOBSCHEDULING,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """Job scheduling options.
+
Attributes:
max_failures_per_hour:
Optional. Maximum number of times per hour a driver may be
@@ -4964,6 +4980,7 @@
"DESCRIPTOR": _SUBMITJOBREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to submit a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4997,6 +5014,7 @@
"DESCRIPTOR": _JOBMETADATA,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """Job Operation metadata.
+
Attributes:
job_id:
Output only. The job id.
@@ -5019,6 +5037,7 @@
"DESCRIPTOR": _GETJOBREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to get the resource representation for a job in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5040,6 +5059,7 @@
"DESCRIPTOR": _LISTJOBSREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to list jobs in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5082,6 +5102,7 @@
"DESCRIPTOR": _UPDATEJOBREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to update a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5111,6 +5132,7 @@
"DESCRIPTOR": _LISTJOBSRESPONSE,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A list of jobs in a project.
+
Attributes:
jobs:
Output only. Jobs list.
@@ -5132,6 +5154,7 @@
"DESCRIPTOR": _CANCELJOBREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to cancel a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5153,6 +5176,7 @@
"DESCRIPTOR": _DELETEJOBREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.jobs_pb2",
"__doc__": """A request to delete a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
diff --git a/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py b/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py
index 0f52c0e9..106082cb 100644
--- a/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/jobs_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1.proto import (
@@ -12,14 +13,14 @@
class JobControllerStub(object):
"""The JobController provides methods to manage jobs.
- """
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.SubmitJob = channel.unary_unary(
"/google.cloud.dataproc.v1.JobController/SubmitJob",
request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
@@ -59,58 +60,58 @@ def __init__(self, channel):
class JobControllerServicer(object):
"""The JobController provides methods to manage jobs.
- """
+ """
def SubmitJob(self, request, context):
"""Submits a job to a cluster.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SubmitJobAsOperation(self, request, context):
"""Submits job to a cluster.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetJob(self, request, context):
"""Gets the resource representation for a job in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListJobs(self, request, context):
"""Lists regions/{region}/jobs in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateJob(self, request, context):
"""Updates a job in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CancelJob(self, request, context):
"""Starts a job cancellation request. To access the job resource
- after cancellation, call
- [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
- or
- [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
- """
+ after cancellation, call
+ [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list)
+ or
+ [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteJob(self, request, context):
"""Deletes the job from the project. If the job is active, the delete fails,
- and the response returns `FAILED_PRECONDITION`.
- """
+ and the response returns `FAILED_PRECONDITION`.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -158,3 +159,198 @@ def add_JobControllerServicer_to_server(servicer, server):
"google.cloud.dataproc.v1.JobController", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class JobController(object):
+ """The JobController provides methods to manage jobs.
+ """
+
+ @staticmethod
+ def SubmitJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/SubmitJob",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def SubmitJobAsOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/SubmitJobAsOperation",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/GetJob",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.GetJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListJobs(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/ListJobs",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.ListJobsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/UpdateJob",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.UpdateJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def CancelJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/CancelJob",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.CancelJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.JobController/DeleteJob",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_jobs__pb2.DeleteJobRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
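(The experimental JobController wrappers added above follow the same stub-less pattern. A short sketch, with placeholder project, region, cluster, and file URI, and authentication omitted for brevity:)

    import grpc
    from google.cloud.dataproc_v1.proto import jobs_pb2, jobs_pb2_grpc

    request = jobs_pb2.SubmitJobRequest(
        project_id="example-project",   # placeholder
        region="us-central1",           # placeholder
        job=jobs_pb2.Job(
            placement=jobs_pb2.JobPlacement(cluster_name="example-cluster"),
            pyspark_job=jobs_pb2.PySparkJob(
                main_python_file_uri="gs://example-bucket/word_count.py",  # placeholder
            ),
        ),
    )
    job = jobs_pb2_grpc.JobController.SubmitJob(
        request,
        "dataproc.googleapis.com:443",
        channel_credentials=grpc.ssl_channel_credentials(),
    )
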
diff --git a/google/cloud/dataproc_v1/proto/operations_pb2.py b/google/cloud/dataproc_v1/proto/operations_pb2.py
index 9fc6fe38..f8ed3ca8 100644
--- a/google/cloud/dataproc_v1/proto/operations_pb2.py
+++ b/google/cloud/dataproc_v1/proto/operations_pb2.py
@@ -167,7 +167,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_CLUSTEROPERATIONSTATUS_STATE],
+ enum_types=[_CLUSTEROPERATIONSTATUS_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -399,7 +399,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY],
+ nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -438,6 +438,7 @@
"DESCRIPTOR": _CLUSTEROPERATIONSTATUS,
"__module__": "google.cloud.dataproc_v1.proto.operations_pb2",
"__doc__": """The status of the operation.
+
Attributes:
state:
Output only. A message containing the operation state.
@@ -471,6 +472,7 @@
"DESCRIPTOR": _CLUSTEROPERATIONMETADATA,
"__module__": "google.cloud.dataproc_v1.proto.operations_pb2",
"__doc__": """Metadata describing the operation.
+
Attributes:
cluster_name:
Output only. Name of the cluster for the operation.
diff --git a/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py b/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py
index 07cb78fe..8a939394 100644
--- a/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/operations_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/dataproc_v1/proto/shared_pb2.py b/google/cloud/dataproc_v1/proto/shared_pb2.py
index e3c7df44..2b5e305d 100644
--- a/google/cloud/dataproc_v1/proto/shared_pb2.py
+++ b/google/cloud/dataproc_v1/proto/shared_pb2.py
@@ -23,7 +23,7 @@
serialized_options=b"\n\034com.google.cloud.dataproc.v1B\013SharedProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc",
create_key=_descriptor._internal_create_key,
serialized_pb=b"\n+google/cloud/dataproc_v1/proto/shared.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto*|\n\tComponent\x12\x19\n\x15\x43OMPONENT_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x41NACONDA\x10\x05\x12\x10\n\x0cHIVE_WEBHCAT\x10\x03\x12\x0b\n\x07JUPYTER\x10\x01\x12\n\n\x06PRESTO\x10\x06\x12\x0c\n\x08ZEPPELIN\x10\x04\x12\r\n\tZOOKEEPER\x10\x08\x42o\n\x1c\x63om.google.cloud.dataproc.v1B\x0bSharedProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3",
- dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR],
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,],
)
_COMPONENT = _descriptor.EnumDescriptor(
diff --git a/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py b/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py
index 07cb78fe..8a939394 100644
--- a/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/shared_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py
index 4581bc81..492ba11a 100644
--- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py
+++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py
@@ -401,7 +401,7 @@
),
],
extensions=[],
- nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY],
+ nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY,],
enum_types=[],
serialized_options=b"\352A\306\001\n(dataproc.googleapis.com/WorkflowTemplate\022Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\022Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \001",
is_extendable=False,
@@ -475,7 +475,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1110,
serialized_end=1290,
@@ -608,7 +608,7 @@
),
],
extensions=[],
- nested_types=[_MANAGEDCLUSTER_LABELSENTRY],
+ nested_types=[_MANAGEDCLUSTER_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -727,7 +727,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTERSELECTOR_CLUSTERLABELSENTRY],
+ nested_types=[_CLUSTERSELECTOR_CLUSTERLABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1036,7 +1036,7 @@
),
],
extensions=[],
- nested_types=[_ORDEREDJOB_LABELSENTRY],
+ nested_types=[_ORDEREDJOB_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1050,7 +1050,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1707,
serialized_end=2441,
@@ -1217,7 +1217,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2604,
serialized_end=2765,
@@ -1250,7 +1250,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1291,7 +1291,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1584,8 +1584,8 @@
),
],
extensions=[],
- nested_types=[_WORKFLOWMETADATA_PARAMETERSENTRY],
- enum_types=[_WORKFLOWMETADATA_STATE],
+ nested_types=[_WORKFLOWMETADATA_PARAMETERSENTRY,],
+ enum_types=[_WORKFLOWMETADATA_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -1701,7 +1701,7 @@
serialized_options=b"\340A\003",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1822,7 +1822,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_WORKFLOWNODE_NODESTATE],
+ enum_types=[_WORKFLOWNODE_NODESTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -2098,7 +2098,7 @@
),
],
extensions=[],
- nested_types=[_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY],
+ nested_types=[_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2215,7 +2215,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -2655,6 +2655,7 @@
"DESCRIPTOR": _WORKFLOWTEMPLATE,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A Dataproc workflow template resource.
+
Attributes:
name:
Output only. The resource name of the workflow template, as
@@ -2715,6 +2716,7 @@
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """Specifies workflow execution target. Either ``managed_cluster`` or
``cluster_selector`` is required.
+
Attributes:
placement:
Required. Specifies where workflow executes; either on a
@@ -2747,6 +2749,7 @@
"DESCRIPTOR": _MANAGEDCLUSTER,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """Cluster that is managed by the workflow.
+
Attributes:
cluster_name:
Required. The cluster name prefix. A unique cluster name will
@@ -2784,6 +2787,7 @@
"DESCRIPTOR": _CLUSTERSELECTOR,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A selector that chooses target cluster for jobs based on metadata.
+
Attributes:
zone:
Optional. The zone where workflow process executes. This
@@ -2816,6 +2820,7 @@
"DESCRIPTOR": _ORDEREDJOB,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A job executed by the workflow.
+
Attributes:
step_id:
Required. The step id. The id must be unique among all jobs
@@ -2860,6 +2865,7 @@
template. Parameterizable fields: - Labels - File uris - Job
properties - Job arguments - Script variables - Main class (in
HadoopJob and SparkJob) - Zone (in ClusterSelector)
+
Attributes:
name:
Required. Parameter name. The parameter name is used as the
@@ -2921,6 +2927,7 @@
"DESCRIPTOR": _PARAMETERVALIDATION,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """Configuration for parameter validation.
+
Attributes:
validation_type:
Required. The type of validation to be performed.
@@ -2941,6 +2948,7 @@
"DESCRIPTOR": _REGEXVALIDATION,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """Validation based on regular expressions.
+
Attributes:
regexes:
Required. RE2 regular expressions used to validate the
@@ -2959,6 +2967,7 @@
"DESCRIPTOR": _VALUEVALIDATION,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """Validation based on a list of allowed values.
+
Attributes:
values:
Required. List of allowed values for the parameter.
@@ -2984,6 +2993,7 @@
"DESCRIPTOR": _WORKFLOWMETADATA,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A Dataproc workflow template resource.
+
Attributes:
template:
Output only. The resource name of the workflow template as
@@ -3032,6 +3042,7 @@
"DESCRIPTOR": _CLUSTEROPERATION,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """The cluster operation triggered by a workflow.
+
Attributes:
operation_id:
Output only. The id of the cluster operation.
@@ -3052,6 +3063,7 @@
"DESCRIPTOR": _WORKFLOWGRAPH,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """The workflow graph.
+
Attributes:
nodes:
Output only. The workflow nodes.
@@ -3068,6 +3080,7 @@
"DESCRIPTOR": _WORKFLOWNODE,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """The workflow node.
+
Attributes:
step_id:
Output only. The name of the node.
@@ -3093,6 +3106,7 @@
"DESCRIPTOR": _CREATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to create a workflow template.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3119,6 +3133,7 @@
"DESCRIPTOR": _GETWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to fetch a workflow template.
+
Attributes:
name:
Required. The resource name of the workflow template, as
@@ -3157,6 +3172,7 @@
"DESCRIPTOR": _INSTANTIATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to instantiate a workflow template.
+
Attributes:
name:
Required. The resource name of the workflow template, as
@@ -3203,6 +3219,7 @@
"DESCRIPTOR": _INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to instantiate an inline workflow template.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3238,6 +3255,7 @@
"DESCRIPTOR": _UPDATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to update a workflow template.
+
Attributes:
template:
Required. The updated workflow template. The
@@ -3255,6 +3273,7 @@
"DESCRIPTOR": _LISTWORKFLOWTEMPLATESREQUEST,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to list workflow templates in a project.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3285,6 +3304,7 @@
"DESCRIPTOR": _LISTWORKFLOWTEMPLATESRESPONSE,
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A response to a request to list workflow templates in a project.
+
Attributes:
templates:
Output only. WorkflowTemplates list.
@@ -3307,6 +3327,7 @@
"__module__": "google.cloud.dataproc_v1.proto.workflow_templates_pb2",
"__doc__": """A request to delete a workflow template. Currently started workflows
will remain running.
+
Attributes:
name:
Required. The resource name of the workflow template, as
diff --git a/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py b/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py
index f766e9ea..26effdb6 100644
--- a/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py
+++ b/google/cloud/dataproc_v1/proto/workflow_templates_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1.proto import (
@@ -12,15 +13,15 @@
class WorkflowTemplateServiceStub(object):
"""The API interface for managing Workflow Templates in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateWorkflowTemplate = channel.unary_unary(
"/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate",
request_serializer=google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.SerializeToString,
@@ -60,12 +61,12 @@ def __init__(self, channel):
class WorkflowTemplateServiceServicer(object):
"""The API interface for managing Workflow Templates in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def CreateWorkflowTemplate(self, request, context):
"""Creates new workflow template.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -73,9 +74,9 @@ def CreateWorkflowTemplate(self, request, context):
def GetWorkflowTemplate(self, request, context):
"""Retrieves the latest workflow template.
- Can retrieve previously instantiated template by specifying optional
- version parameter.
- """
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -83,25 +84,25 @@ def GetWorkflowTemplate(self, request, context):
def InstantiateWorkflowTemplate(self, request, context):
"""Instantiates a template and begins execution.
- The returned Operation can be used to track execution of
- workflow by polling
- [operations.get][google.longrunning.Operations.GetOperation].
- The Operation will complete when entire workflow is finished.
+ The returned Operation can be used to track execution of
+ workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when entire workflow is finished.
- The running workflow can be aborted via
- [operations.cancel][google.longrunning.Operations.CancelOperation].
- This will cause any inflight jobs to be cancelled and workflow-owned
- clusters to be deleted.
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and workflow-owned
+ clusters to be deleted.
- The [Operation.metadata][google.longrunning.Operation.metadata] will be
- [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
- Also see [Using
- WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
+ The [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
+ Also see [Using
+ WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
- On successful completion,
- [Operation.response][google.longrunning.Operation.response] will be
- [Empty][google.protobuf.Empty].
- """
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will be
+ [Empty][google.protobuf.Empty].
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -109,51 +110,51 @@ def InstantiateWorkflowTemplate(self, request, context):
def InstantiateInlineWorkflowTemplate(self, request, context):
"""Instantiates a template and begins execution.
- This method is equivalent to executing the sequence
- [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
- [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
+ This method is equivalent to executing the sequence
+ [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate],
+ [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate].
- The returned Operation can be used to track execution of
- workflow by polling
- [operations.get][google.longrunning.Operations.GetOperation].
- The Operation will complete when entire workflow is finished.
+ The returned Operation can be used to track execution of
+ workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when entire workflow is finished.
- The running workflow can be aborted via
- [operations.cancel][google.longrunning.Operations.CancelOperation].
- This will cause any inflight jobs to be cancelled and workflow-owned
- clusters to be deleted.
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and workflow-owned
+ clusters to be deleted.
- The [Operation.metadata][google.longrunning.Operation.metadata] will be
- [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
- Also see [Using
- WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
+ The [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
+ Also see [Using
+ WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
- On successful completion,
- [Operation.response][google.longrunning.Operation.response] will be
- [Empty][google.protobuf.Empty].
- """
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will be
+ [Empty][google.protobuf.Empty].
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateWorkflowTemplate(self, request, context):
"""Updates (replaces) workflow template. The updated template
- must contain version that matches the current server version.
- """
+ must contain version that matches the current server version.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListWorkflowTemplates(self, request, context):
"""Lists workflows that match the specified filter in the request.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteWorkflowTemplate(self, request, context):
"""Deletes a workflow template. It does not cancel in-progress workflows.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -201,3 +202,199 @@ def add_WorkflowTemplateServiceServicer_to_server(servicer, server):
"google.cloud.dataproc.v1.WorkflowTemplateService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class WorkflowTemplateService(object):
+ """The API interface for managing Workflow Templates in the
+ Dataproc API.
+ """
+
+ @staticmethod
+ def CreateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/CreateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/GetWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.GetWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def InstantiateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.InstantiateWorkflowTemplateRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def InstantiateInlineWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/InstantiateInlineWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.InstantiateInlineWorkflowTemplateRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/UpdateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.UpdateWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListWorkflowTemplates(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/ListWorkflowTemplates",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1.WorkflowTemplateService/DeleteWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1_dot_proto_dot_workflow__templates__pb2.DeleteWorkflowTemplateRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
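Reviewer note: the new module-level `WorkflowTemplateService` helpers above wrap `grpc.experimental.unary_unary`. A minimal sketch of calling one of them directly follows; the endpoint, credentials, and template name are placeholders, and in practice the generated client classes remain the supported entry point.

    import grpc
    from google.cloud.dataproc_v1.proto import (
        workflow_templates_pb2,
        workflow_templates_pb2_grpc,
    )

    request = workflow_templates_pb2.GetWorkflowTemplateRequest(
        name="projects/my-project/regions/us-central1/workflowTemplates/my-template",
    )
    # Experimental gRPC API; see the class docstring above. Placeholder target
    # and bare SSL credentials only, no call credentials attached.
    template = workflow_templates_pb2_grpc.WorkflowTemplateService.GetWorkflowTemplate(
        request,
        target="dataproc.googleapis.com:443",
        channel_credentials=grpc.ssl_channel_credentials(),
        timeout=60,
    )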
diff --git a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py
index 3d0074c0..7783cf49 100644
--- a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py
+++ b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client.py
@@ -41,7 +41,9 @@
from google.protobuf import empty_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class AutoscalingPolicyServiceClient(object):
@@ -100,7 +102,7 @@ def location_path(cls, project, location):
def region_path(cls, project, region):
"""Return a fully-qualified region string."""
return google.api_core.path_template.expand(
- "projects/{project}/regions/{region}", project=project, region=region
+ "projects/{project}/regions/{region}", project=project, region=region,
)
def __init__(
@@ -190,12 +192,12 @@ def __init__(
self.transport = transport
else:
self.transport = autoscaling_policy_service_grpc_transport.AutoscalingPolicyServiceGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -206,7 +208,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -285,7 +287,7 @@ def create_autoscaling_policy(
)
request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
- parent=parent, policy=policy
+ parent=parent, policy=policy,
)
if metadata is None:
metadata = []
@@ -362,7 +364,9 @@ def update_autoscaling_policy(
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(policy=policy)
+ request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(
+ policy=policy,
+ )
if metadata is None:
metadata = []
metadata = list(metadata)
@@ -441,7 +445,7 @@ def get_autoscaling_policy(
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name)
+ request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(name=name,)
if metadata is None:
metadata = []
metadata = list(metadata)
@@ -541,7 +545,7 @@ def list_autoscaling_policies(
)
request = autoscaling_policies_pb2.ListAutoscalingPoliciesRequest(
- parent=parent, page_size=page_size
+ parent=parent, page_size=page_size,
)
if metadata is None:
metadata = []
@@ -630,7 +634,7 @@ def delete_autoscaling_policy(
client_info=self._client_info,
)
- request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name)
+ request = autoscaling_policies_pb2.DeleteAutoscalingPolicyRequest(name=name,)
if metadata is None:
metadata = []
metadata = list(metadata)
diff --git a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py
index 53ca474b..3274e972 100644
--- a/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py
+++ b/google/cloud/dataproc_v1beta2/gapic/autoscaling_policy_service_client_config.py
@@ -2,45 +2,136 @@
"interfaces": {
"google.cloud.dataproc.v1beta2.AutoscalingPolicyService": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
- "non_idempotent": [],
+ "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "retry_policy_4_codes": ["UNAVAILABLE"],
+ "retry_policy_6_codes": ["UNAVAILABLE"],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_2_codes": ["UNAVAILABLE"],
+ "no_retry_1_codes": [],
+ "retry_policy_5_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_7_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
},
"retry_params": {
- "default": {
+ "retry_policy_1_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
+ "max_rpc_timeout_millis": 600000,
"total_timeout_millis": 600000,
- }
+ },
+ "retry_policy_6_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_2_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_3_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_7_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_5_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "retry_policy_4_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ "no_retry_1_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
},
"methods": {
"CreateAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "no_retry_1_codes",
+ "retry_params_name": "no_retry_1_params",
},
"UpdateAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"GetAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"ListAutoscalingPolicies": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_1_codes",
+ "retry_params_name": "retry_policy_1_params",
},
"DeleteAutoscalingPolicy": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "no_retry_1_codes",
+ "retry_params_name": "no_retry_1_params",
},
},
}
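Reviewer note: the `retry_policy_*_params` blocks above describe a standard exponential backoff. A small sketch of the delay schedule implied by `retry_policy_1_params` (values in milliseconds, illustrative arithmetic only):

    def backoff_schedule(initial=100, multiplier=1.3, maximum=60000, attempts=8):
        """Return the first few retry delays implied by the params above."""
        delays, delay = [], initial
        for _ in range(attempts):
            delays.append(round(min(delay, maximum), 1))
            delay *= multiplier
        return delays

    print(backoff_schedule())  # [100, 130.0, 169.0, 219.7, 285.6, 371.3, 482.7, 627.5]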
diff --git a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py
index 5408b71a..bdc99bf7 100644
--- a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py
+++ b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py
@@ -47,7 +47,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class ClusterControllerClient(object):
@@ -170,12 +172,12 @@ def __init__(
self.transport = transport
else:
self.transport = cluster_controller_grpc_transport.ClusterControllerGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -186,7 +188,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -286,7 +288,10 @@ def create_cluster(
)
request = clusters_pb2.CreateClusterRequest(
- project_id=project_id, region=region, cluster=cluster, request_id=request_id
+ project_id=project_id,
+ region=region,
+ cluster=cluster,
+ request_id=request_id,
)
operation = self._inner_api_calls["create_cluster"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -605,6 +610,95 @@ def delete_cluster(
metadata_type=proto_operations_pb2.ClusterOperationMetadata,
)
+ def diagnose_cluster(
+ self,
+ project_id,
+ region,
+ cluster_name,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Gets cluster diagnostic information. The returned
+ ``Operation.metadata`` will be
+ `ClusterOperationMetadata `__.
+ After the operation completes, ``Operation.response`` contains
+ ``Empty``.
+
+ Example:
+ >>> from google.cloud import dataproc_v1beta2
+ >>>
+ >>> client = dataproc_v1beta2.ClusterControllerClient()
+ >>>
+ >>> # TODO: Initialize `project_id`:
+ >>> project_id = ''
+ >>>
+ >>> # TODO: Initialize `region`:
+ >>> region = ''
+ >>>
+ >>> # TODO: Initialize `cluster_name`:
+ >>> cluster_name = ''
+ >>>
+ >>> response = client.diagnose_cluster(project_id, region, cluster_name)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
+ belongs to.
+ region (str): Required. The Dataproc region in which to handle the request.
+ cluster_name (str): Required. The cluster name.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "diagnose_cluster" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "diagnose_cluster"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.diagnose_cluster,
+ default_retry=self._method_configs["DiagnoseCluster"].retry,
+ default_timeout=self._method_configs["DiagnoseCluster"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = clusters_pb2.DiagnoseClusterRequest(
+ project_id=project_id, region=region, cluster_name=cluster_name,
+ )
+ operation = self._inner_api_calls["diagnose_cluster"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ empty_pb2.Empty,
+ metadata_type=proto_operations_pb2.ClusterOperationMetadata,
+ )
+
def get_cluster(
self,
project_id,
@@ -669,7 +763,7 @@ def get_cluster(
)
request = clusters_pb2.GetClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
+ project_id=project_id, region=region, cluster_name=cluster_name,
)
return self._inner_api_calls["get_cluster"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -776,7 +870,7 @@ def list_clusters(
)
request = clusters_pb2.ListClustersRequest(
- project_id=project_id, region=region, filter=filter_, page_size=page_size
+ project_id=project_id, region=region, filter=filter_, page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
@@ -792,92 +886,3 @@ def list_clusters(
response_token_field="next_page_token",
)
return iterator
-
- def diagnose_cluster(
- self,
- project_id,
- region,
- cluster_name,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Gets cluster diagnostic information. The returned
- ``Operation.metadata`` will be
- `ClusterOperationMetadata `__.
- After the operation completes, ``Operation.response`` contains
- ``Empty``.
-
- Example:
- >>> from google.cloud import dataproc_v1beta2
- >>>
- >>> client = dataproc_v1beta2.ClusterControllerClient()
- >>>
- >>> # TODO: Initialize `project_id`:
- >>> project_id = ''
- >>>
- >>> # TODO: Initialize `region`:
- >>> region = ''
- >>>
- >>> # TODO: Initialize `cluster_name`:
- >>> cluster_name = ''
- >>>
- >>> response = client.diagnose_cluster(project_id, region, cluster_name)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- project_id (str): Required. The ID of the Google Cloud Platform project that the cluster
- belongs to.
- region (str): Required. The Dataproc region in which to handle the request.
- cluster_name (str): Required. The cluster name.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "diagnose_cluster" not in self._inner_api_calls:
- self._inner_api_calls[
- "diagnose_cluster"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.diagnose_cluster,
- default_retry=self._method_configs["DiagnoseCluster"].retry,
- default_timeout=self._method_configs["DiagnoseCluster"].timeout,
- client_info=self._client_info,
- )
-
- request = clusters_pb2.DiagnoseClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
- )
- operation = self._inner_api_calls["diagnose_cluster"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- empty_pb2.Empty,
- metadata_type=proto_operations_pb2.ClusterOperationMetadata,
- )
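Reviewer note: the relocated `diagnose_cluster` above returns a long-running operation future. A minimal sketch of waiting on it and supplying a custom retry via `google.api_core.retry`; project, region, and cluster names are placeholders.

    from google.api_core import exceptions, retry
    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.ClusterControllerClient()
    custom_retry = retry.Retry(
        predicate=retry.if_exception_type(exceptions.ServiceUnavailable),
        initial=0.1,
        multiplier=1.3,
        maximum=60.0,
        deadline=300.0,
    )
    operation = client.diagnose_cluster(
        "my-project", "us-central1", "my-cluster", retry=custom_retry
    )
    operation.result()  # blocks until the diagnosis completes; the response is Empty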
diff --git a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py
index b2482807..43673451 100644
--- a/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py
+++ b/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client_config.py
@@ -2,50 +2,73 @@
"interfaces": {
"google.cloud.dataproc.v1beta2.ClusterController": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": ["UNAVAILABLE"],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_2_codes": ["UNAVAILABLE"],
},
"retry_params": {
- "default": {
+ "retry_policy_2_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
+ "initial_rpc_timeout_millis": 300000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
- "total_timeout_millis": 600000,
- }
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_3_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
},
"methods": {
"CreateCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"UpdateCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"DeleteCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
+ },
+ "DiagnoseCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_2_codes",
+ "retry_params_name": "retry_policy_2_params",
},
"GetCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
},
"ListClusters": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
- },
- "DiagnoseCluster": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 300000,
+ "retry_codes_name": "retry_policy_3_codes",
+ "retry_params_name": "retry_policy_3_params",
},
},
}
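Reviewer note: these defaults are read in the client's `__init__` via `parse_method_configs`. A sketch of overriding a single method's timeout by passing a modified `client_config`, assuming the module exposes its dict as `config` (the usual GAPIC convention):

    import copy

    from google.cloud import dataproc_v1beta2
    from google.cloud.dataproc_v1beta2.gapic import cluster_controller_client_config

    config = copy.deepcopy(cluster_controller_client_config.config)
    interface = config["interfaces"]["google.cloud.dataproc.v1beta2.ClusterController"]
    interface["methods"]["CreateCluster"]["timeout_millis"] = 600000  # 10 minutes

    client = dataproc_v1beta2.ClusterControllerClient(client_config=config)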
diff --git a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py
index bc9b6c8a..f3f12304 100644
--- a/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py
+++ b/google/cloud/dataproc_v1beta2/gapic/job_controller_client.py
@@ -47,7 +47,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class JobControllerClient(object):
@@ -167,12 +169,12 @@ def __init__(
self.transport = transport
else:
self.transport = job_controller_grpc_transport.JobControllerGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -183,7 +185,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -271,12 +273,111 @@ def submit_job(
)
request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job, request_id=request_id
+ project_id=project_id, region=region, job=job, request_id=request_id,
)
return self._inner_api_calls["submit_job"](
request, retry=retry, timeout=timeout, metadata=metadata
)
+ def submit_job_as_operation(
+ self,
+ project_id,
+ region,
+ job,
+ request_id=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Submits job to a cluster.
+
+ Example:
+ >>> from google.cloud import dataproc_v1beta2
+ >>>
+ >>> client = dataproc_v1beta2.JobControllerClient()
+ >>>
+ >>> # TODO: Initialize `project_id`:
+ >>> project_id = ''
+ >>>
+ >>> # TODO: Initialize `region`:
+ >>> region = ''
+ >>>
+ >>> # TODO: Initialize `job`:
+ >>> job = {}
+ >>>
+ >>> response = client.submit_job_as_operation(project_id, region, job)
+ >>>
+ >>> def callback(operation_future):
+ ... # Handle result.
+ ... result = operation_future.result()
+ >>>
+ >>> response.add_done_callback(callback)
+ >>>
+ >>> # Handle metadata.
+ >>> metadata = response.metadata()
+
+ Args:
+ project_id (str): Required. The ID of the Google Cloud Platform project that the job
+ belongs to.
+ region (str): Required. The Dataproc region in which to handle the request.
+ job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The job resource.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.dataproc_v1beta2.types.Job`
+ request_id (str): Optional. A unique id used to identify the request. If the server
+ receives two ``SubmitJobRequest`` requests with the same id, then the
+ second request will be ignored and the first ``Job`` created and stored
+ in the backend is returned.
+
+ It is recommended to always set this value to a
+ `UUID `__.
+
+ The id must contain only letters (a-z, A-Z), numbers (0-9), underscores
+ (_), and hyphens (-). The maximum length is 40 characters.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "submit_job_as_operation" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "submit_job_as_operation"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.submit_job_as_operation,
+ default_retry=self._method_configs["SubmitJobAsOperation"].retry,
+ default_timeout=self._method_configs["SubmitJobAsOperation"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = jobs_pb2.SubmitJobRequest(
+ project_id=project_id, region=region, job=job, request_id=request_id,
+ )
+ operation = self._inner_api_calls["submit_job_as_operation"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+ return google.api_core.operation.from_gapic(
+ operation,
+ self.transport._operations_client,
+ jobs_pb2.Job,
+ metadata_type=jobs_pb2.JobMetadata,
+ )
+
def get_job(
self,
project_id,
@@ -341,7 +442,7 @@ def get_job(
)
request = jobs_pb2.GetJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
return self._inner_api_calls["get_job"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -634,7 +735,7 @@ def cancel_job(
)
request = jobs_pb2.CancelJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
return self._inner_api_calls["cancel_job"](
request, retry=retry, timeout=timeout, metadata=metadata
@@ -702,107 +803,8 @@ def delete_job(
)
request = jobs_pb2.DeleteJobRequest(
- project_id=project_id, region=region, job_id=job_id
+ project_id=project_id, region=region, job_id=job_id,
)
self._inner_api_calls["delete_job"](
request, retry=retry, timeout=timeout, metadata=metadata
)
-
- def submit_job_as_operation(
- self,
- project_id,
- region,
- job,
- request_id=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Submits job to a cluster.
-
- Example:
- >>> from google.cloud import dataproc_v1beta2
- >>>
- >>> client = dataproc_v1beta2.JobControllerClient()
- >>>
- >>> # TODO: Initialize `project_id`:
- >>> project_id = ''
- >>>
- >>> # TODO: Initialize `region`:
- >>> region = ''
- >>>
- >>> # TODO: Initialize `job`:
- >>> job = {}
- >>>
- >>> response = client.submit_job_as_operation(project_id, region, job)
- >>>
- >>> def callback(operation_future):
- ... # Handle result.
- ... result = operation_future.result()
- >>>
- >>> response.add_done_callback(callback)
- >>>
- >>> # Handle metadata.
- >>> metadata = response.metadata()
-
- Args:
- project_id (str): Required. The ID of the Google Cloud Platform project that the job
- belongs to.
- region (str): Required. The Dataproc region in which to handle the request.
- job (Union[dict, ~google.cloud.dataproc_v1beta2.types.Job]): Required. The job resource.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.dataproc_v1beta2.types.Job`
- request_id (str): Optional. A unique id used to identify the request. If the server
- receives two ``SubmitJobRequest`` requests with the same id, then the
- second request will be ignored and the first ``Job`` created and stored
- in the backend is returned.
-
- It is recommended to always set this value to a
- `UUID `__.
-
- The id must contain only letters (a-z, A-Z), numbers (0-9), underscores
- (_), and hyphens (-). The maximum length is 40 characters.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1beta2.types._OperationFuture` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "submit_job_as_operation" not in self._inner_api_calls:
- self._inner_api_calls[
- "submit_job_as_operation"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.submit_job_as_operation,
- default_retry=self._method_configs["SubmitJobAsOperation"].retry,
- default_timeout=self._method_configs["SubmitJobAsOperation"].timeout,
- client_info=self._client_info,
- )
-
- request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job, request_id=request_id
- )
- operation = self._inner_api_calls["submit_job_as_operation"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
- return google.api_core.operation.from_gapic(
- operation,
- self.transport._operations_client,
- jobs_pb2.Job,
- metadata_type=jobs_pb2.JobMetadata,
- )
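Reviewer note: unlike `submit_job`, which returns the `Job` resource immediately, the relocated `submit_job_as_operation` above returns an operation future whose `result()` is the finished `Job` (with `JobMetadata` as operation metadata). A short sketch with placeholder project, region, bucket, and cluster names; the dict form of the job is accepted as documented in the method's docstring.

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.JobControllerClient()
    job = {
        "placement": {"cluster_name": "my-cluster"},
        "pyspark_job": {"main_python_file_uri": "gs://my-bucket/job.py"},
    }
    operation = client.submit_job_as_operation("my-project", "us-central1", job)
    finished_job = operation.result()  # blocks until the job reaches a terminal state
    print(finished_job.status.state)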
diff --git a/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py b/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py
index d9e29c97..75561150 100644
--- a/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py
+++ b/google/cloud/dataproc_v1beta2/gapic/job_controller_client_config.py
@@ -2,56 +2,146 @@
"interfaces": {
"google.cloud.dataproc.v1beta2.JobController": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent2": [],
- "non_idempotent": ["UNAVAILABLE"],
+ "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+ "retry_policy_4_codes": ["UNAVAILABLE"],
+ "retry_policy_6_codes": ["UNAVAILABLE"],
+ "no_retry_codes": [],
+ "retry_policy_3_codes": [
+ "INTERNAL",
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_2_codes": ["UNAVAILABLE"],
+ "no_retry_1_codes": [],
+ "retry_policy_5_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
+ "retry_policy_7_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
},
"retry_params": {
- "default": {
+ "retry_policy_1_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
+ "max_rpc_timeout_millis": 600000,
"total_timeout_millis": 600000,
- }
+ },
+ "retry_policy_6_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_2_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_3_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 300000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 300000,
+ "total_timeout_millis": 300000,
+ },
+ "retry_policy_7_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
+ "retry_policy_5_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "retry_policy_4_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 900000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 900000,
+ "total_timeout_millis": 900000,
+ },
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ "no_retry_1_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
+ },
},
"methods": {
"SubmitJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
+ },
+ "SubmitJobAsOperation": {
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
},
"GetJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"ListJobs": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"UpdateJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
},
"CancelJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_5_codes",
+ "retry_params_name": "retry_policy_5_params",
},
"DeleteJob": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
- },
- "SubmitJobAsOperation": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent2",
- "retry_params_name": "default",
+ "timeout_millis": 900000,
+ "retry_codes_name": "retry_policy_4_codes",
+ "retry_params_name": "retry_policy_4_params",
},
},
}
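Reviewer note: for orientation, the status-code names used by the new retry policies above correspond to `google.api_core` exception classes roughly as follows (illustrative mapping only):

    from google.api_core import exceptions, retry

    # retry_policy_5_codes: DEADLINE_EXCEEDED, INTERNAL, UNAVAILABLE
    RETRY_POLICY_5_PREDICATE = retry.if_exception_type(
        exceptions.DeadlineExceeded,     # DEADLINE_EXCEEDED
        exceptions.InternalServerError,  # INTERNAL
        exceptions.ServiceUnavailable,   # UNAVAILABLE
    )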
diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py
index a0ecce49..1e41f971 100644
--- a/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py
+++ b/google/cloud/dataproc_v1beta2/gapic/transports/autoscaling_policy_service_grpc_transport.py
@@ -53,7 +53,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -74,7 +74,7 @@ def __init__(
self._stubs = {
"autoscaling_policy_service_stub": autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub(
channel
- )
+ ),
}
@classmethod
diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py
index 34c3484b..c8bbc15c 100644
--- a/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py
+++ b/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -73,7 +73,7 @@ def __init__(
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
- "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel)
+ "cluster_controller_stub": clusters_pb2_grpc.ClusterControllerStub(channel),
}
# Because this API includes a method that returns a
@@ -161,44 +161,44 @@ def delete_cluster(self):
return self._stubs["cluster_controller_stub"].DeleteCluster
@property
- def get_cluster(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`.
+ def diagnose_cluster(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`.
- Gets the resource representation for a cluster in a project.
+ Gets cluster diagnostic information. The returned
+ ``Operation.metadata`` will be
+ `ClusterOperationMetadata `__.
+ After the operation completes, ``Operation.response`` contains
+ ``Empty``.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].GetCluster
+ return self._stubs["cluster_controller_stub"].DiagnoseCluster
@property
- def list_clusters(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`.
+ def get_cluster(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.get_cluster`.
- Lists all regions/{region}/clusters in a project alphabetically.
+ Gets the resource representation for a cluster in a project.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].ListClusters
+ return self._stubs["cluster_controller_stub"].GetCluster
@property
- def diagnose_cluster(self):
- """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`.
+ def list_clusters(self):
+ """Return the gRPC stub for :meth:`ClusterControllerClient.list_clusters`.
- Gets cluster diagnostic information. The returned
- ``Operation.metadata`` will be
- `ClusterOperationMetadata `__.
- After the operation completes, ``Operation.response`` contains
- ``Empty``.
+ Lists all regions/{region}/clusters in a project alphabetically.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
- return self._stubs["cluster_controller_stub"].DiagnoseCluster
+ return self._stubs["cluster_controller_stub"].ListClusters
diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py
index d8162560..8b941307 100644
--- a/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py
+++ b/google/cloud/dataproc_v1beta2/gapic/transports/job_controller_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -72,7 +72,9 @@ def __init__(
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
- self._stubs = {"job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel)}
+ self._stubs = {
+ "job_controller_stub": jobs_pb2_grpc.JobControllerStub(channel),
+ }
# Because this API includes a method that returns a
# long-running operation (proto: google.longrunning.Operation),
@@ -126,6 +128,19 @@ def submit_job(self):
"""
return self._stubs["job_controller_stub"].SubmitJob
+ @property
+ def submit_job_as_operation(self):
+ """Return the gRPC stub for :meth:`JobControllerClient.submit_job_as_operation`.
+
+ Submits job to a cluster.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["job_controller_stub"].SubmitJobAsOperation
+
@property
def get_job(self):
"""Return the gRPC stub for :meth:`JobControllerClient.get_job`.
@@ -195,16 +210,3 @@ def delete_job(self):
deserialized response object.
"""
return self._stubs["job_controller_stub"].DeleteJob
-
- @property
- def submit_job_as_operation(self):
- """Return the gRPC stub for :meth:`JobControllerClient.submit_job_as_operation`.
-
- Submits job to a cluster.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["job_controller_stub"].SubmitJobAsOperation
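Reviewer note: the transport's properties (including the relocated `submit_job_as_operation` above) return the raw gRPC stub callables, so a request proto can be handed to them directly. A sketch using placeholder project, region, bucket, and cluster names, relying on default application credentials:

    from google.cloud.dataproc_v1beta2.gapic.transports import (
        job_controller_grpc_transport,
    )
    from google.cloud.dataproc_v1beta2.proto import jobs_pb2

    transport = job_controller_grpc_transport.JobControllerGrpcTransport(
        address="dataproc.googleapis.com:443",
    )
    request = jobs_pb2.SubmitJobRequest(
        project_id="my-project",
        region="us-central1",
        job=jobs_pb2.Job(
            placement=jobs_pb2.JobPlacement(cluster_name="my-cluster"),
            pyspark_job=jobs_pb2.PySparkJob(main_python_file_uri="gs://my-bucket/job.py"),
        ),
    )
    job = transport.submit_job(request)  # calls the SubmitJob stub directly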
diff --git a/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py b/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py
index 39b8c85e..d2738246 100644
--- a/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py
+++ b/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py
@@ -54,7 +54,7 @@ def __init__(
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
- "The `channel` and `credentials` arguments are mutually " "exclusive."
+ "The `channel` and `credentials` arguments are mutually " "exclusive.",
)
# Create the channel.
@@ -75,7 +75,7 @@ def __init__(
self._stubs = {
"workflow_template_service_stub": workflow_templates_pb2_grpc.WorkflowTemplateServiceStub(
channel
- )
+ ),
}
# Because this API includes a method that returns a
@@ -117,35 +117,6 @@ def channel(self):
"""
return self._channel
- @property
- def create_workflow_template(self):
- """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`.
-
- Creates new workflow template.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate
-
- @property
- def get_workflow_template(self):
- """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`.
-
- Retrieves the latest workflow template.
-
- Can retrieve previously instantiated template by specifying optional
- version parameter.
-
- Returns:
- Callable: A callable which accepts the appropriate
- deserialized request object and returns a
- deserialized response object.
- """
- return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate
-
@property
def instantiate_workflow_template(self):
"""Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.instantiate_workflow_template`.
@@ -208,6 +179,35 @@ def instantiate_inline_workflow_template(self):
"workflow_template_service_stub"
].InstantiateInlineWorkflowTemplate
+ @property
+ def create_workflow_template(self):
+ """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.create_workflow_template`.
+
+ Creates new workflow template.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["workflow_template_service_stub"].CreateWorkflowTemplate
+
+ @property
+ def get_workflow_template(self):
+ """Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.get_workflow_template`.
+
+ Retrieves the latest workflow template.
+
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+
+ Returns:
+ Callable: A callable which accepts the appropriate
+ deserialized request object and returns a
+ deserialized response object.
+ """
+ return self._stubs["workflow_template_service_stub"].GetWorkflowTemplate
+
@property
def update_workflow_template(self):
"""Return the gRPC stub for :meth:`WorkflowTemplateServiceClient.update_workflow_template`.
diff --git a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
index 8c13918b..b77b32e5 100644
--- a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
+++ b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py
@@ -53,7 +53,9 @@
from google.protobuf import field_mask_pb2
-_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dataproc").version
+_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
+ "google-cloud-dataproc",
+).version
class WorkflowTemplateServiceClient(object):
@@ -102,7 +104,7 @@ def location_path(cls, project, location):
def region_path(cls, project, region):
"""Return a fully-qualified region string."""
return google.api_core.path_template.expand(
- "projects/{project}/regions/{region}", project=project, region=region
+ "projects/{project}/regions/{region}", project=project, region=region,
)
@classmethod
@@ -202,12 +204,12 @@ def __init__(
self.transport = transport
else:
self.transport = workflow_template_service_grpc_transport.WorkflowTemplateServiceGrpcTransport(
- address=api_endpoint, channel=channel, credentials=credentials
+ address=api_endpoint, channel=channel, credentials=credentials,
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
- gapic_version=_GAPIC_LIBRARY_VERSION
+ gapic_version=_GAPIC_LIBRARY_VERSION,
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
@@ -218,7 +220,7 @@ def __init__(
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
- client_config["interfaces"][self._INTERFACE_NAME]
+ client_config["interfaces"][self._INTERFACE_NAME],
)
# Save a dictionary of cached API call functions.
@@ -228,183 +230,6 @@ def __init__(
self._inner_api_calls = {}
# Service calls
- def create_workflow_template(
- self,
- parent,
- template,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Creates new workflow template.
-
- Example:
- >>> from google.cloud import dataproc_v1beta2
- >>>
- >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
- >>>
- >>> parent = client.region_path('[PROJECT]', '[REGION]')
- >>>
- >>> # TODO: Initialize `template`:
- >>> template = {}
- >>>
- >>> response = client.create_workflow_template(parent, template)
-
- Args:
- parent (str): Required. The resource name of the region or location, as described
- in https://cloud.google.com/apis/design/resource_names.
-
- - For ``projects.regions.workflowTemplates,create``, the resource name
- of the region has the following format:
- ``projects/{project_id}/regions/{region}``
-
- - For ``projects.locations.workflowTemplates.create``, the resource
- name of the location has the following format:
- ``projects/{project_id}/locations/{location}``
- template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create.
-
- If a dict is provided, it must be of the same form as the protobuf
- message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate`
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "create_workflow_template" not in self._inner_api_calls:
- self._inner_api_calls[
- "create_workflow_template"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.create_workflow_template,
- default_retry=self._method_configs["CreateWorkflowTemplate"].retry,
- default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout,
- client_info=self._client_info,
- )
-
- request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
- parent=parent, template=template
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("parent", parent)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["create_workflow_template"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
- def get_workflow_template(
- self,
- name,
- version=None,
- retry=google.api_core.gapic_v1.method.DEFAULT,
- timeout=google.api_core.gapic_v1.method.DEFAULT,
- metadata=None,
- ):
- """
- Retrieves the latest workflow template.
-
- Can retrieve previously instantiated template by specifying optional
- version parameter.
-
- Example:
- >>> from google.cloud import dataproc_v1beta2
- >>>
- >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
- >>>
- >>> # TODO: Initialize `name`:
- >>> name = ''
- >>>
- >>> response = client.get_workflow_template(name)
-
- Args:
- name (str): Required. The resource name of the workflow template, as described
- in https://cloud.google.com/apis/design/resource_names.
-
- - For ``projects.regions.workflowTemplates.get``, the resource name of
- the template has the following format:
- ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
-
- - For ``projects.locations.workflowTemplates.get``, the resource name
- of the template has the following format:
- ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
- version (int): Optional. The version of workflow template to retrieve. Only previously
- instantiated versions can be retrieved.
-
- If unspecified, retrieves the current version.
- retry (Optional[google.api_core.retry.Retry]): A retry object used
- to retry requests. If ``None`` is specified, requests will
- be retried using a default configuration.
- timeout (Optional[float]): The amount of time, in seconds, to wait
- for the request to complete. Note that if ``retry`` is
- specified, the timeout applies to each individual attempt.
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
- that is provided to the method.
-
- Returns:
- A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
-
- Raises:
- google.api_core.exceptions.GoogleAPICallError: If the request
- failed for any reason.
- google.api_core.exceptions.RetryError: If the request failed due
- to a retryable error and retry attempts failed.
- ValueError: If the parameters are invalid.
- """
- # Wrap the transport method to add retry and timeout logic.
- if "get_workflow_template" not in self._inner_api_calls:
- self._inner_api_calls[
- "get_workflow_template"
- ] = google.api_core.gapic_v1.method.wrap_method(
- self.transport.get_workflow_template,
- default_retry=self._method_configs["GetWorkflowTemplate"].retry,
- default_timeout=self._method_configs["GetWorkflowTemplate"].timeout,
- client_info=self._client_info,
- )
-
- request = workflow_templates_pb2.GetWorkflowTemplateRequest(
- name=name, version=version
- )
- if metadata is None:
- metadata = []
- metadata = list(metadata)
- try:
- routing_header = [("name", name)]
- except AttributeError:
- pass
- else:
- routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
- routing_header
- )
- metadata.append(routing_metadata)
-
- return self._inner_api_calls["get_workflow_template"](
- request, retry=retry, timeout=timeout, metadata=metadata
- )
-
def instantiate_workflow_template(
self,
name,
@@ -685,6 +510,183 @@ def instantiate_inline_workflow_template(
metadata_type=workflow_templates_pb2.WorkflowMetadata,
)
+ def create_workflow_template(
+ self,
+ parent,
+ template,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Creates new workflow template.
+
+ Example:
+ >>> from google.cloud import dataproc_v1beta2
+ >>>
+ >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+ >>>
+ >>> parent = client.region_path('[PROJECT]', '[REGION]')
+ >>>
+ >>> # TODO: Initialize `template`:
+ >>> template = {}
+ >>>
+ >>> response = client.create_workflow_template(parent, template)
+
+ Args:
+ parent (str): Required. The resource name of the region or location, as described
+ in https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.workflowTemplates,create``, the resource name
+ of the region has the following format:
+ ``projects/{project_id}/regions/{region}``
+
+ - For ``projects.locations.workflowTemplates.create``, the resource
+ name of the location has the following format:
+ ``projects/{project_id}/locations/{location}``
+ template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create.
+
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate`
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "create_workflow_template" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "create_workflow_template"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.create_workflow_template,
+ default_retry=self._method_configs["CreateWorkflowTemplate"].retry,
+ default_timeout=self._method_configs["CreateWorkflowTemplate"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
+ parent=parent, template=template,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("parent", parent)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["create_workflow_template"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
+ def get_workflow_template(
+ self,
+ name,
+ version=None,
+ retry=google.api_core.gapic_v1.method.DEFAULT,
+ timeout=google.api_core.gapic_v1.method.DEFAULT,
+ metadata=None,
+ ):
+ """
+ Retrieves the latest workflow template.
+
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+
+ Example:
+ >>> from google.cloud import dataproc_v1beta2
+ >>>
+ >>> client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+ >>>
+ >>> # TODO: Initialize `name`:
+ >>> name = ''
+ >>>
+ >>> response = client.get_workflow_template(name)
+
+ Args:
+ name (str): Required. The resource name of the workflow template, as described
+ in https://cloud.google.com/apis/design/resource_names.
+
+ - For ``projects.regions.workflowTemplates.get``, the resource name of
+ the template has the following format:
+ ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}``
+
+ - For ``projects.locations.workflowTemplates.get``, the resource name
+ of the template has the following format:
+ ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}``
+ version (int): Optional. The version of workflow template to retrieve. Only previously
+ instantiated versions can be retrieved.
+
+ If unspecified, retrieves the current version.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will
+ be retried using a default configuration.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.dataproc_v1beta2.types.WorkflowTemplate` instance.
+
+ Raises:
+ google.api_core.exceptions.GoogleAPICallError: If the request
+ failed for any reason.
+ google.api_core.exceptions.RetryError: If the request failed due
+ to a retryable error and retry attempts failed.
+ ValueError: If the parameters are invalid.
+ """
+ # Wrap the transport method to add retry and timeout logic.
+ if "get_workflow_template" not in self._inner_api_calls:
+ self._inner_api_calls[
+ "get_workflow_template"
+ ] = google.api_core.gapic_v1.method.wrap_method(
+ self.transport.get_workflow_template,
+ default_retry=self._method_configs["GetWorkflowTemplate"].retry,
+ default_timeout=self._method_configs["GetWorkflowTemplate"].timeout,
+ client_info=self._client_info,
+ )
+
+ request = workflow_templates_pb2.GetWorkflowTemplateRequest(
+ name=name, version=version,
+ )
+ if metadata is None:
+ metadata = []
+ metadata = list(metadata)
+ try:
+ routing_header = [("name", name)]
+ except AttributeError:
+ pass
+ else:
+ routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+ routing_header
+ )
+ metadata.append(routing_metadata)
+
+ return self._inner_api_calls["get_workflow_template"](
+ request, retry=retry, timeout=timeout, metadata=metadata
+ )
+
def update_workflow_template(
self,
template,
@@ -744,7 +746,7 @@ def update_workflow_template(
)
request = workflow_templates_pb2.UpdateWorkflowTemplateRequest(
- template=template
+ template=template,
)
if metadata is None:
metadata = []
@@ -845,7 +847,7 @@ def list_workflow_templates(
)
request = workflow_templates_pb2.ListWorkflowTemplatesRequest(
- parent=parent, page_size=page_size
+ parent=parent, page_size=page_size,
)
if metadata is None:
metadata = []
@@ -938,7 +940,7 @@ def delete_workflow_template(
)
request = workflow_templates_pb2.DeleteWorkflowTemplateRequest(
- name=name, version=version
+ name=name, version=version,
)
if metadata is None:
metadata = []
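Editor's note: both relocated methods append an `x-goog-request-params` routing header before invoking the wrapped call, as visible in the hunks above. A small sketch of what that helper produces, with a hypothetical template name, purely to illustrate the mechanism:

    from google.api_core.gapic_v1 import routing_header

    name = "projects/my-project/regions/us-central1/workflowTemplates/my-template"  # hypothetical
    metadata = [("x-custom-header", "value")]
    metadata.append(routing_header.to_grpc_metadata([("name", name)]))
    # metadata now ends with ("x-goog-request-params", "name=projects%2Fmy-project%2F..."),
    # which the Dataproc frontend uses to route the request.
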
diff --git a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py
index e9e1031a..b086ceb1 100644
--- a/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py
+++ b/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client_config.py
@@ -2,55 +2,78 @@
"interfaces": {
"google.cloud.dataproc.v1beta2.WorkflowTemplateService": {
"retry_codes": {
- "idempotent": ["DEADLINE_EXCEEDED", "INTERNAL", "UNAVAILABLE"],
- "non_idempotent": ["UNAVAILABLE"],
+ "retry_policy_6_codes": ["UNAVAILABLE"],
+ "no_retry_codes": [],
+ "retry_policy_7_codes": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE",
+ ],
},
"retry_params": {
- "default": {
+ "retry_policy_6_params": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
- "initial_rpc_timeout_millis": 20000,
+ "initial_rpc_timeout_millis": 600000,
"rpc_timeout_multiplier": 1.0,
- "max_rpc_timeout_millis": 20000,
+ "max_rpc_timeout_millis": 600000,
"total_timeout_millis": 600000,
- }
- },
- "methods": {
- "CreateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
},
- "GetWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "retry_policy_7_params": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 600000,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 600000,
+ "total_timeout_millis": 600000,
},
+ "no_retry_params": {
+ "initial_retry_delay_millis": 0,
+ "retry_delay_multiplier": 0.0,
+ "max_retry_delay_millis": 0,
+ "initial_rpc_timeout_millis": 0,
+ "rpc_timeout_multiplier": 1.0,
+ "max_rpc_timeout_millis": 0,
+ "total_timeout_millis": 0,
+ },
+ },
+ "methods": {
"InstantiateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
},
"InstantiateInlineWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
+ },
+ "CreateWorkflowTemplate": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
+ },
+ "GetWorkflowTemplate": {
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
},
"UpdateWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
},
"ListWorkflowTemplates": {
- "timeout_millis": 60000,
- "retry_codes_name": "idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_7_codes",
+ "retry_params_name": "retry_policy_7_params",
},
"DeleteWorkflowTemplate": {
- "timeout_millis": 60000,
- "retry_codes_name": "non_idempotent",
- "retry_params_name": "default",
+ "timeout_millis": 600000,
+ "retry_codes_name": "retry_policy_6_codes",
+ "retry_params_name": "retry_policy_6_params",
},
},
}
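Editor's note: the regenerated config replaces the shared "default" retry params with per-policy blocks; retry_policy_7 (used by GetWorkflowTemplate and ListWorkflowTemplates) retries DEADLINE_EXCEEDED, INTERNAL and UNAVAILABLE with a 600 s total timeout. An approximate translation of those millisecond fields into a `google.api_core.retry.Retry` object, shown only to make the numbers concrete (the generated config also carries per-RPC timeouts not reproduced here):

    from google.api_core import exceptions, retry

    # Roughly retry_policy_7_params / retry_policy_7_codes.
    retry_policy_7 = retry.Retry(
        predicate=retry.if_exception_type(
            exceptions.DeadlineExceeded,
            exceptions.InternalServerError,
            exceptions.ServiceUnavailable,
        ),
        initial=0.1,      # initial_retry_delay_millis = 100
        multiplier=1.3,   # retry_delay_multiplier
        maximum=60.0,     # max_retry_delay_millis = 60000
        deadline=600.0,   # total_timeout_millis = 600000
    )
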
diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py
index 6efb3bae..ac8b00ac 100644
--- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py
@@ -157,7 +157,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=274,
serialized_end=843,
@@ -506,7 +506,7 @@
serialized_options=b"\340A\002\372A+\n)dataproc.googleapis.com/AutoscalingPolicy",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -547,7 +547,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -588,7 +588,7 @@
serialized_options=b"\340A\002\372A+\n)dataproc.googleapis.com/AutoscalingPolicy",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -811,6 +811,7 @@
"DESCRIPTOR": _AUTOSCALINGPOLICY,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """Describes an autoscaling policy for Dataproc cluster autoscaler.
+
Attributes:
id:
Required. The policy id. The id must contain only letters
@@ -848,6 +849,7 @@
"DESCRIPTOR": _BASICAUTOSCALINGALGORITHM,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """Basic algorithm for autoscaling.
+
Attributes:
yarn_config:
Required. YARN autoscaling configuration.
@@ -868,6 +870,7 @@
"DESCRIPTOR": _BASICYARNAUTOSCALINGCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """Basic autoscaling configurations for YARN.
+
Attributes:
graceful_decommission_timeout:
Required. Timeout for YARN graceful decommissioning of Node
@@ -919,6 +922,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """Configuration for the size bounds of an instance group, including its
proportional size to other groups.
+
Attributes:
min_instances:
Optional. Minimum number of instances for this group. Primary
@@ -963,6 +967,7 @@
"DESCRIPTOR": _CREATEAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A request to create an autoscaling policy.
+
Attributes:
parent:
Required. The “resource name” of the region or location, as
@@ -989,6 +994,7 @@
"DESCRIPTOR": _GETAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A request to fetch an autoscaling policy.
+
Attributes:
name:
Required. The “resource name” of the autoscaling policy, as
@@ -1014,6 +1020,7 @@
"DESCRIPTOR": _UPDATEAUTOSCALINGPOLICYREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A request to update an autoscaling policy.
+
Attributes:
policy:
Required. The updated autoscaling policy.
@@ -1031,6 +1038,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A request to delete an autoscaling policy. Autoscaling policies in
use by one or more clusters will not be deleted.
+
Attributes:
name:
Required. The “resource name” of the autoscaling policy, as
@@ -1056,6 +1064,7 @@
"DESCRIPTOR": _LISTAUTOSCALINGPOLICIESREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A request to list autoscaling policies in a project.
+
Attributes:
parent:
Required. The “resource name” of the region or location, as
@@ -1086,6 +1095,7 @@
"DESCRIPTOR": _LISTAUTOSCALINGPOLICIESRESPONSE,
"__module__": "google.cloud.dataproc_v1beta2.proto.autoscaling_policies_pb2",
"__doc__": """A response to a request to list autoscaling policies in a project.
+
Attributes:
policies:
Output only. Autoscaling policies list.
diff --git a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py
index 0163633a..44c55e58 100644
--- a/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1beta2.proto import (
@@ -9,15 +10,15 @@
class AutoscalingPolicyServiceStub(object):
"""The API interface for managing autoscaling policies in the
- Cloud Dataproc API.
- """
+ Cloud Dataproc API.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateAutoscalingPolicy = channel.unary_unary(
"/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/CreateAutoscalingPolicy",
request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.CreateAutoscalingPolicyRequest.SerializeToString,
@@ -47,12 +48,12 @@ def __init__(self, channel):
class AutoscalingPolicyServiceServicer(object):
"""The API interface for managing autoscaling policies in the
- Cloud Dataproc API.
- """
+ Cloud Dataproc API.
+ """
def CreateAutoscalingPolicy(self, request, context):
"""Creates new autoscaling policy.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -60,31 +61,31 @@ def CreateAutoscalingPolicy(self, request, context):
def UpdateAutoscalingPolicy(self, request, context):
"""Updates (replaces) autoscaling policy.
- Disabled check for update_mask, because all updates will be full
- replacements.
- """
+ Disabled check for update_mask, because all updates will be full
+ replacements.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetAutoscalingPolicy(self, request, context):
"""Retrieves autoscaling policy.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListAutoscalingPolicies(self, request, context):
"""Lists autoscaling policies in the project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteAutoscalingPolicy(self, request, context):
"""Deletes an autoscaling policy. It is an error to delete an autoscaling
- policy that is in use by one or more clusters.
- """
+ policy that is in use by one or more clusters.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -122,3 +123,145 @@ def add_AutoscalingPolicyServiceServicer_to_server(servicer, server):
"google.cloud.dataproc.v1beta2.AutoscalingPolicyService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class AutoscalingPolicyService(object):
+ """The API interface for managing autoscaling policies in the
+ Cloud Dataproc API.
+ """
+
+ @staticmethod
+ def CreateAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/CreateAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.CreateAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/UpdateAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.UpdateAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/GetAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.GetAutoscalingPolicyRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.AutoscalingPolicy.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListAutoscalingPolicies(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/ListAutoscalingPolicies",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.ListAutoscalingPoliciesRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.ListAutoscalingPoliciesResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteAutoscalingPolicy(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.AutoscalingPolicyService/DeleteAutoscalingPolicy",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_autoscaling__policies__pb2.DeleteAutoscalingPolicyRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
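Editor's note: besides re-indenting the docstrings, the regeneration appends an experimental `AutoscalingPolicyService` convenience class. For orientation, the existing stub-based path that those static methods shortcut looks roughly like the sketch below; a real call would also need OAuth call credentials, which are omitted here, and the policy name is hypothetical:

    import grpc
    from google.cloud.dataproc_v1beta2.proto import (
        autoscaling_policies_pb2,
        autoscaling_policies_pb2_grpc,
    )

    channel = grpc.secure_channel(
        "dataproc.googleapis.com:443", grpc.ssl_channel_credentials()
    )
    stub = autoscaling_policies_pb2_grpc.AutoscalingPolicyServiceStub(channel)
    request = autoscaling_policies_pb2.GetAutoscalingPolicyRequest(
        name="projects/my-project/regions/us-central1/autoscalingPolicies/my-policy"
    )
    # policy = stub.GetAutoscalingPolicy(request)  # requires authenticated credentials in practice
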
diff --git a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py
index 88cecb06..7e0a1364 100644
--- a/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py
@@ -444,7 +444,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTER_LABELSENTRY],
+ nested_types=[_CLUSTER_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -810,10 +810,10 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
- nested_types=[_GKECLUSTERCONFIG_NAMESPACEDGKEDEPLOYMENTTARGET],
+ nested_types=[_GKECLUSTERCONFIG_NAMESPACEDGKEDEPLOYMENTTARGET,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -932,7 +932,7 @@
),
],
extensions=[],
- nested_types=[_ENDPOINTCONFIG_HTTPPORTSENTRY],
+ nested_types=[_ENDPOINTCONFIG_HTTPPORTSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -970,7 +970,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1011,7 +1011,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1266,7 +1266,7 @@
),
],
extensions=[],
- nested_types=[_GCECLUSTERCONFIG_METADATAENTRY],
+ nested_types=[_GCECLUSTERCONFIG_METADATAENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1770,7 +1770,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=3610,
serialized_end=3869,
@@ -1803,7 +1803,7 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -2272,7 +2272,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE],
+ enum_types=[_CLUSTERSTATUS_STATE, _CLUSTERSTATUS_SUBSTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -2409,7 +2409,7 @@
),
],
extensions=[],
- nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY],
+ nested_types=[_SOFTWARECONFIG_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2587,7 +2587,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY],
+ nested_types=[_CLUSTERMETRICS_HDFSMETRICSENTRY, _CLUSTERMETRICS_YARNMETRICSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -3330,7 +3330,7 @@
serialized_options=b"\340A\003",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -3413,7 +3413,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_RESERVATIONAFFINITY_TYPE],
+ enum_types=[_RESERVATIONAFFINITY_TYPE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -3576,6 +3576,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Describes the identifying information, config, and status of a cluster
of Compute Engine instances.
+
Attributes:
project_id:
Required. The Google Cloud Platform project ID that the
@@ -3620,6 +3621,7 @@
"DESCRIPTOR": _CLUSTERCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The cluster config.
+
Attributes:
config_bucket:
Optional. A Cloud Storage bucket used to stage job
@@ -3692,6 +3694,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A full, namespace-isolated deployment target for an existing GKE
cluster.
+
Attributes:
target_gke_cluster:
Optional. The target GKE cluster to deploy to. Format: ‘projec
@@ -3705,6 +3708,7 @@
"DESCRIPTOR": _GKECLUSTERCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The GKE config for this cluster.
+
Attributes:
namespaced_gke_deployment_target:
Optional. A target for the deployment.
@@ -3731,6 +3735,7 @@
"DESCRIPTOR": _ENDPOINTCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Endpoint config for this cluster
+
Attributes:
http_ports:
Output only. The map of port descriptions to URLs. Will only
@@ -3752,6 +3757,7 @@
"DESCRIPTOR": _AUTOSCALINGCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Autoscaling Policy config associated with the cluster.
+
Attributes:
policy_uri:
Optional. The autoscaling policy used by the cluster. Only
@@ -3774,6 +3780,7 @@
"DESCRIPTOR": _ENCRYPTIONCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Encryption settings for the cluster.
+
Attributes:
gce_pd_kms_key_name:
Optional. The Cloud KMS key name to use for PD disk encryption
@@ -3801,6 +3808,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Common config settings for resources of Compute Engine cluster
instances, applicable to all instances in the cluster.
+
Attributes:
zone_uri:
Optional. The zone where the Compute Engine cluster will be
@@ -3892,6 +3900,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The config settings for Compute Engine resources in an instance group,
such as a master or worker group.
+
Attributes:
num_instances:
Optional. The number of VM instances in the instance group.
@@ -3955,6 +3964,7 @@
"DESCRIPTOR": _MANAGEDGROUPCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies the resources used to actively manage an instance group.
+
Attributes:
instance_template_name:
Output only. The name of the Instance Template used for the
@@ -3977,6 +3987,7 @@
"__doc__": """Specifies the type and number of accelerator cards attached to the
instances of an instance group (see `GPUs on Compute Engine
`__).
+
Attributes:
accelerator_type_uri:
Full URL, partial URI, or short name of the accelerator type
@@ -4009,6 +4020,7 @@
"DESCRIPTOR": _DISKCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies the config of disk options for a group of VM instances.
+
Attributes:
boot_disk_type:
Optional. Type of the boot disk (default is “pd-standard”).
@@ -4036,6 +4048,7 @@
"DESCRIPTOR": _LIFECYCLECONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies the cluster auto-delete schedule configuration.
+
Attributes:
idle_delete_ttl:
Optional. The duration to keep the cluster alive while idling
@@ -4077,6 +4090,7 @@
"DESCRIPTOR": _SECURITYCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Security related configuration, including encryption, Kerberos, etc.
+
Attributes:
kerberos_config:
Kerberos related configuration.
@@ -4093,6 +4107,7 @@
"DESCRIPTOR": _KERBEROSCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies Kerberos related configuration.
+
Attributes:
enable_kerberos:
Optional. Flag to indicate whether to Kerberize the cluster
@@ -4166,6 +4181,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies an executable to run on a fully configured node and a
timeout period for executable completion.
+
Attributes:
executable_file:
Required. Cloud Storage URI of executable file.
@@ -4190,6 +4206,7 @@
"DESCRIPTOR": _CLUSTERSTATUS,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The status of a cluster and its instances.
+
Attributes:
state:
Output only. The cluster’s state.
@@ -4225,6 +4242,7 @@
"DESCRIPTOR": _SOFTWARECONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Specifies the selection and config of software inside the cluster.
+
Attributes:
image_version:
Optional. The version of software inside the cluster. It must
@@ -4284,6 +4302,7 @@
"__doc__": """Contains cluster daemon metrics, such as HDFS and YARN stats. **Beta
Feature**: This report is available for testing purposes only. It may
be changed before final release.
+
Attributes:
hdfs_metrics:
The HDFS metrics.
@@ -4304,6 +4323,7 @@
"DESCRIPTOR": _CREATECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A request to create a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4337,6 +4357,7 @@
"DESCRIPTOR": _UPDATECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A request to update a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project the
@@ -4428,6 +4449,7 @@
"DESCRIPTOR": _DELETECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A request to delete a cluster.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4465,6 +4487,7 @@
"DESCRIPTOR": _GETCLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Request to get the resource representation for a cluster in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4486,6 +4509,7 @@
"DESCRIPTOR": _LISTCLUSTERSREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A request to list the clusters in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4526,6 +4550,7 @@
"DESCRIPTOR": _LISTCLUSTERSRESPONSE,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The list of all clusters in a project.
+
Attributes:
clusters:
Output only. The clusters in the project.
@@ -4547,6 +4572,7 @@
"DESCRIPTOR": _DIAGNOSECLUSTERREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """A request to collect cluster diagnostic information.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4568,6 +4594,7 @@
"DESCRIPTOR": _DIAGNOSECLUSTERRESULTS,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """The location of diagnostic output.
+
Attributes:
output_uri:
Output only. The Cloud Storage URI of the diagnostic output.
@@ -4586,6 +4613,7 @@
"DESCRIPTOR": _RESERVATIONAFFINITY,
"__module__": "google.cloud.dataproc_v1beta2.proto.clusters_pb2",
"__doc__": """Reservation Affinity for consuming Zonal reservation.
+
Attributes:
consume_reservation_type:
Optional. Type of reservation to consume
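Editor's note: the changes above are docstring-only (a blank line before each message's ``Attributes:`` block) plus trailing commas in the descriptors. For orientation, a minimal sketch of composing these generated messages into a create request; the project, names, and sizes are hypothetical:

    from google.cloud.dataproc_v1beta2.proto import clusters_pb2

    cluster = clusters_pb2.Cluster(
        project_id="my-project",
        cluster_name="example-cluster",
        config=clusters_pb2.ClusterConfig(
            gce_cluster_config=clusters_pb2.GceClusterConfig(zone_uri="us-central1-a"),
            master_config=clusters_pb2.InstanceGroupConfig(num_instances=1),
            worker_config=clusters_pb2.InstanceGroupConfig(num_instances=2),
        ),
    )
    request = clusters_pb2.CreateClusterRequest(
        project_id="my-project", region="us-central1", cluster=cluster,
    )
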
diff --git a/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py
index 4ad03d3c..e5b15eec 100644
--- a/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1beta2.proto import (
@@ -11,15 +12,15 @@
class ClusterControllerStub(object):
"""The ClusterControllerService provides methods to manage clusters
- of Compute Engine instances.
- """
+ of Compute Engine instances.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateCluster = channel.unary_unary(
"/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster",
request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString,
@@ -54,59 +55,59 @@ def __init__(self, channel):
class ClusterControllerServicer(object):
"""The ClusterControllerService provides methods to manage clusters
- of Compute Engine instances.
- """
+ of Compute Engine instances.
+ """
def CreateCluster(self, request, context):
"""Creates a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCluster(self, request, context):
"""Updates a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteCluster(self, request, context):
"""Deletes a cluster in a project. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCluster(self, request, context):
"""Gets the resource representation for a cluster in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListClusters(self, request, context):
"""Lists all regions/{region}/clusters in a project alphabetically.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DiagnoseCluster(self, request, context):
"""Gets cluster diagnostic information. The returned
- [Operation.metadata][google.longrunning.Operation.metadata] will be
- [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
- After the operation completes,
- [Operation.response][google.longrunning.Operation.response]
- contains
- [Empty][google.protobuf.Empty].
- """
+ [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata).
+ After the operation completes,
+ [Operation.response][google.longrunning.Operation.response]
+ contains
+ [Empty][google.protobuf.Empty].
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -149,3 +150,172 @@ def add_ClusterControllerServicer_to_server(servicer, server):
"google.cloud.dataproc.v1beta2.ClusterController", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class ClusterController(object):
+ """The ClusterControllerService provides methods to manage clusters
+ of Compute Engine instances.
+ """
+
+ @staticmethod
+ def CreateCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/CreateCluster",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.CreateClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/UpdateCluster",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.UpdateClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/DeleteCluster",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DeleteClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/GetCluster",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.GetClusterRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.Cluster.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListClusters(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/ListClusters",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.ListClustersResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DiagnoseCluster(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.ClusterController/DiagnoseCluster",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DiagnoseClusterRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
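Editor's note: the same experimental single-call surface is added for `ClusterController`. A hedged sketch of how `ListClusters` could be invoked through it, assuming the installed grpcio exposes `grpc.experimental` and that authenticated call credentials are supplied separately (the call itself is left commented out because it needs them):

    import grpc
    from google.cloud.dataproc_v1beta2.proto import clusters_pb2, clusters_pb2_grpc

    request = clusters_pb2.ListClustersRequest(
        project_id="my-project", region="us-central1"  # hypothetical values
    )
    # response = clusters_pb2_grpc.ClusterController.ListClusters(
    #     request,
    #     "dataproc.googleapis.com:443",
    #     channel_credentials=grpc.ssl_channel_credentials(),
    #     timeout=30,
    # )
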
diff --git a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py
index f7737363..c8affffc 100644
--- a/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py
@@ -483,11 +483,11 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
- nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY],
- enum_types=[_LOGGINGCONFIG_LEVEL],
+ nested_types=[_LOGGINGCONFIG_DRIVERLOGLEVELSENTRY,],
+ enum_types=[_LOGGINGCONFIG_LEVEL,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -719,7 +719,7 @@
),
],
extensions=[],
- nested_types=[_HADOOPJOB_PROPERTIESENTRY],
+ nested_types=[_HADOOPJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -733,7 +733,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=637,
serialized_end=1016,
@@ -961,7 +961,7 @@
),
],
extensions=[],
- nested_types=[_SPARKJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -975,7 +975,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1019,
serialized_end=1396,
@@ -1203,7 +1203,7 @@
),
],
extensions=[],
- nested_types=[_PYSPARKJOB_PROPERTIESENTRY],
+ nested_types=[_PYSPARKJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1241,7 +1241,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1498,7 +1498,7 @@
),
],
extensions=[],
- nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY],
+ nested_types=[_HIVEJOB_SCRIPTVARIABLESENTRY, _HIVEJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1512,7 +1512,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1823,
serialized_end=2275,
@@ -1761,7 +1761,7 @@
),
],
extensions=[],
- nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKSQLJOB_SCRIPTVARIABLESENTRY, _SPARKSQLJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1775,7 +1775,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2278,
serialized_end=2783,
@@ -2043,7 +2043,7 @@
),
],
extensions=[],
- nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY],
+ nested_types=[_PIGJOB_SCRIPTVARIABLESENTRY, _PIGJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2057,7 +2057,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2786,
serialized_end=3310,
@@ -2247,7 +2247,7 @@
),
],
extensions=[],
- nested_types=[_SPARKRJOB_PROPERTIESENTRY],
+ nested_types=[_SPARKRJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2461,7 +2461,7 @@
),
],
extensions=[],
- nested_types=[_PRESTOJOB_PROPERTIESENTRY],
+ nested_types=[_PRESTOJOB_PROPERTIESENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2475,7 +2475,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=3636,
serialized_end=4045,
@@ -2629,7 +2629,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE],
+ enum_types=[_JOBSTATUS_STATE, _JOBSTATUS_SUBSTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -2787,7 +2787,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_YARNAPPLICATION_STATE],
+ enum_types=[_YARNAPPLICATION_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -3247,7 +3247,7 @@
),
],
extensions=[],
- nested_types=[_JOB_LABELSENTRY],
+ nested_types=[_JOB_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -3261,7 +3261,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=4964,
serialized_end=6217,
@@ -3294,7 +3294,7 @@
serialized_options=b"\340A\001",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -3728,7 +3728,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER],
+ enum_types=[_LISTJOBSREQUEST_JOBSTATEMATCHER,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -4279,6 +4279,7 @@
"DESCRIPTOR": _LOGGINGCONFIG,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """The runtime logging config of the job.
+
Attributes:
driver_log_levels:
The per-package log levels for the driver. This may include
@@ -4311,6 +4312,7 @@
client/hadoop-mapreduce-client-core/MapReduceTutorial.html>`__ jobs on
`Apache Hadoop YARN `__.
+
Attributes:
driver:
Required. Indicates the location of the driver’s main class.
@@ -4379,6 +4381,7 @@
class or the main class name. To pass both a main jar and a main class
in that jar, add the jar to ``CommonJob.jar_file_uris``, and then
specify the main class name in ``main_class``.
+
Attributes:
main_jar_file_uri:
The HCFS URI of the jar file that contains the main class.
@@ -4435,6 +4438,7 @@ class or the main class name. To pass both a main jar and a main class
"__doc__": """A Dataproc job for running `Apache PySpark
`__
applications on YARN.
+
Attributes:
main_python_file_uri:
Required. The HCFS URI of the main Python file to use as the
@@ -4479,6 +4483,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _QUERYLIST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A list of queries to run on a cluster.
+
Attributes:
queries:
Required. The queries to execute. You do not need to terminate
@@ -4520,6 +4525,7 @@ class or the main class name. To pass both a main jar and a main class
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Hive `__
queries on YARN.
+
Attributes:
queries:
Required. The sequence of Hive queries to execute, specified
@@ -4579,6 +4585,7 @@ class or the main class name. To pass both a main jar and a main class
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Spark SQL
`__ queries.
+
Attributes:
queries:
Required. The sequence of Spark SQL queries to execute,
@@ -4633,6 +4640,7 @@ class or the main class name. To pass both a main jar and a main class
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A Dataproc job for running `Apache Pig `__
queries on YARN.
+
Attributes:
queries:
Required. The sequence of Pig queries to execute, specified as
@@ -4686,6 +4694,7 @@ class or the main class name. To pass both a main jar and a main class
"__doc__": """A Dataproc job for running `Apache SparkR
`__ applications on
YARN.
+
Attributes:
main_r_file_uri:
Required. The HCFS URI of the main R file to use as the
@@ -4738,6 +4747,7 @@ class or the main class name. To pass both a main jar and a main class
`__
must be enabled when the cluster is created to submit a Presto job to
the cluster.
+
Attributes:
queries:
Required. The sequence of Presto queries to execute, specified
@@ -4776,6 +4786,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOBPLACEMENT,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """Dataproc job config.
+
Attributes:
cluster_name:
Required. The name of the cluster where the job will be
@@ -4796,6 +4807,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOBSTATUS,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """Dataproc job status.
+
Attributes:
state:
Output only. A state message specifying the overall job state.
@@ -4820,6 +4832,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOBREFERENCE,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """Encapsulates the full scoping used to reference a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -4847,6 +4860,7 @@ class or the main class name. To pass both a main jar and a main class
org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.
**Beta Feature**: This report is available for testing purposes only.
It may be changed before final release.
+
Attributes:
name:
Output only. The application name.
@@ -4882,6 +4896,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOB,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A Dataproc job resource.
+
Attributes:
reference:
Optional. The fully qualified reference to the job, which can
@@ -4965,6 +4980,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOBSCHEDULING,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """Job scheduling options.
+
Attributes:
max_failures_per_hour:
Optional. Maximum number of times per hour a driver may be
@@ -4985,6 +5001,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _JOBMETADATA,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """Job Operation metadata.
+
Attributes:
job_id:
Output only. The job id.
@@ -5007,6 +5024,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _SUBMITJOBREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to submit a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5039,6 +5057,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _GETJOBREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to get the resource representation for a job in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5060,6 +5079,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _LISTJOBSREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to list jobs in a project.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5102,6 +5122,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _UPDATEJOBREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to update a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5131,6 +5152,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _LISTJOBSRESPONSE,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A list of jobs in a project.
+
Attributes:
jobs:
Output only. Jobs list.
@@ -5152,6 +5174,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _CANCELJOBREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to cancel a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
@@ -5173,6 +5196,7 @@ class or the main class name. To pass both a main jar and a main class
"DESCRIPTOR": _DELETEJOBREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.jobs_pb2",
"__doc__": """A request to delete a job.
+
Attributes:
project_id:
Required. The ID of the Google Cloud Platform project that the
diff --git a/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py
index 006838a9..f1a19a55 100644
--- a/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/jobs_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1beta2.proto import (
@@ -12,14 +13,14 @@
class JobControllerStub(object):
"""The JobController provides methods to manage jobs.
- """
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.SubmitJob = channel.unary_unary(
"/google.cloud.dataproc.v1beta2.JobController/SubmitJob",
request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
@@ -59,58 +60,58 @@ def __init__(self, channel):
class JobControllerServicer(object):
"""The JobController provides methods to manage jobs.
- """
+ """
def SubmitJob(self, request, context):
"""Submits a job to a cluster.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def SubmitJobAsOperation(self, request, context):
"""Submits job to a cluster.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetJob(self, request, context):
"""Gets the resource representation for a job in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListJobs(self, request, context):
"""Lists regions/{region}/jobs in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateJob(self, request, context):
"""Updates a job in a project.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CancelJob(self, request, context):
"""Starts a job cancellation request. To access the job resource
- after cancellation, call
- [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list)
- or
- [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get).
- """
+ after cancellation, call
+ [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/list)
+ or
+ [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1beta2/projects.regions.jobs/get).
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteJob(self, request, context):
"""Deletes the job from the project. If the job is active, the delete fails,
- and the response returns `FAILED_PRECONDITION`.
- """
+ and the response returns `FAILED_PRECONDITION`.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -158,3 +159,198 @@ def add_JobControllerServicer_to_server(servicer, server):
"google.cloud.dataproc.v1beta2.JobController", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class JobController(object):
+ """The JobController provides methods to manage jobs.
+ """
+
+ @staticmethod
+ def SubmitJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/SubmitJob",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def SubmitJobAsOperation(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/SubmitJobAsOperation",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.SubmitJobRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/GetJob",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.GetJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListJobs(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/ListJobs",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.ListJobsResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/UpdateJob",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.UpdateJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def CancelJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/CancelJob",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.CancelJobRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.Job.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteJob(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.JobController/DeleteJob",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DeleteJobRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
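A minimal, hypothetical sketch of calling the regenerated JobController surface through the classic stub defined in this module (the experimental static helpers added above use the same request/response types). The endpoint and identifiers are assumptions for illustration only; real calls normally go through the higher-level dataproc_v1beta2 client with proper TLS and credentials.

    import grpc
    from google.cloud.dataproc_v1beta2.proto import jobs_pb2, jobs_pb2_grpc

    # Assumed local endpoint; a production channel would use TLS and auth.
    channel = grpc.insecure_channel("localhost:8080")
    stub = jobs_pb2_grpc.JobControllerStub(channel)

    request = jobs_pb2.GetJobRequest(
        project_id="my-project", region="global", job_id="my-job"  # assumed values
    )
    job = stub.GetJob(request)
    print(job.status.state)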
diff --git a/google/cloud/dataproc_v1beta2/proto/operations_pb2.py b/google/cloud/dataproc_v1beta2/proto/operations_pb2.py
index e1d8212b..a4187389 100644
--- a/google/cloud/dataproc_v1beta2/proto/operations_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/operations_pb2.py
@@ -165,7 +165,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_CLUSTEROPERATIONSTATUS_STATE],
+ enum_types=[_CLUSTEROPERATIONSTATUS_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -397,7 +397,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY],
+ nested_types=[_CLUSTEROPERATIONMETADATA_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -436,6 +436,7 @@
"DESCRIPTOR": _CLUSTEROPERATIONSTATUS,
"__module__": "google.cloud.dataproc_v1beta2.proto.operations_pb2",
"__doc__": """The status of the operation.
+
Attributes:
state:
Output only. A message containing the operation state.
@@ -469,6 +470,7 @@
"DESCRIPTOR": _CLUSTEROPERATIONMETADATA,
"__module__": "google.cloud.dataproc_v1beta2.proto.operations_pb2",
"__doc__": """Metadata describing the operation.
+
Attributes:
cluster_name:
Output only. Name of the cluster for the operation.
diff --git a/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py
index 07cb78fe..8a939394 100644
--- a/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/operations_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/dataproc_v1beta2/proto/shared_pb2.py b/google/cloud/dataproc_v1beta2/proto/shared_pb2.py
index a0e553be..136a7be9 100644
--- a/google/cloud/dataproc_v1beta2/proto/shared_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/shared_pb2.py
@@ -23,7 +23,7 @@
serialized_options=b"\n!com.google.cloud.dataproc.v1beta2B\013SharedProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc",
create_key=_descriptor._internal_create_key,
serialized_pb=b"\n0google/cloud/dataproc_v1beta2/proto/shared.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto*\xc2\x01\n\tComponent\x12\x19\n\x15\x43OMPONENT_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x41NACONDA\x10\x05\x12\n\n\x06\x44OCKER\x10\r\x12\t\n\x05\x44RUID\x10\t\x12\t\n\x05\x46LINK\x10\x0e\x12\x10\n\x0cHIVE_WEBHCAT\x10\x03\x12\x0b\n\x07JUPYTER\x10\x01\x12\x0c\n\x08KERBEROS\x10\x07\x12\n\n\x06PRESTO\x10\x06\x12\n\n\x06RANGER\x10\x0c\x12\x08\n\x04SOLR\x10\n\x12\x0c\n\x08ZEPPELIN\x10\x04\x12\r\n\tZOOKEEPER\x10\x08\x42y\n!com.google.cloud.dataproc.v1beta2B\x0bSharedProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3",
- dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR],
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,],
)
_COMPONENT = _descriptor.EnumDescriptor(
diff --git a/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py
index 07cb78fe..8a939394 100644
--- a/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/shared_pb2_grpc.py
@@ -1,2 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py
index 78119700..4f61fcb6 100644
--- a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py
+++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py
@@ -401,7 +401,7 @@
),
],
extensions=[],
- nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY],
+ nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY,],
enum_types=[],
serialized_options=b"\352A\306\001\n(dataproc.googleapis.com/WorkflowTemplate\022Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\022Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \001",
is_extendable=False,
@@ -475,7 +475,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1140,
serialized_end=1330,
@@ -608,7 +608,7 @@
),
],
extensions=[],
- nested_types=[_MANAGEDCLUSTER_LABELSENTRY],
+ nested_types=[_MANAGEDCLUSTER_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -727,7 +727,7 @@
),
],
extensions=[],
- nested_types=[_CLUSTERSELECTOR_CLUSTERLABELSENTRY],
+ nested_types=[_CLUSTERSELECTOR_CLUSTERLABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1036,7 +1036,7 @@
),
],
extensions=[],
- nested_types=[_ORDEREDJOB_LABELSENTRY],
+ nested_types=[_ORDEREDJOB_LABELSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -1050,7 +1050,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=1737,
serialized_end=2521,
@@ -1217,7 +1217,7 @@
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[],
- )
+ ),
],
serialized_start=2669,
serialized_end=2840,
@@ -1250,7 +1250,7 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1291,7 +1291,7 @@
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1584,8 +1584,8 @@
),
],
extensions=[],
- nested_types=[_WORKFLOWMETADATA_PARAMETERSENTRY],
- enum_types=[_WORKFLOWMETADATA_STATE],
+ nested_types=[_WORKFLOWMETADATA_PARAMETERSENTRY,],
+ enum_types=[_WORKFLOWMETADATA_STATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -1701,7 +1701,7 @@
serialized_options=b"\340A\003",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -1822,7 +1822,7 @@
],
extensions=[],
nested_types=[],
- enum_types=[_WORKFLOWNODE_NODESTATE],
+ enum_types=[_WORKFLOWNODE_NODESTATE,],
serialized_options=None,
is_extendable=False,
syntax="proto3",
@@ -2117,7 +2117,7 @@
),
],
extensions=[],
- nested_types=[_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY],
+ nested_types=[_INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY,],
enum_types=[],
serialized_options=None,
is_extendable=False,
@@ -2253,7 +2253,7 @@
serialized_options=b"\340A\002",
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
- )
+ ),
],
extensions=[],
nested_types=[],
@@ -2705,6 +2705,7 @@
"DESCRIPTOR": _WORKFLOWTEMPLATE,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A Dataproc workflow template resource.
+
Attributes:
id:
Required. The template id. The id must contain only letters
@@ -2770,6 +2771,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """Specifies workflow execution target. Either ``managed_cluster`` or
``cluster_selector`` is required.
+
Attributes:
placement:
Required. Specifies where workflow executes; either on a
@@ -2802,6 +2804,7 @@
"DESCRIPTOR": _MANAGEDCLUSTER,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """Cluster that is managed by the workflow.
+
Attributes:
cluster_name:
Required. The cluster name prefix. A unique cluster name will
@@ -2839,6 +2842,7 @@
"DESCRIPTOR": _CLUSTERSELECTOR,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A selector that chooses target cluster for jobs based on metadata.
+
Attributes:
zone:
Optional. The zone where workflow process executes. This
@@ -2871,6 +2875,7 @@
"DESCRIPTOR": _ORDEREDJOB,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A job executed by the workflow.
+
Attributes:
step_id:
Required. The step id. The id must be unique among all jobs
@@ -2915,6 +2920,7 @@
template. Parameterizable fields: - Labels - File uris - Job
properties - Job arguments - Script variables - Main class (in
HadoopJob and SparkJob) - Zone (in ClusterSelector)
+
Attributes:
name:
Required. Parameter name. The parameter name is used as the
@@ -2976,6 +2982,7 @@
"DESCRIPTOR": _PARAMETERVALIDATION,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """Configuration for parameter validation.
+
Attributes:
validation_type:
Required. The type of validation to be performed.
@@ -2996,6 +3003,7 @@
"DESCRIPTOR": _REGEXVALIDATION,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """Validation based on regular expressions.
+
Attributes:
regexes:
Required. RE2 regular expressions used to validate the
@@ -3014,6 +3022,7 @@
"DESCRIPTOR": _VALUEVALIDATION,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """Validation based on a list of allowed values.
+
Attributes:
values:
Required. List of allowed values for the parameter.
@@ -3039,6 +3048,7 @@
"DESCRIPTOR": _WORKFLOWMETADATA,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A Dataproc workflow template resource.
+
Attributes:
template:
Output only. The resource name of the workflow template as
@@ -3087,6 +3097,7 @@
"DESCRIPTOR": _CLUSTEROPERATION,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """The cluster operation triggered by a workflow.
+
Attributes:
operation_id:
Output only. The id of the cluster operation.
@@ -3107,6 +3118,7 @@
"DESCRIPTOR": _WORKFLOWGRAPH,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """The workflow graph.
+
Attributes:
nodes:
Output only. The workflow nodes.
@@ -3123,6 +3135,7 @@
"DESCRIPTOR": _WORKFLOWNODE,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """The workflow node.
+
Attributes:
step_id:
Output only. The name of the node.
@@ -3148,6 +3161,7 @@
"DESCRIPTOR": _CREATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to create a workflow template.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3174,6 +3188,7 @@
"DESCRIPTOR": _GETWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to fetch a workflow template.
+
Attributes:
name:
Required. The resource name of the workflow template, as
@@ -3212,6 +3227,7 @@
"DESCRIPTOR": _INSTANTIATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to instantiate a workflow template.
+
Attributes:
name:
Required. The resource name of the workflow template, as
@@ -3260,6 +3276,7 @@
"DESCRIPTOR": _INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to instantiate an inline workflow template.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3297,6 +3314,7 @@
"DESCRIPTOR": _UPDATEWORKFLOWTEMPLATEREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to update a workflow template.
+
Attributes:
template:
Required. The updated workflow template. The
@@ -3314,6 +3332,7 @@
"DESCRIPTOR": _LISTWORKFLOWTEMPLATESREQUEST,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to list workflow templates in a project.
+
Attributes:
parent:
Required. The resource name of the region or location, as
@@ -3344,6 +3363,7 @@
"DESCRIPTOR": _LISTWORKFLOWTEMPLATESRESPONSE,
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A response to a request to list workflow templates in a project.
+
Attributes:
templates:
Output only. WorkflowTemplates list.
@@ -3366,6 +3386,7 @@
"__module__": "google.cloud.dataproc_v1beta2.proto.workflow_templates_pb2",
"__doc__": """A request to delete a workflow template. Currently started workflows
will remain running.
+
Attributes:
name:
Required. The resource name of the workflow template, as
diff --git a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py
index f9ea0bd6..72d48e23 100644
--- a/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py
+++ b/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.cloud.dataproc_v1beta2.proto import (
@@ -12,15 +13,15 @@
class WorkflowTemplateServiceStub(object):
"""The API interface for managing Workflow Templates in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def __init__(self, channel):
"""Constructor.
- Args:
- channel: A grpc.Channel.
- """
+ Args:
+ channel: A grpc.Channel.
+ """
self.CreateWorkflowTemplate = channel.unary_unary(
"/google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate",
request_serializer=google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.SerializeToString,
@@ -60,12 +61,12 @@ def __init__(self, channel):
class WorkflowTemplateServiceServicer(object):
"""The API interface for managing Workflow Templates in the
- Dataproc API.
- """
+ Dataproc API.
+ """
def CreateWorkflowTemplate(self, request, context):
"""Creates new workflow template.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -73,9 +74,9 @@ def CreateWorkflowTemplate(self, request, context):
def GetWorkflowTemplate(self, request, context):
"""Retrieves the latest workflow template.
- Can retrieve previously instantiated template by specifying optional
- version parameter.
- """
+ Can retrieve previously instantiated template by specifying optional
+ version parameter.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -83,25 +84,25 @@ def GetWorkflowTemplate(self, request, context):
def InstantiateWorkflowTemplate(self, request, context):
"""Instantiates a template and begins execution.
- The returned Operation can be used to track execution of
- workflow by polling
- [operations.get][google.longrunning.Operations.GetOperation].
- The Operation will complete when entire workflow is finished.
+ The returned Operation can be used to track execution of
+ workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when entire workflow is finished.
- The running workflow can be aborted via
- [operations.cancel][google.longrunning.Operations.CancelOperation].
- This will cause any inflight jobs to be cancelled and workflow-owned
- clusters to be deleted.
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and workflow-owned
+ clusters to be deleted.
- The [Operation.metadata][google.longrunning.Operation.metadata] will be
- [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata).
- Also see [Using
- WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
+ The [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata).
+ Also see [Using
+ WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
- On successful completion,
- [Operation.response][google.longrunning.Operation.response] will be
- [Empty][google.protobuf.Empty].
- """
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will be
+ [Empty][google.protobuf.Empty].
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -109,51 +110,51 @@ def InstantiateWorkflowTemplate(self, request, context):
def InstantiateInlineWorkflowTemplate(self, request, context):
"""Instantiates a template and begins execution.
- This method is equivalent to executing the sequence
- [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
- [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
+ This method is equivalent to executing the sequence
+ [CreateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.InstantiateWorkflowTemplate],
+ [DeleteWorkflowTemplate][google.cloud.dataproc.v1beta2.WorkflowTemplateService.DeleteWorkflowTemplate].
- The returned Operation can be used to track execution of
- workflow by polling
- [operations.get][google.longrunning.Operations.GetOperation].
- The Operation will complete when entire workflow is finished.
+ The returned Operation can be used to track execution of
+ workflow by polling
+ [operations.get][google.longrunning.Operations.GetOperation].
+ The Operation will complete when entire workflow is finished.
- The running workflow can be aborted via
- [operations.cancel][google.longrunning.Operations.CancelOperation].
- This will cause any inflight jobs to be cancelled and workflow-owned
- clusters to be deleted.
+ The running workflow can be aborted via
+ [operations.cancel][google.longrunning.Operations.CancelOperation].
+ This will cause any inflight jobs to be cancelled and workflow-owned
+ clusters to be deleted.
- The [Operation.metadata][google.longrunning.Operation.metadata] will be
- [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
- Also see [Using
- WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
+ The [Operation.metadata][google.longrunning.Operation.metadata] will be
+ [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata).
+ Also see [Using
+ WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata).
- On successful completion,
- [Operation.response][google.longrunning.Operation.response] will be
- [Empty][google.protobuf.Empty].
- """
+ On successful completion,
+ [Operation.response][google.longrunning.Operation.response] will be
+ [Empty][google.protobuf.Empty].
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateWorkflowTemplate(self, request, context):
"""Updates (replaces) workflow template. The updated template
- must contain version that matches the current server version.
- """
+ must contain version that matches the current server version.
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListWorkflowTemplates(self, request, context):
"""Lists workflows that match the specified filter in the request.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DeleteWorkflowTemplate(self, request, context):
"""Deletes a workflow template. It does not cancel in-progress workflows.
- """
+ """
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
@@ -201,3 +202,199 @@ def add_WorkflowTemplateServiceServicer_to_server(servicer, server):
"google.cloud.dataproc.v1beta2.WorkflowTemplateService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
+
+
+# This class is part of an EXPERIMENTAL API.
+class WorkflowTemplateService(object):
+ """The API interface for managing Workflow Templates in the
+ Dataproc API.
+ """
+
+ @staticmethod
+ def CreateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/CreateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.CreateWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def GetWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/GetWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.GetWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def InstantiateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateWorkflowTemplateRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def InstantiateInlineWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/InstantiateInlineWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.InstantiateInlineWorkflowTemplateRequest.SerializeToString,
+ google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def UpdateWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/UpdateWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.UpdateWorkflowTemplateRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.WorkflowTemplate.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def ListWorkflowTemplates(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/ListWorkflowTemplates",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesRequest.SerializeToString,
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.ListWorkflowTemplatesResponse.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
+
+ @staticmethod
+ def DeleteWorkflowTemplate(
+ request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None,
+ ):
+ return grpc.experimental.unary_unary(
+ request,
+ target,
+ "/google.cloud.dataproc.v1beta2.WorkflowTemplateService/DeleteWorkflowTemplate",
+ google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_workflow__templates__pb2.DeleteWorkflowTemplateRequest.SerializeToString,
+ google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+ options,
+ channel_credentials,
+ call_credentials,
+ compression,
+ wait_for_ready,
+ timeout,
+ metadata,
+ )
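A minimal sketch, under assumed names, of the instantiate-and-wait flow that the docstrings above describe. It uses the higher-level 1.0.x GAPIC client rather than these raw stubs, and the project/region/template identifiers are placeholders.

    from google.cloud import dataproc_v1beta2

    client = dataproc_v1beta2.WorkflowTemplateServiceClient()
    # Resource name format documented in workflow_templates_pb2:
    # projects/{project}/regions/{region}/workflowTemplates/{workflow_template}
    name = "projects/my-project/regions/global/workflowTemplates/my-template"

    operation = client.instantiate_workflow_template(name)
    operation.result()  # blocks until the entire workflow is finished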
diff --git a/noxfile.py b/noxfile.py
index f11be94b..a5b7aec1 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,14 +23,15 @@
import nox
-BLACK_VERSION = "black==19.3b0"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-if os.path.exists("samples"):
- BLACK_PATHS.append("samples")
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"]
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
"""Run linters.
@@ -38,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -53,10 +56,12 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
@@ -84,13 +89,13 @@ def default(session):
)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.7"])
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
@@ -110,7 +115,9 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest", "google-cloud-testutils")
+ session.install(
+ "mock", "pytest", "google-cloud-testutils",
+ )
session.install("-e", ".")
# Run py.test against the system tests.
@@ -120,7 +127,7 @@ def system(session):
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -133,7 +140,7 @@ def cover(session):
session.run("coverage", "erase")
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
"""Build the docs for this library."""
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 00000000..ff599eb2
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd "$ROOT"
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ > testing/client-secrets.json
\ No newline at end of file
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 00000000..d309d6e9
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+ config = yaml.safe_load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 00000000..4fd23976
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
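A hypothetical rendering of the template above, showing the context keys it expects ({{product}}, {{description}}, {{folder}}, {{setup}}, {{samples}}, {{cloud_client_library}}); all values below are made up for illustration.

    import jinja2

    env = jinja2.Environment(
        trim_blocks=True,
        loader=jinja2.FileSystemLoader("scripts/readme-gen/templates"),
    )
    config = {
        "product": {
            "name": "Google Cloud Dataproc",
            "url": "https://cloud.google.com/dataproc/docs/",
            "description": "is a managed Spark and Hadoop service.",
        },
        "description": "These samples show basic Dataproc operations.",
        "folder": "dataproc",
        "setup": ["auth", "install_deps"],
        "samples": [
            {
                "name": "Submit a job",
                "file": "submit_job.py",
                "description": "Submits a job to an existing cluster.",
            }
        ],
        "cloud_client_library": True,
    }
    print(env.get_template("README.tmpl.rst").render(config))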
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 00000000..1446b94a
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 00000000..11957ce2
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 00000000..a0406dba
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 00000000..5ea33d18
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.py b/setup.py
index ee8478c2..7e4c065f 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-dataproc"
description = "Google Cloud Dataproc API client library"
-version = "1.0.0"
+version = "1.0.1"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
diff --git a/synth.metadata b/synth.metadata
index 3715006d..f2ff1e3d 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,23 +3,23 @@
{
"git": {
"name": ".",
- "remote": "https://github.com/googleapis/python-dataproc.git",
- "sha": "5accf9d1990820744053c6d38f5465fca3ea848d"
+ "remote": "git@github.com:googleapis/python-dataproc.git",
+ "sha": "f0c8897e5124a553fb66ef20d9cd55d2ed912a6a"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "ca1372c6d7bcb199638ebfdb40d2b2660bab7b88",
- "internalRef": "315548189"
+ "sha": "bad4b831900d70e69b5e4d43bd7565d0aaded997",
+ "internalRef": "321584556"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "274dd49554809834287c24b6dd324a85283f1182"
+ "sha": "21f1470ecd01424dc91c70f1a7c798e4e87d1eec"
}
}
],
diff --git a/synth.py b/synth.py
index f64639bc..087d4bcb 100644
--- a/synth.py
+++ b/synth.py
@@ -94,4 +94,7 @@
templated_files = common.py_library(unit_cov_level=97, cov_level=89)
s.move(templated_files)
+# TODO(busunkim): Use latest sphinx after microgenerator transition
+s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"')
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
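The effect of the new s.replace() call, shown on an assumed noxfile.py line (synthtool's s.replace performs a regex substitution over the file contents):

    import re

    line = 'session.install("sphinx")'
    print(re.sub(r"""['"]sphinx['"]""", '"sphinx<3.0.0"', line))
    # -> session.install("sphinx<3.0.0")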
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 00000000..b05fbd63
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py b/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py
index 17622a06..83736be4 100644
--- a/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py
+++ b/tests/unit/gapic/v1/test_autoscaling_policy_service_client_v1.py
@@ -61,7 +61,7 @@ class CustomException(Exception):
class TestAutoscalingPolicyServiceClient(object):
- def test_update_autoscaling_policy(self):
+ def test_create_autoscaling_policy(self):
# Setup Expected Response
id_ = "id3355"
name = "name3373707"
@@ -78,19 +78,20 @@ def test_update_autoscaling_policy(self):
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Setup Request
+ parent = client.region_path("[PROJECT]", "[REGION]")
policy = {}
- response = client.update_autoscaling_policy(policy)
+ response = client.create_autoscaling_policy(parent, policy)
assert expected_response == response
assert len(channel.requests) == 1
- expected_request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(
- policy=policy
+ expected_request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
+ parent=parent, policy=policy
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
- def test_update_autoscaling_policy_exception(self):
+ def test_create_autoscaling_policy_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
@@ -99,12 +100,13 @@ def test_update_autoscaling_policy_exception(self):
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Setup request
+ parent = client.region_path("[PROJECT]", "[REGION]")
policy = {}
with pytest.raises(CustomException):
- client.update_autoscaling_policy(policy)
+ client.create_autoscaling_policy(parent, policy)
- def test_create_autoscaling_policy(self):
+ def test_update_autoscaling_policy(self):
# Setup Expected Response
id_ = "id3355"
name = "name3373707"
@@ -121,20 +123,19 @@ def test_create_autoscaling_policy(self):
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Setup Request
- parent = client.region_path("[PROJECT]", "[REGION]")
policy = {}
- response = client.create_autoscaling_policy(parent, policy)
+ response = client.update_autoscaling_policy(policy)
assert expected_response == response
assert len(channel.requests) == 1
- expected_request = autoscaling_policies_pb2.CreateAutoscalingPolicyRequest(
- parent=parent, policy=policy
+ expected_request = autoscaling_policies_pb2.UpdateAutoscalingPolicyRequest(
+ policy=policy
)
actual_request = channel.requests[0][1]
assert expected_request == actual_request
- def test_create_autoscaling_policy_exception(self):
+ def test_update_autoscaling_policy_exception(self):
# Mock the API response
channel = ChannelStub(responses=[CustomException()])
patch = mock.patch("google.api_core.grpc_helpers.create_channel")
@@ -143,11 +144,10 @@ def test_create_autoscaling_policy_exception(self):
client = dataproc_v1.AutoscalingPolicyServiceClient()
# Setup request
- parent = client.region_path("[PROJECT]", "[REGION]")
policy = {}
with pytest.raises(CustomException):
- client.create_autoscaling_policy(parent, policy)
+ client.update_autoscaling_policy(policy)
def test_get_autoscaling_policy(self):
# Setup Expected Response
diff --git a/tests/unit/gapic/v1/test_cluster_controller_client_v1.py b/tests/unit/gapic/v1/test_cluster_controller_client_v1.py
index 1c15fdcf..81591382 100644
--- a/tests/unit/gapic/v1/test_cluster_controller_client_v1.py
+++ b/tests/unit/gapic/v1/test_cluster_controller_client_v1.py
@@ -259,6 +259,63 @@ def test_delete_cluster_exception(self):
exception = response.exception()
assert exception.errors[0] == error
+ def test_diagnose_cluster(self):
+ # Setup Expected Response
+ output_uri = "outputUri-1273518802"
+ expected_response = {"output_uri": output_uri}
+ expected_response = clusters_pb2.DiagnoseClusterResults(**expected_response)
+ operation = operations_pb2.Operation(
+ name="operations/test_diagnose_cluster", done=True
+ )
+ operation.response.Pack(expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.ClusterControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ cluster_name = "clusterName-1018081872"
+
+ response = client.diagnose_cluster(project_id, region, cluster_name)
+ result = response.result()
+ assert expected_response == result
+
+ assert len(channel.requests) == 1
+ expected_request = clusters_pb2.DiagnoseClusterRequest(
+ project_id=project_id, region=region, cluster_name=cluster_name
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_diagnose_cluster_exception(self):
+ # Setup Response
+ error = status_pb2.Status()
+ operation = operations_pb2.Operation(
+ name="operations/test_diagnose_cluster_exception", done=True
+ )
+ operation.error.CopyFrom(error)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.ClusterControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ cluster_name = "clusterName-1018081872"
+
+ response = client.diagnose_cluster(project_id, region, cluster_name)
+ exception = response.exception()
+ assert exception.errors[0] == error
+
def test_get_cluster(self):
# Setup Expected Response
project_id_2 = "projectId2939242356"
@@ -355,59 +412,3 @@ def test_list_clusters_exception(self):
paged_list_response = client.list_clusters(project_id, region)
with pytest.raises(CustomException):
list(paged_list_response)
-
- def test_diagnose_cluster(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = empty_pb2.Empty(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_diagnose_cluster", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.ClusterControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- cluster_name = "clusterName-1018081872"
-
- response = client.diagnose_cluster(project_id, region, cluster_name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = clusters_pb2.DiagnoseClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_diagnose_cluster_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_diagnose_cluster_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.ClusterControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- cluster_name = "clusterName-1018081872"
-
- response = client.diagnose_cluster(project_id, region, cluster_name)
- exception = response.exception()
- assert exception.errors[0] == error
diff --git a/tests/unit/gapic/v1/test_job_controller_client_v1.py b/tests/unit/gapic/v1/test_job_controller_client_v1.py
index bc9ff0f9..4d777faf 100644
--- a/tests/unit/gapic/v1/test_job_controller_client_v1.py
+++ b/tests/unit/gapic/v1/test_job_controller_client_v1.py
@@ -117,6 +117,71 @@ def test_submit_job_exception(self):
with pytest.raises(CustomException):
client.submit_job(project_id, region, job)
+ def test_submit_job_as_operation(self):
+ # Setup Expected Response
+ driver_output_resource_uri = "driverOutputResourceUri-542229086"
+ driver_control_files_uri = "driverControlFilesUri207057643"
+ job_uuid = "jobUuid-1615012099"
+ done = True
+ expected_response = {
+ "driver_output_resource_uri": driver_output_resource_uri,
+ "driver_control_files_uri": driver_control_files_uri,
+ "job_uuid": job_uuid,
+ "done": done,
+ }
+ expected_response = jobs_pb2.Job(**expected_response)
+ operation = operations_pb2.Operation(
+ name="operations/test_submit_job_as_operation", done=True
+ )
+ operation.response.Pack(expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.JobControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ job = {}
+
+ response = client.submit_job_as_operation(project_id, region, job)
+ result = response.result()
+ assert expected_response == result
+
+ assert len(channel.requests) == 1
+ expected_request = jobs_pb2.SubmitJobRequest(
+ project_id=project_id, region=region, job=job
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_submit_job_as_operation_exception(self):
+ # Setup Response
+ error = status_pb2.Status()
+ operation = operations_pb2.Operation(
+ name="operations/test_submit_job_as_operation_exception", done=True
+ )
+ operation.error.CopyFrom(error)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.JobControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ job = {}
+
+ response = client.submit_job_as_operation(project_id, region, job)
+ exception = response.exception()
+ assert exception.errors[0] == error
+
def test_get_job(self):
# Setup Expected Response
driver_output_resource_uri = "driverOutputResourceUri-542229086"
@@ -364,68 +429,3 @@ def test_delete_job_exception(self):
with pytest.raises(CustomException):
client.delete_job(project_id, region, job_id)
-
- def test_submit_job_as_operation(self):
- # Setup Expected Response
- driver_output_resource_uri = "driverOutputResourceUri-542229086"
- driver_control_files_uri = "driverControlFilesUri207057643"
- job_uuid = "jobUuid-1615012099"
- done = True
- expected_response = {
- "driver_output_resource_uri": driver_output_resource_uri,
- "driver_control_files_uri": driver_control_files_uri,
- "job_uuid": job_uuid,
- "done": done,
- }
- expected_response = jobs_pb2.Job(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_submit_job_as_operation", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.JobControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- job = {}
-
- response = client.submit_job_as_operation(project_id, region, job)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_submit_job_as_operation_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_submit_job_as_operation_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.JobControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- job = {}
-
- response = client.submit_job_as_operation(project_id, region, job)
- exception = response.exception()
- assert exception.errors[0] == error
diff --git a/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py b/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py
index 764e38fb..c63831ca 100644
--- a/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py
+++ b/tests/unit/gapic/v1/test_workflow_template_service_client_v1.py
@@ -64,90 +64,6 @@ class CustomException(Exception):
class TestWorkflowTemplateServiceClient(object):
- def test_create_workflow_template(self):
- # Setup Expected Response
- id_ = "id3355"
- name = "name3373707"
- version = 351608024
- expected_response = {"id": id_, "name": name, "version": version}
- expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.WorkflowTemplateServiceClient()
-
- # Setup Request
- parent = client.region_path("[PROJECT]", "[REGION]")
- template = {}
-
- response = client.create_workflow_template(parent, template)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
- parent=parent, template=template
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_workflow_template_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.WorkflowTemplateServiceClient()
-
- # Setup request
- parent = client.region_path("[PROJECT]", "[REGION]")
- template = {}
-
- with pytest.raises(CustomException):
- client.create_workflow_template(parent, template)
-
- def test_get_workflow_template(self):
- # Setup Expected Response
- id_ = "id3355"
- name_2 = "name2-1052831874"
- version = 351608024
- expected_response = {"id": id_, "name": name_2, "version": version}
- expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.WorkflowTemplateServiceClient()
-
- # Setup Request
- name = "name3373707"
-
- response = client.get_workflow_template(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_workflow_template_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1.WorkflowTemplateServiceClient()
-
- # Setup request
- name = "name3373707"
-
- with pytest.raises(CustomException):
- client.get_workflow_template(name)
-
def test_instantiate_workflow_template(self):
# Setup Expected Response
expected_response = {}
@@ -255,6 +171,90 @@ def test_instantiate_inline_workflow_template_exception(self):
exception = response.exception()
assert exception.errors[0] == error
+ def test_create_workflow_template(self):
+ # Setup Expected Response
+ id_ = "id3355"
+ name = "name3373707"
+ version = 351608024
+ expected_response = {"id": id_, "name": name, "version": version}
+ expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[expected_response])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.WorkflowTemplateServiceClient()
+
+ # Setup Request
+ parent = client.region_path("[PROJECT]", "[REGION]")
+ template = {}
+
+ response = client.create_workflow_template(parent, template)
+ assert expected_response == response
+
+ assert len(channel.requests) == 1
+ expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
+ parent=parent, template=template
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_create_workflow_template_exception(self):
+ # Mock the API response
+ channel = ChannelStub(responses=[CustomException()])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.WorkflowTemplateServiceClient()
+
+ # Setup request
+ parent = client.region_path("[PROJECT]", "[REGION]")
+ template = {}
+
+ with pytest.raises(CustomException):
+ client.create_workflow_template(parent, template)
+
+ def test_get_workflow_template(self):
+ # Setup Expected Response
+ id_ = "id3355"
+ name_2 = "name2-1052831874"
+ version = 351608024
+ expected_response = {"id": id_, "name": name_2, "version": version}
+ expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[expected_response])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.WorkflowTemplateServiceClient()
+
+ # Setup Request
+ name = "name3373707"
+
+ response = client.get_workflow_template(name)
+ assert expected_response == response
+
+ assert len(channel.requests) == 1
+ expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name)
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_get_workflow_template_exception(self):
+ # Mock the API response
+ channel = ChannelStub(responses=[CustomException()])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1.WorkflowTemplateServiceClient()
+
+ # Setup request
+ name = "name3373707"
+
+ with pytest.raises(CustomException):
+ client.get_workflow_template(name)
+
def test_update_workflow_template(self):
# Setup Expected Response
id_ = "id3355"
diff --git a/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py
index 7c75dc57..cb4d14ad 100644
--- a/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py
+++ b/tests/unit/gapic/v1beta2/test_cluster_controller_client_v1beta2.py
@@ -259,6 +259,62 @@ def test_delete_cluster_exception(self):
exception = response.exception()
assert exception.errors[0] == error
+ def test_diagnose_cluster(self):
+ # Setup Expected Response
+ expected_response = {}
+ expected_response = empty_pb2.Empty(**expected_response)
+ operation = operations_pb2.Operation(
+ name="operations/test_diagnose_cluster", done=True
+ )
+ operation.response.Pack(expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.ClusterControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ cluster_name = "clusterName-1018081872"
+
+ response = client.diagnose_cluster(project_id, region, cluster_name)
+ result = response.result()
+ assert expected_response == result
+
+ assert len(channel.requests) == 1
+ expected_request = clusters_pb2.DiagnoseClusterRequest(
+ project_id=project_id, region=region, cluster_name=cluster_name
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_diagnose_cluster_exception(self):
+ # Setup Response
+ error = status_pb2.Status()
+ operation = operations_pb2.Operation(
+ name="operations/test_diagnose_cluster_exception", done=True
+ )
+ operation.error.CopyFrom(error)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.ClusterControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ cluster_name = "clusterName-1018081872"
+
+ response = client.diagnose_cluster(project_id, region, cluster_name)
+ exception = response.exception()
+ assert exception.errors[0] == error
+
def test_get_cluster(self):
# Setup Expected Response
project_id_2 = "projectId2939242356"
@@ -355,59 +411,3 @@ def test_list_clusters_exception(self):
paged_list_response = client.list_clusters(project_id, region)
with pytest.raises(CustomException):
list(paged_list_response)
-
- def test_diagnose_cluster(self):
- # Setup Expected Response
- expected_response = {}
- expected_response = empty_pb2.Empty(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_diagnose_cluster", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.ClusterControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- cluster_name = "clusterName-1018081872"
-
- response = client.diagnose_cluster(project_id, region, cluster_name)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = clusters_pb2.DiagnoseClusterRequest(
- project_id=project_id, region=region, cluster_name=cluster_name
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_diagnose_cluster_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_diagnose_cluster_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.ClusterControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- cluster_name = "clusterName-1018081872"
-
- response = client.diagnose_cluster(project_id, region, cluster_name)
- exception = response.exception()
- assert exception.errors[0] == error
diff --git a/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py
index 84ce3d3d..57dbcbe8 100644
--- a/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py
+++ b/tests/unit/gapic/v1beta2/test_job_controller_client_v1beta2.py
@@ -119,6 +119,73 @@ def test_submit_job_exception(self):
with pytest.raises(CustomException):
client.submit_job(project_id, region, job)
+ def test_submit_job_as_operation(self):
+ # Setup Expected Response
+ submitted_by = "submittedBy-2047729125"
+ driver_output_resource_uri = "driverOutputResourceUri-542229086"
+ driver_control_files_uri = "driverControlFilesUri207057643"
+ job_uuid = "jobUuid-1615012099"
+ done = True
+ expected_response = {
+ "submitted_by": submitted_by,
+ "driver_output_resource_uri": driver_output_resource_uri,
+ "driver_control_files_uri": driver_control_files_uri,
+ "job_uuid": job_uuid,
+ "done": done,
+ }
+ expected_response = jobs_pb2.Job(**expected_response)
+ operation = operations_pb2.Operation(
+ name="operations/test_submit_job_as_operation", done=True
+ )
+ operation.response.Pack(expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.JobControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ job = {}
+
+ response = client.submit_job_as_operation(project_id, region, job)
+ result = response.result()
+ assert expected_response == result
+
+ assert len(channel.requests) == 1
+ expected_request = jobs_pb2.SubmitJobRequest(
+ project_id=project_id, region=region, job=job
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_submit_job_as_operation_exception(self):
+ # Setup Response
+ error = status_pb2.Status()
+ operation = operations_pb2.Operation(
+ name="operations/test_submit_job_as_operation_exception", done=True
+ )
+ operation.error.CopyFrom(error)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[operation])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.JobControllerClient()
+
+ # Setup Request
+ project_id = "projectId-1969970175"
+ region = "region-934795532"
+ job = {}
+
+ response = client.submit_job_as_operation(project_id, region, job)
+ exception = response.exception()
+ assert exception.errors[0] == error
+
def test_get_job(self):
# Setup Expected Response
submitted_by = "submittedBy-2047729125"
@@ -372,70 +439,3 @@ def test_delete_job_exception(self):
with pytest.raises(CustomException):
client.delete_job(project_id, region, job_id)
-
- def test_submit_job_as_operation(self):
- # Setup Expected Response
- submitted_by = "submittedBy-2047729125"
- driver_output_resource_uri = "driverOutputResourceUri-542229086"
- driver_control_files_uri = "driverControlFilesUri207057643"
- job_uuid = "jobUuid-1615012099"
- done = True
- expected_response = {
- "submitted_by": submitted_by,
- "driver_output_resource_uri": driver_output_resource_uri,
- "driver_control_files_uri": driver_control_files_uri,
- "job_uuid": job_uuid,
- "done": done,
- }
- expected_response = jobs_pb2.Job(**expected_response)
- operation = operations_pb2.Operation(
- name="operations/test_submit_job_as_operation", done=True
- )
- operation.response.Pack(expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.JobControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- job = {}
-
- response = client.submit_job_as_operation(project_id, region, job)
- result = response.result()
- assert expected_response == result
-
- assert len(channel.requests) == 1
- expected_request = jobs_pb2.SubmitJobRequest(
- project_id=project_id, region=region, job=job
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_submit_job_as_operation_exception(self):
- # Setup Response
- error = status_pb2.Status()
- operation = operations_pb2.Operation(
- name="operations/test_submit_job_as_operation_exception", done=True
- )
- operation.error.CopyFrom(error)
-
- # Mock the API response
- channel = ChannelStub(responses=[operation])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.JobControllerClient()
-
- # Setup Request
- project_id = "projectId-1969970175"
- region = "region-934795532"
- job = {}
-
- response = client.submit_job_as_operation(project_id, region, job)
- exception = response.exception()
- assert exception.errors[0] == error
diff --git a/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py b/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py
index d3017008..f8ff56f9 100644
--- a/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py
+++ b/tests/unit/gapic/v1beta2/test_workflow_template_service_client_v1beta2.py
@@ -64,90 +64,6 @@ class CustomException(Exception):
class TestWorkflowTemplateServiceClient(object):
- def test_create_workflow_template(self):
- # Setup Expected Response
- id_ = "id3355"
- name = "name3373707"
- version = 351608024
- expected_response = {"id": id_, "name": name, "version": version}
- expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.WorkflowTemplateServiceClient()
-
- # Setup Request
- parent = client.region_path("[PROJECT]", "[REGION]")
- template = {}
-
- response = client.create_workflow_template(parent, template)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
- parent=parent, template=template
- )
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_create_workflow_template_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.WorkflowTemplateServiceClient()
-
- # Setup request
- parent = client.region_path("[PROJECT]", "[REGION]")
- template = {}
-
- with pytest.raises(CustomException):
- client.create_workflow_template(parent, template)
-
- def test_get_workflow_template(self):
- # Setup Expected Response
- id_ = "id3355"
- name_2 = "name2-1052831874"
- version = 351608024
- expected_response = {"id": id_, "name": name_2, "version": version}
- expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
-
- # Mock the API response
- channel = ChannelStub(responses=[expected_response])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.WorkflowTemplateServiceClient()
-
- # Setup Request
- name = "name3373707"
-
- response = client.get_workflow_template(name)
- assert expected_response == response
-
- assert len(channel.requests) == 1
- expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name)
- actual_request = channel.requests[0][1]
- assert expected_request == actual_request
-
- def test_get_workflow_template_exception(self):
- # Mock the API response
- channel = ChannelStub(responses=[CustomException()])
- patch = mock.patch("google.api_core.grpc_helpers.create_channel")
- with patch as create_channel:
- create_channel.return_value = channel
- client = dataproc_v1beta2.WorkflowTemplateServiceClient()
-
- # Setup request
- name = "name3373707"
-
- with pytest.raises(CustomException):
- client.get_workflow_template(name)
-
def test_instantiate_workflow_template(self):
# Setup Expected Response
expected_response = {}
@@ -255,6 +171,90 @@ def test_instantiate_inline_workflow_template_exception(self):
exception = response.exception()
assert exception.errors[0] == error
+ def test_create_workflow_template(self):
+ # Setup Expected Response
+ id_ = "id3355"
+ name = "name3373707"
+ version = 351608024
+ expected_response = {"id": id_, "name": name, "version": version}
+ expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[expected_response])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+
+ # Setup Request
+ parent = client.region_path("[PROJECT]", "[REGION]")
+ template = {}
+
+ response = client.create_workflow_template(parent, template)
+ assert expected_response == response
+
+ assert len(channel.requests) == 1
+ expected_request = workflow_templates_pb2.CreateWorkflowTemplateRequest(
+ parent=parent, template=template
+ )
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_create_workflow_template_exception(self):
+ # Mock the API response
+ channel = ChannelStub(responses=[CustomException()])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+
+ # Setup request
+ parent = client.region_path("[PROJECT]", "[REGION]")
+ template = {}
+
+ with pytest.raises(CustomException):
+ client.create_workflow_template(parent, template)
+
+ def test_get_workflow_template(self):
+ # Setup Expected Response
+ id_ = "id3355"
+ name_2 = "name2-1052831874"
+ version = 351608024
+ expected_response = {"id": id_, "name": name_2, "version": version}
+ expected_response = workflow_templates_pb2.WorkflowTemplate(**expected_response)
+
+ # Mock the API response
+ channel = ChannelStub(responses=[expected_response])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+
+ # Setup Request
+ name = "name3373707"
+
+ response = client.get_workflow_template(name)
+ assert expected_response == response
+
+ assert len(channel.requests) == 1
+ expected_request = workflow_templates_pb2.GetWorkflowTemplateRequest(name=name)
+ actual_request = channel.requests[0][1]
+ assert expected_request == actual_request
+
+ def test_get_workflow_template_exception(self):
+ # Mock the API response
+ channel = ChannelStub(responses=[CustomException()])
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel")
+ with patch as create_channel:
+ create_channel.return_value = channel
+ client = dataproc_v1beta2.WorkflowTemplateServiceClient()
+
+ # Setup request
+ name = "name3373707"
+
+ with pytest.raises(CustomException):
+ client.get_workflow_template(name)
+
def test_update_workflow_template(self):
# Setup Expected Response
id_ = "id3355"