From 098be5532417b2f3d2a861584a7af16a68852e35 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 3 Nov 2021 14:39:35 -0400 Subject: [PATCH 01/18] chore: delete owlbot.py (#15) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: delete owlbot.py * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- docs/index.rst | 6 ++++-- owlbot.py | 42 ------------------------------------------ 2 files changed, 4 insertions(+), 44 deletions(-) delete mode 100644 owlbot.py diff --git a/docs/index.rst b/docs/index.rst index cadfd1b..1636190 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,6 +2,7 @@ .. include:: multiprocessing.rst + API Reference ------------- .. toctree:: @@ -10,12 +11,13 @@ API Reference bigquery_migration_v2alpha/services bigquery_migration_v2alpha/types + Changelog --------- For a list of all ``google-cloud-bigquery-migration`` releases: .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - changelog + changelog diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index 6d3ffe2..0000000 --- a/owlbot.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import synthtool as s -import synthtool.gcp as gcp -from synthtool.languages import python - -# ---------------------------------------------------------------------------- -# Copy the generated client from the owl-bot staging directory -# ---------------------------------------------------------------------------- - -default_version = "v2alpha" - -for library in s.get_staging_dirs(default_version): - s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"]) -s.remove_staging_dirs() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- - -templated_files = gcp.CommonTemplates().py_library(microgenerator=True) -s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file - -python.py_samples(skip_readmes=True) - -# ---------------------------------------------------------------------------- -# Run blacken session -# ---------------------------------------------------------------------------- - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) From af7e6ab0b5363237c49801035c80fa38a39a9c94 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 10 Nov 2021 05:12:09 -0500 Subject: [PATCH 02/18] chore: use gapic-generator-python 0.56.2 (#19) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../migration_service/async_client.py | 13 ++-- .../services/migration_service/client.py | 25 ++++--- .../migration_service/transports/base.py | 8 +-- .../migration_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../types/migration_metrics.py | 5 ++ .../test_migration_service.py | 68 +++++++++++++------ 7 files changed, 86 insertions(+), 41 deletions(-) diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index bb4d20c..58da1a3 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = 
Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers from google.cloud.bigquery_migration_v2alpha.types import migration_entities diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 013e2a3..11ff5d0 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.bigquery_migration_v2alpha.services.migration_service import pagers from google.cloud.bigquery_migration_v2alpha.types import migration_entities @@ -306,8 +308,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
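The next hunk in client.py removes the `distutils.util.strtobool` call; distutils was deprecated by PEP 632 and is removed in Python 3.12, so the generator now spells the check out explicitly and rejects anything other than the literal strings "true" and "false". Factored out of `__init__` into a standalone helper purely for illustration (the helper name is hypothetical), the new logic is equivalent to:

import os

def _use_client_cert() -> bool:
    # Mirrors the generated check: only the literal strings "true" and
    # "false" are accepted; any other value raises immediately.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

The generated code inlines this in the client constructor and reads the environment variable twice; the single-read helper above is behaviorally identical.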
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py index f72f8d6..43410b4 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py index f4c740a..19f31f8 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py index d3d3fd7..5eacdd0 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py b/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py index 4820644..89cd2e9 100644 --- a/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py +++ b/google/cloud/bigquery_migration_v2alpha/types/migration_metrics.py @@ -122,22 +122,27 @@ class 
TypedValue(proto.Message): Attributes: bool_value (bool): A Boolean value: ``true`` or ``false``. + This field is a member of `oneof`_ ``value``. int64_value (int): A 64-bit integer. Its range is approximately +/-9.2x10^18. + This field is a member of `oneof`_ ``value``. double_value (float): A 64-bit double-precision floating-point number. Its magnitude is approximately +/-10^(+/-300) and it has 16 significant digits of precision. + This field is a member of `oneof`_ ``value``. string_value (str): A variable-length string value. + This field is a member of `oneof`_ ``value``. distribution_value (google.api.distribution_pb2.Distribution): A distribution value. + This field is a member of `oneof`_ ``value``. """ diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 0345e84..eb3103a 100644 --- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -679,10 +679,12 @@ def test_create_migration_workflow_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].migration_workflow == migration_entities.MigrationWorkflow( - name="name_value" - ) + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val def test_create_migration_workflow_flattened_error(): @@ -725,10 +727,12 @@ async def test_create_migration_workflow_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].migration_workflow == migration_entities.MigrationWorkflow( - name="name_value" - ) + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].migration_workflow + mock_val = migration_entities.MigrationWorkflow(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -921,7 +925,9 @@ def test_get_migration_workflow_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_migration_workflow_flattened_error(): @@ -959,7 +965,9 @@ async def test_get_migration_workflow_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1143,7 +1151,9 @@ def test_list_migration_workflows_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_migration_workflows_flattened_error(): @@ -1181,7 +1191,9 @@ async def test_list_migration_workflows_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1531,7 +1543,9 @@ def test_delete_migration_workflow_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_migration_workflow_flattened_error(): @@ -1567,7 +1581,9 @@ async def test_delete_migration_workflow_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1741,7 +1757,9 @@ def test_start_migration_workflow_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_start_migration_workflow_flattened_error(): @@ -1777,7 +1795,9 @@ async def test_start_migration_workflow_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1976,7 +1996,9 @@ def test_get_migration_subtask_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_migration_subtask_flattened_error(): @@ -2014,7 +2036,9 @@ async def test_get_migration_subtask_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2197,7 +2221,9 @@ def test_list_migration_subtasks_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_migration_subtasks_flattened_error(): @@ -2235,7 +2261,9 @@ async def test_list_migration_subtasks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio From ecbab76d04f773c116b41ab556d089e3542248c4 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Nov 2021 11:58:37 -0500 Subject: [PATCH 03/18] chore: add link to Client Library Documentation in README (#20) --- README.rst | 43 ++++++++++++++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 5 deletions(-) diff --git a/README.rst b/README.rst index 6b6ed6b..0e957de 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,23 @@ Python Client for Google Cloud Bigquery Migration API ===================================================== +|GA| |pypi| |versions| + +Python client for `Google Cloud Bigquery Migration API`_. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-migration.svg + :target: https://pypi.org/project/google-cloud-bigquery-migration/ +.. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-migration.svg + :target: https://pypi.org/project/google-cloud-bigquery-migration/ +.. _Google Cloud Bigquery Migration API: https://cloud.google.com/bigquery/docs/reference/migration/ +.. _Client Library Documentation: https://googleapis.dev/python/bigquerymigration/latest +.. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/migration/ + Quick Start ----------- @@ -8,11 +25,12 @@ In order to use this library, you first need to go through the following steps: 1. `Select or create a Cloud Platform project.`_ 2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Migration API. +3. `Enable the Google Cloud Bigquery Migration API.`_ 4. `Setup Authentication.`_ .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Bigquery Migration API.: https://console.cloud.google.com/apis/library/bigquerymigration.googleapis.com .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation @@ -34,9 +52,10 @@ Mac/Linux .. code-block:: console - python3 -m venv + pip install virtualenv + virtualenv source /bin/activate - /bin/pip install /path/to/library + /bin/pip install google-cloud-bigquery-migration Windows @@ -44,6 +63,20 @@ Windows .. code-block:: console - python3 -m venv + pip install virtualenv + virtualenv \Scripts\activate - \Scripts\pip.exe install \path\to\library + \Scripts\pip.exe install google-cloud-bigquery-migration + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Google Cloud Bigquery + Migration API to see other available methods on the client. +- Read the `Bigquery Migration API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `README`_ to see the full list of Cloud + APIs that we cover. + +.. _Bigquery Migration API Product documentation: https://cloud.google.com/bigquery/docs/reference/migration/ +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst \ No newline at end of file From 88b95b6a6e26395b9fd7bde33c2695bd8947cea5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Nov 2021 14:48:38 -0500 Subject: [PATCH 04/18] chore: fix release level in README (#22) --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 0e957de..41fc18a 100644 --- a/README.rst +++ b/README.rst @@ -1,14 +1,14 @@ Python Client for Google Cloud Bigquery Migration API ===================================================== -|GA| |pypi| |versions| +|alpha| |pypi| |versions| Python client for `Google Cloud Bigquery Migration API`_. - `Client Library Documentation`_ - `Product Documentation`_ -.. |GA| image:: https://img.shields.io/badge/support-ga-gold.svg +.. |alpha| image:: https://img.shields.io/badge/support-alpha-orange.svg :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-bigquery-migration.svg :target: https://pypi.org/project/google-cloud-bigquery-migration/ @@ -79,4 +79,4 @@ Next Steps APIs that we cover. .. _Bigquery Migration API Product documentation: https://cloud.google.com/bigquery/docs/reference/migration/ -.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst \ No newline at end of file +.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst From 9bd29bbf00a1f1f1bb06fa6b742198873d073e90 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 18:47:13 -0500 Subject: [PATCH 05/18] chore(python): add .github/CODEOWNERS as a templated file (#21) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 108063d..7519fa3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8e1c1fb..f8714a3 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# The @googleapis/api-bigquery is the default owner for changes in this repo -* @googleapis/api-bigquery @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-bigquery -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery @googleapis/yoshi-python \ No newline at end of file +# @googleapis/python-samples-owners @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery From 94ceed1e033dc44d6234da382bc332d6906edb93 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 17 Nov 2021 17:22:09 -0500 Subject: [PATCH 06/18] chore: update doc links from googleapis.dev to cloud.google.com (#23) --- .repo-metadata.json | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index c717a26..348c4f3 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "bigquerymigration", "name_pretty": "Google BigQuery Migration", "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/", - "client_documentation": "https://googleapis.dev/python/bigquerymigration/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerymigration/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", "release_level": "alpha", "language": "python", diff --git a/README.rst b/README.rst index 41fc18a..16ea38a 100644 --- a/README.rst +++ b/README.rst @@ -15,7 +15,7 @@ Python client for `Google Cloud Bigquery Migration API`_. .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-bigquery-migration.svg :target: https://pypi.org/project/google-cloud-bigquery-migration/ .. _Google Cloud Bigquery Migration API: https://cloud.google.com/bigquery/docs/reference/migration/ -.. _Client Library Documentation: https://googleapis.dev/python/bigquerymigration/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/bigquerymigration/latest .. _Product Documentation: https://cloud.google.com/bigquery/docs/reference/migration/ Quick Start From 140bda92923ce431030abc4ce644d6e91cab23c7 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 24 Dec 2021 20:26:22 -0500 Subject: [PATCH 07/18] chore: add api_shortname to repo-metadata.json (#26) --- .repo-metadata.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index 348c4f3..cb3e092 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -4,7 +4,7 @@ "product_documentation": "https://cloud.google.com/bigquery/docs/reference/migration/", "client_documentation": "https://cloud.google.com/python/docs/reference/bigquerymigration/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "alpha", + "release_level": "preview", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/python-bigquery-migration", @@ -12,5 +12,6 @@ "api_id": "bigquerymigration.googleapis.com", "requires_billing": true, "default_version": "v2alpha", - "codeowner_team": "@googleapis/api-bigquery" + "codeowner_team": "@googleapis/api-bigquery", + "api_shortname": "bigquerymigration" } From 896ec6ab62424479b0713a5942914042dd14c9d9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 16:04:38 +0000 Subject: [PATCH 08/18] chore: use python-samples-reviewers (#28) --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7519fa3..f33299d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f8714a3..193b436 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/api-bigquery are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/api-bigquery -# @googleapis/python-samples-owners @googleapis/api-bigquery are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-bigquery +# @googleapis/python-samples-reviewers @googleapis/api-bigquery are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-bigquery From 8a134603258a7f96696f019607eae3bab7940c94 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 9 Jan 2022 17:24:03 -0500 Subject: [PATCH 09/18] chore: use gapic-generator-python 0.58.4 (#27) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message 
body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../migration_service/transports/base.py | 1 - .../test_migration_service.py | 119 ++++++++---------- 2 files changed, 51 insertions(+), 69 deletions(-) diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py index 43410b4..5b01d1b 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/base.py @@ -103,7 +103,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index eb3103a..38c993b 100644 --- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -260,20 +260,20 @@ def test_migration_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -342,7 +342,7 @@ def test_migration_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -437,7 +437,7 @@ def test_migration_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -468,7 +468,7 @@ def test_migration_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -501,10 +501,10 @@ def test_migration_service_client_client_options_from_dict(): ) -def test_create_migration_workflow( - transport: str = "grpc", - request_type=migration_service.CreateMigrationWorkflowRequest, -): +@pytest.mark.parametrize( + "request_type", [migration_service.CreateMigrationWorkflowRequest, dict,] +) +def test_create_migration_workflow(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -537,10 +537,6 @@ def test_create_migration_workflow( assert response.state == migration_entities.MigrationWorkflow.State.DRAFT -def test_create_migration_workflow_from_dict(): - test_create_migration_workflow(request_type=dict) - - def test_create_migration_workflow_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -751,9 +747,10 @@ async def test_create_migration_workflow_flattened_error_async(): ) -def test_get_migration_workflow( - transport: str = "grpc", request_type=migration_service.GetMigrationWorkflowRequest -): +@pytest.mark.parametrize( + "request_type", [migration_service.GetMigrationWorkflowRequest, dict,] +) +def test_get_migration_workflow(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -786,10 +783,6 @@ def test_get_migration_workflow( assert response.state == migration_entities.MigrationWorkflow.State.DRAFT -def test_get_migration_workflow_from_dict(): - test_get_migration_workflow(request_type=dict) - - def test_get_migration_workflow_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -984,10 +977,10 @@ async def test_get_migration_workflow_flattened_error_async(): ) -def test_list_migration_workflows( - transport: str = "grpc", - request_type=migration_service.ListMigrationWorkflowsRequest, -): +@pytest.mark.parametrize( + "request_type", [migration_service.ListMigrationWorkflowsRequest, dict,] +) +def test_list_migration_workflows(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1016,10 +1009,6 @@ def test_list_migration_workflows( assert response.next_page_token == "next_page_token_value" -def test_list_migration_workflows_from_dict(): - test_list_migration_workflows(request_type=dict) - - def test_list_migration_workflows_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1210,8 +1199,10 @@ async def test_list_migration_workflows_flattened_error_async(): ) -def test_list_migration_workflows_pager(): - client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_migration_workflows_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1256,8 +1247,10 @@ def test_list_migration_workflows_pager(): assert all(isinstance(i, migration_entities.MigrationWorkflow) for i in results) -def test_list_migration_workflows_pages(): - client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_migration_workflows_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
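The test hunks in this patch all apply one recurring refactor from gapic-generator-python 0.58.4: each plain test function and its separate test_*_from_dict twin collapse into a single pytest-parametrized test over the request type. A minimal self-contained sketch of the pattern, where SomeRequest is a hypothetical stand-in for a generated request class such as migration_service.GetMigrationWorkflowRequest:

import pytest

class SomeRequest:
    """Stand-in for a generated proto request class."""

@pytest.mark.parametrize("request_type", [SomeRequest, dict])
def test_some_call(request_type, transport: str = "grpc"):
    # One parametrized body covers both the proto request type and a plain
    # dict, replacing the deleted test_*_from_dict helper.
    request = request_type()
    assert isinstance(request, request_type)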
with mock.patch.object( @@ -1386,10 +1379,10 @@ async def test_list_migration_workflows_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_migration_workflow( - transport: str = "grpc", - request_type=migration_service.DeleteMigrationWorkflowRequest, -): +@pytest.mark.parametrize( + "request_type", [migration_service.DeleteMigrationWorkflowRequest, dict,] +) +def test_delete_migration_workflow(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1415,10 +1408,6 @@ def test_delete_migration_workflow( assert response is None -def test_delete_migration_workflow_from_dict(): - test_delete_migration_workflow(request_type=dict) - - def test_delete_migration_workflow_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1600,10 +1589,10 @@ async def test_delete_migration_workflow_flattened_error_async(): ) -def test_start_migration_workflow( - transport: str = "grpc", - request_type=migration_service.StartMigrationWorkflowRequest, -): +@pytest.mark.parametrize( + "request_type", [migration_service.StartMigrationWorkflowRequest, dict,] +) +def test_start_migration_workflow(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1629,10 +1618,6 @@ def test_start_migration_workflow( assert response is None -def test_start_migration_workflow_from_dict(): - test_start_migration_workflow(request_type=dict) - - def test_start_migration_workflow_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1814,9 +1799,10 @@ async def test_start_migration_workflow_flattened_error_async(): ) -def test_get_migration_subtask( - transport: str = "grpc", request_type=migration_service.GetMigrationSubtaskRequest -): +@pytest.mark.parametrize( + "request_type", [migration_service.GetMigrationSubtaskRequest, dict,] +) +def test_get_migration_subtask(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1853,10 +1839,6 @@ def test_get_migration_subtask( assert response.resource_error_count == 2169 -def test_get_migration_subtask_from_dict(): - test_get_migration_subtask(request_type=dict) - - def test_get_migration_subtask_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2055,9 +2037,10 @@ async def test_get_migration_subtask_flattened_error_async(): ) -def test_list_migration_subtasks( - transport: str = "grpc", request_type=migration_service.ListMigrationSubtasksRequest -): +@pytest.mark.parametrize( + "request_type", [migration_service.ListMigrationSubtasksRequest, dict,] +) +def test_list_migration_subtasks(request_type, transport: str = "grpc"): client = MigrationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2086,10 +2069,6 @@ def test_list_migration_subtasks( assert response.next_page_token == "next_page_token_value" -def test_list_migration_subtasks_from_dict(): - test_list_migration_subtasks(request_type=dict) - - def test_list_migration_subtasks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. @@ -2280,8 +2259,10 @@ async def test_list_migration_subtasks_flattened_error_async(): ) -def test_list_migration_subtasks_pager(): - client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_migration_subtasks_pager(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2326,8 +2307,10 @@ def test_list_migration_subtasks_pager(): assert all(isinstance(i, migration_entities.MigrationSubtask) for i in results) -def test_list_migration_subtasks_pages(): - client = MigrationServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_migration_subtasks_pages(transport_name: str = "grpc"): + client = MigrationServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3004,7 +2987,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From e135752e9594891969d6d8177fe5432350bbb182 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 10:48:01 -0800 Subject: [PATCH 10/18] build: switch to release-please for tagging (#29) Source-Link: https://github.com/googleapis/synthtool/commit/f8077d237e0df2cb0066dfc6e09fc41e1c59646a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 1 + .github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .github/release-trigger.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f33299d..ff5126c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad0..466597e 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 0000000..d4ca941 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 64edcd94a9ce0f3013cdacb2024cd9559766e114 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 11:29:53 -0500 Subject: [PATCH 11/18] chore(python): update release.sh to use keystore (#30) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/release.sh | 2 +- .kokoro/release/common.cfg | 12 +++++++++++- 3 
files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ff5126c..eecb84c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 64243d5..2cedfed 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-bigquery-migration python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index d219b96..2ffb6a4 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-bigquery-migration/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 2ec1240319119b8020de19356179d378ee9ec364 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jan 2022 20:05:43 -0500 Subject: [PATCH 12/18] ci(python): run lint / unit tests / docs as GH actions (#31) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add commit to trigger gh actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 16 +++++++++- .github/workflows/docs.yml | 38 +++++++++++++++++++++++ .github/workflows/lint.yml | 25 +++++++++++++++ .github/workflows/unittest.yml | 57 ++++++++++++++++++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/unittest.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eecb84c..b668c04 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..f7b8344 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..1e8b05c --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 0000000..074ee25 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: 
Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From 44c10e17767135b7a5c9a5e22b82260be75459b1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 24 Jan 2022 11:01:16 -0600 Subject: [PATCH 13/18] feat: Add task details and orchestration result details (#32) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat!: Consolidate task details into service API and add orchestration result details BREAKING CHANGE: This change relocates some task proto definitions and updates message fields, necessitating updates to imports. PiperOrigin-RevId: 423360094 Source-Link: https://github.com/googleapis/googleapis/commit/c9a482e23c28251459afdc4b3b63cbf2fb1f163b Source-Link: https://github.com/googleapis/googleapis-gen/commit/0c2ce73c148389e4382c11eb7abfd0b26973cbb7 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGMyY2U3M2MxNDgzODllNDM4MmMxMWViN2FiZmQwYjI2OTczY2JiNyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- google/cloud/bigquery_migration/__init__.py | 36 +++ .../bigquery_migration_v2alpha/__init__.py | 20 ++ .../types/__init__.py | 24 ++ .../types/assessment_task.py | 62 +++++ .../types/migration_entities.py | 71 ++++- .../types/translation_task.py | 244 ++++++++++++++++++ .../test_migration_service.py | 2 + 7 files changed, 455 insertions(+), 4 deletions(-) create mode 100644 google/cloud/bigquery_migration_v2alpha/types/assessment_task.py create mode 100644 google/cloud/bigquery_migration_v2alpha/types/translation_task.py diff --git a/google/cloud/bigquery_migration/__init__.py b/google/cloud/bigquery_migration/__init__.py index 80e5483..a7d2621 100644 --- a/google/cloud/bigquery_migration/__init__.py +++ b/google/cloud/bigquery_migration/__init__.py @@ -21,12 +21,21 @@ MigrationServiceAsyncClient, ) +from google.cloud.bigquery_migration_v2alpha.types.assessment_task import ( + AssessmentOrchestrationResultDetails, +) +from google.cloud.bigquery_migration_v2alpha.types.assessment_task import ( + AssessmentTaskDetails, +) from google.cloud.bigquery_migration_v2alpha.types.migration_entities import ( MigrationSubtask, ) from google.cloud.bigquery_migration_v2alpha.types.migration_entities import ( MigrationTask, ) +from google.cloud.bigquery_migration_v2alpha.types.migration_entities import ( + MigrationTaskOrchestrationResult, +) from google.cloud.bigquery_migration_v2alpha.types.migration_entities import ( MigrationWorkflow, ) @@ -70,12 +79,32 @@ from google.cloud.bigquery_migration_v2alpha.types.migration_service import ( StartMigrationWorkflowRequest, ) +from google.cloud.bigquery_migration_v2alpha.types.translation_task import BteqOptions +from google.cloud.bigquery_migration_v2alpha.types.translation_task import ( + DatasetReference, +) +from google.cloud.bigquery_migration_v2alpha.types.translation_task import Filter +from google.cloud.bigquery_migration_v2alpha.types.translation_task import ( + IdentifierSettings, +) +from google.cloud.bigquery_migration_v2alpha.types.translation_task import ( + TeradataOptions, +) +from google.cloud.bigquery_migration_v2alpha.types.translation_task import ( + TranslationFileMapping, +) +from google.cloud.bigquery_migration_v2alpha.types.translation_task import ( + TranslationTaskDetails, +) __all__ = ( "MigrationServiceClient", 
"MigrationServiceAsyncClient", + "AssessmentOrchestrationResultDetails", + "AssessmentTaskDetails", "MigrationSubtask", "MigrationTask", + "MigrationTaskOrchestrationResult", "MigrationWorkflow", "ErrorDetail", "ErrorLocation", @@ -93,4 +122,11 @@ "ListMigrationWorkflowsRequest", "ListMigrationWorkflowsResponse", "StartMigrationWorkflowRequest", + "BteqOptions", + "DatasetReference", + "Filter", + "IdentifierSettings", + "TeradataOptions", + "TranslationFileMapping", + "TranslationTaskDetails", ) diff --git a/google/cloud/bigquery_migration_v2alpha/__init__.py b/google/cloud/bigquery_migration_v2alpha/__init__.py index 4b5dcfa..d99fd78 100644 --- a/google/cloud/bigquery_migration_v2alpha/__init__.py +++ b/google/cloud/bigquery_migration_v2alpha/__init__.py @@ -17,8 +17,11 @@ from .services.migration_service import MigrationServiceClient from .services.migration_service import MigrationServiceAsyncClient +from .types.assessment_task import AssessmentOrchestrationResultDetails +from .types.assessment_task import AssessmentTaskDetails from .types.migration_entities import MigrationSubtask from .types.migration_entities import MigrationTask +from .types.migration_entities import MigrationTaskOrchestrationResult from .types.migration_entities import MigrationWorkflow from .types.migration_error_details import ErrorDetail from .types.migration_error_details import ErrorLocation @@ -36,15 +39,28 @@ from .types.migration_service import ListMigrationWorkflowsRequest from .types.migration_service import ListMigrationWorkflowsResponse from .types.migration_service import StartMigrationWorkflowRequest +from .types.translation_task import BteqOptions +from .types.translation_task import DatasetReference +from .types.translation_task import Filter +from .types.translation_task import IdentifierSettings +from .types.translation_task import TeradataOptions +from .types.translation_task import TranslationFileMapping +from .types.translation_task import TranslationTaskDetails __all__ = ( "MigrationServiceAsyncClient", + "AssessmentOrchestrationResultDetails", + "AssessmentTaskDetails", + "BteqOptions", "CreateMigrationWorkflowRequest", + "DatasetReference", "DeleteMigrationWorkflowRequest", "ErrorDetail", "ErrorLocation", + "Filter", "GetMigrationSubtaskRequest", "GetMigrationWorkflowRequest", + "IdentifierSettings", "ListMigrationSubtasksRequest", "ListMigrationSubtasksResponse", "ListMigrationWorkflowsRequest", @@ -52,11 +68,15 @@ "MigrationServiceClient", "MigrationSubtask", "MigrationTask", + "MigrationTaskOrchestrationResult", "MigrationWorkflow", "Point", "ResourceErrorDetail", "StartMigrationWorkflowRequest", + "TeradataOptions", "TimeInterval", "TimeSeries", + "TranslationFileMapping", + "TranslationTaskDetails", "TypedValue", ) diff --git a/google/cloud/bigquery_migration_v2alpha/types/__init__.py b/google/cloud/bigquery_migration_v2alpha/types/__init__.py index 8a7d29f..8763134 100644 --- a/google/cloud/bigquery_migration_v2alpha/types/__init__.py +++ b/google/cloud/bigquery_migration_v2alpha/types/__init__.py @@ -13,9 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from .assessment_task import ( + AssessmentOrchestrationResultDetails, + AssessmentTaskDetails, +) from .migration_entities import ( MigrationSubtask, MigrationTask, + MigrationTaskOrchestrationResult, MigrationWorkflow, ) from .migration_error_details import ( @@ -40,10 +45,22 @@ ListMigrationWorkflowsResponse, StartMigrationWorkflowRequest, ) +from .translation_task import ( + BteqOptions, + DatasetReference, + Filter, + IdentifierSettings, + TeradataOptions, + TranslationFileMapping, + TranslationTaskDetails, +) __all__ = ( + "AssessmentOrchestrationResultDetails", + "AssessmentTaskDetails", "MigrationSubtask", "MigrationTask", + "MigrationTaskOrchestrationResult", "MigrationWorkflow", "ErrorDetail", "ErrorLocation", @@ -61,4 +78,11 @@ "ListMigrationWorkflowsRequest", "ListMigrationWorkflowsResponse", "StartMigrationWorkflowRequest", + "BteqOptions", + "DatasetReference", + "Filter", + "IdentifierSettings", + "TeradataOptions", + "TranslationFileMapping", + "TranslationTaskDetails", ) diff --git a/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py b/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py new file mode 100644 index 0000000..719f44d --- /dev/null +++ b/google/cloud/bigquery_migration_v2alpha/types/assessment_task.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={"AssessmentTaskDetails", "AssessmentOrchestrationResultDetails",}, +) + + +class AssessmentTaskDetails(proto.Message): + r"""Assessment task config. + + Attributes: + input_path (str): + Required. The Cloud Storage path for + assessment input files. + output_dataset (str): + Required. The BigQuery dataset for output. + querylogs_path (str): + Optional. An optional Cloud Storage path to + write the query logs (which is then used as an + input path on the translation task) + data_source (str): + Required. The data source or data warehouse + type (eg: TERADATA/REDSHIFT) from which the + input data is extracted. + """ + + input_path = proto.Field(proto.STRING, number=1,) + output_dataset = proto.Field(proto.STRING, number=2,) + querylogs_path = proto.Field(proto.STRING, number=3,) + data_source = proto.Field(proto.STRING, number=4,) + + +class AssessmentOrchestrationResultDetails(proto.Message): + r"""Details for an assessment task orchestration result. + + Attributes: + output_tables_schema_version (str): + Optional. The version used for the output + table schemas. 
+ """ + + output_tables_schema_version = proto.Field(proto.STRING, number=1,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py index 1e99224..55daf49 100644 --- a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py +++ b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py @@ -15,8 +15,10 @@ # import proto # type: ignore +from google.cloud.bigquery_migration_v2alpha.types import assessment_task from google.cloud.bigquery_migration_v2alpha.types import migration_error_details from google.cloud.bigquery_migration_v2alpha.types import migration_metrics +from google.cloud.bigquery_migration_v2alpha.types import translation_task from google.protobuf import any_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import error_details_pb2 # type: ignore @@ -24,7 +26,12 @@ __protobuf__ = proto.module( package="google.cloud.bigquery.migration.v2alpha", - manifest={"MigrationWorkflow", "MigrationTask", "MigrationSubtask",}, + manifest={ + "MigrationWorkflow", + "MigrationTask", + "MigrationSubtask", + "MigrationTaskOrchestrationResult", + }, ) @@ -79,7 +86,23 @@ class MigrationTask(proto.Message): r"""A single task for a migration which has details about the configuration of the task. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: + assessment_task_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentTaskDetails): + Task configuration for Assessment. + + This field is a member of `oneof`_ ``task_details``. + translation_task_details (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails): + Task configuration for Batch/Offline SQL + Translation. + + This field is a member of `oneof`_ ``task_details``. id (str): Output only. Immutable. The unique identifier for the migration task. The ID is server- @@ -88,9 +111,9 @@ class MigrationTask(proto.Message): The type of the task. This must be a supported task type. details (google.protobuf.any_pb2.Any): - The details of the task. The type URL must be - one of the supported task details messages and - correspond to the Task's type. + DEPRECATED! Use one of the task_details below. The details + of the task. The type URL must be one of the supported task + details messages and correspond to the Task's type. state (google.cloud.bigquery_migration_v2alpha.types.MigrationTask.State): Output only. The current state of the task. processing_error (google.rpc.error_details_pb2.ErrorInfo): @@ -100,6 +123,9 @@ class MigrationTask(proto.Message): Time when the task was created. last_update_time (google.protobuf.timestamp_pb2.Timestamp): Time when the task was last updated. + orchestration_result (google.cloud.bigquery_migration_v2alpha.types.MigrationTaskOrchestrationResult): + Output only. Additional information about the + orchestration. 
""" class State(proto.Enum): @@ -112,6 +138,18 @@ class State(proto.Enum): SUCCEEDED = 5 FAILED = 6 + assessment_task_details = proto.Field( + proto.MESSAGE, + number=12, + oneof="task_details", + message=assessment_task.AssessmentTaskDetails, + ) + translation_task_details = proto.Field( + proto.MESSAGE, + number=13, + oneof="task_details", + message=translation_task.TranslationTaskDetails, + ) id = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.STRING, number=2,) details = proto.Field(proto.MESSAGE, number=3, message=any_pb2.Any,) @@ -123,6 +161,9 @@ class State(proto.Enum): last_update_time = proto.Field( proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp, ) + orchestration_result = proto.Field( + proto.MESSAGE, number=10, message="MigrationTaskOrchestrationResult", + ) class MigrationSubtask(proto.Message): @@ -202,4 +243,26 @@ class State(proto.Enum): ) +class MigrationTaskOrchestrationResult(proto.Message): + r"""Additional information from the orchestrator when it is done + with the task orchestration. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + assessment_details (google.cloud.bigquery_migration_v2alpha.types.AssessmentOrchestrationResultDetails): + Details specific to assessment task types. + + This field is a member of `oneof`_ ``details``. + """ + + assessment_details = proto.Field( + proto.MESSAGE, + number=1, + oneof="details", + message=assessment_task.AssessmentOrchestrationResultDetails, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/bigquery_migration_v2alpha/types/translation_task.py b/google/cloud/bigquery_migration_v2alpha/types/translation_task.py new file mode 100644 index 0000000..e2be946 --- /dev/null +++ b/google/cloud/bigquery_migration_v2alpha/types/translation_task.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.cloud.bigquery.migration.v2alpha", + manifest={ + "TranslationFileMapping", + "TranslationTaskDetails", + "Filter", + "IdentifierSettings", + "TeradataOptions", + "BteqOptions", + "DatasetReference", + }, +) + + +class TranslationFileMapping(proto.Message): + r"""Mapping between an input and output file to be translated in + a subtask. + + Attributes: + input_path (str): + The Cloud Storage path for a file to + translation in a subtask. + output_path (str): + The Cloud Storage path to write back the + corresponding input file to. + """ + + input_path = proto.Field(proto.STRING, number=1,) + output_path = proto.Field(proto.STRING, number=2,) + + +class TranslationTaskDetails(proto.Message): + r"""The translation task config to capture necessary settings for + a translation task and subtask. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + teradata_options (google.cloud.bigquery_migration_v2alpha.types.TeradataOptions): + The Teradata SQL specific settings for the + translation task. + + This field is a member of `oneof`_ ``language_options``. + bteq_options (google.cloud.bigquery_migration_v2alpha.types.BteqOptions): + The BTEQ specific settings for the + translation task. + + This field is a member of `oneof`_ ``language_options``. + input_path (str): + The Cloud Storage path for translation input + files. + output_path (str): + The Cloud Storage path for translation output + files. + file_paths (Sequence[google.cloud.bigquery_migration_v2alpha.types.TranslationFileMapping]): + Cloud Storage files to be processed for + translation. + schema_path (str): + The Cloud Storage path to DDL files as table + schema to assist semantic translation. + file_encoding (google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.FileEncoding): + The file encoding type. + identifier_settings (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings): + The settings for SQL identifiers. + special_token_map (Sequence[google.cloud.bigquery_migration_v2alpha.types.TranslationTaskDetails.SpecialTokenMapEntry]): + The map capturing special tokens to be + replaced during translation. The key is special + token in string. The value is the token data + type. This is used to translate SQL query + template which contains special token as place + holder. The special token makes a query invalid + to parse. This map will be applied to annotate + those special token with types to let parser + understand how to parse them into proper + structure with type information. + filter (google.cloud.bigquery_migration_v2alpha.types.Filter): + The filter applied to translation details. + translation_exception_table (str): + Specifies the exact name of the bigquery + table ("dataset.table") to be used for surfacing + raw translation errors. If the table does not + exist, we will create it. If it already exists + and the schema is the same, we will re-use. If + the table exists and the schema is different, we + will throw an error. 
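Editor's note: an editorial sketch (not part of the diff) of constructing the ``TranslationTaskDetails`` message described above; the bucket names are hypothetical.

.. code-block:: python

    from google.cloud import bigquery_migration_v2alpha

    details = bigquery_migration_v2alpha.TranslationTaskDetails(
        # ``teradata_options`` and ``bteq_options`` form the
        # ``language_options`` oneof; set at most one of them.
        teradata_options=bigquery_migration_v2alpha.TeradataOptions(),
        input_path="gs://example-bucket/sql-input",    # hypothetical paths
        output_path="gs://example-bucket/sql-output",
        file_encoding=(
            bigquery_migration_v2alpha.TranslationTaskDetails.FileEncoding.UTF_8
        ),
    )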
+ """ + + class FileEncoding(proto.Enum): + r"""The file encoding types.""" + FILE_ENCODING_UNSPECIFIED = 0 + UTF_8 = 1 + ISO_8859_1 = 2 + US_ASCII = 3 + UTF_16 = 4 + UTF_16LE = 5 + UTF_16BE = 6 + + class TokenType(proto.Enum): + r"""The special token data type.""" + TOKEN_TYPE_UNSPECIFIED = 0 + STRING = 1 + INT64 = 2 + NUMERIC = 3 + BOOL = 4 + FLOAT64 = 5 + DATE = 6 + TIMESTAMP = 7 + + teradata_options = proto.Field( + proto.MESSAGE, number=10, oneof="language_options", message="TeradataOptions", + ) + bteq_options = proto.Field( + proto.MESSAGE, number=11, oneof="language_options", message="BteqOptions", + ) + input_path = proto.Field(proto.STRING, number=1,) + output_path = proto.Field(proto.STRING, number=2,) + file_paths = proto.RepeatedField( + proto.MESSAGE, number=12, message="TranslationFileMapping", + ) + schema_path = proto.Field(proto.STRING, number=3,) + file_encoding = proto.Field(proto.ENUM, number=4, enum=FileEncoding,) + identifier_settings = proto.Field( + proto.MESSAGE, number=5, message="IdentifierSettings", + ) + special_token_map = proto.MapField( + proto.STRING, proto.ENUM, number=6, enum=TokenType, + ) + filter = proto.Field(proto.MESSAGE, number=7, message="Filter",) + translation_exception_table = proto.Field(proto.STRING, number=13,) + + +class Filter(proto.Message): + r"""The filter applied to fields of translation details. + + Attributes: + input_file_exclusion_prefixes (Sequence[str]): + The list of prefixes used to exclude + processing for input files. + """ + + input_file_exclusion_prefixes = proto.RepeatedField(proto.STRING, number=1,) + + +class IdentifierSettings(proto.Message): + r"""Settings related to SQL identifiers. + + Attributes: + output_identifier_case (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierCase): + The setting to control output queries' + identifier case. + identifier_rewrite_mode (google.cloud.bigquery_migration_v2alpha.types.IdentifierSettings.IdentifierRewriteMode): + Specifies the rewrite mode for SQL + identifiers. + """ + + class IdentifierCase(proto.Enum): + r"""The identifier case type.""" + IDENTIFIER_CASE_UNSPECIFIED = 0 + ORIGINAL = 1 + UPPER = 2 + LOWER = 3 + + class IdentifierRewriteMode(proto.Enum): + r"""The SQL identifier rewrite mode.""" + IDENTIFIER_REWRITE_MODE_UNSPECIFIED = 0 + NONE = 1 + REWRITE_ALL = 2 + + output_identifier_case = proto.Field(proto.ENUM, number=1, enum=IdentifierCase,) + identifier_rewrite_mode = proto.Field( + proto.ENUM, number=2, enum=IdentifierRewriteMode, + ) + + +class TeradataOptions(proto.Message): + r"""Teradata SQL specific translation task related settings. + """ + + +class BteqOptions(proto.Message): + r"""BTEQ translation task related settings. + + Attributes: + project_dataset (google.cloud.bigquery_migration_v2alpha.types.DatasetReference): + Specifies the project and dataset in BigQuery + that will be used for external table creation + during the translation. + default_path_uri (str): + The Cloud Storage location to be used as the + default path for files that are not otherwise + specified in the file replacement map. + file_replacement_map (Sequence[google.cloud.bigquery_migration_v2alpha.types.BteqOptions.FileReplacementMapEntry]): + Maps the local paths that are used in BTEQ + scripts (the keys) to the paths in Cloud Storage + that should be used in their stead in the + translation (the value). 
+ """ + + project_dataset = proto.Field(proto.MESSAGE, number=1, message="DatasetReference",) + default_path_uri = proto.Field(proto.STRING, number=2,) + file_replacement_map = proto.MapField(proto.STRING, proto.STRING, number=3,) + + +class DatasetReference(proto.Message): + r"""Reference to a BigQuery dataset. + + Attributes: + dataset_id (str): + A unique ID for this dataset, without the project name. The + ID must contain only letters (a-z, A-Z), numbers (0-9), or + underscores (_). The maximum length is 1,024 characters. + project_id (str): + The ID of the project containing this + dataset. + """ + + dataset_id = proto.Field(proto.STRING, number=1,) + project_id = proto.Field(proto.STRING, number=2,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 38c993b..d6d7508 100644 --- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -41,10 +41,12 @@ from google.cloud.bigquery_migration_v2alpha.services.migration_service import ( transports, ) +from google.cloud.bigquery_migration_v2alpha.types import assessment_task from google.cloud.bigquery_migration_v2alpha.types import migration_entities from google.cloud.bigquery_migration_v2alpha.types import migration_error_details from google.cloud.bigquery_migration_v2alpha.types import migration_metrics from google.cloud.bigquery_migration_v2alpha.types import migration_service +from google.cloud.bigquery_migration_v2alpha.types import translation_task from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore From 124de81e97b39694433820678704b3f6079ce1e2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Jan 2022 12:08:08 -0500 Subject: [PATCH 14/18] feat: add api key support (#34) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: upgrade gapic-generator-java, gax-java and gapic-generator-python PiperOrigin-RevId: 423842556 Source-Link: https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../migration_service/async_client.py | 38 +++++- .../services/migration_service/client.py | 127 +++++++++++------ .../test_migration_service.py | 128 ++++++++++++++++++ 3 files changed, 249 insertions(+), 44 deletions(-) diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index 58da1a3..6e7f774 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import 
pkg_resources

from google.api_core.client_options import ClientOptions
@@ -119,6 +119,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):

     from_service_account_json = from_service_account_file

+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> MigrationServiceTransport:
         """Returns the transport used by the client instance.
diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
index 11ff5d0..4e30027 100644
--- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
+++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py
@@ -257,6 +257,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}

+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
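Editor's note: an editorial sketch (not part of the diff) of calling the new classmethod; with neither ``GOOGLE_API_USE_CLIENT_CERTIFICATE`` nor ``GOOGLE_API_USE_MTLS_ENDPOINT`` set, it returns the default endpoint and no client cert source.

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import bigquery_migration_v2alpha

    client_cls = bigquery_migration_v2alpha.MigrationServiceClient
    endpoint, cert_source = client_cls.get_mtls_endpoint_and_cert_source(
        ClientOptions()
    )
    assert endpoint == client_cls.DEFAULT_ENDPOINT
    assert cert_source is None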
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -307,57 +374,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. 
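Editor's note: an editorial sketch (not part of the diff) of the API key support this patch adds, mirroring the pattern used in the tests below. It assumes a google-auth version that provides ``get_api_key_credentials``; the key value is a placeholder.

.. code-block:: python

    from google.api_core import client_options as client_options_lib
    from google.cloud import bigquery_migration_v2alpha

    options = client_options_lib.ClientOptions()
    options.api_key = "YOUR_API_KEY"  # placeholder value

    # ``api_key`` is mutually exclusive with explicit ``credentials``
    # and with a transport instance that carries its own credentials.
    client = bigquery_migration_v2alpha.MigrationServiceClient(
        client_options=options
    )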
# Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, MigrationServiceTransport): # transport is a MigrationServiceTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -369,6 +401,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index d6d7508..67b7975 100644 --- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -421,6 +421,87 @@ def test_migration_service_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [MigrationServiceClient, MigrationServiceAsyncClient] +) +@mock.patch.object( + MigrationServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceClient), +) +@mock.patch.object( + MigrationServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MigrationServiceAsyncClient), +) +def test_migration_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2461,6 +2542,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.MigrationServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MigrationServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.MigrationServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3054,3 +3152,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MigrationServiceClient, transports.MigrationServiceGrpcTransport), + (MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From b3b1ee2c0075adadedeef28a5853a440fc1e6535 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Feb 2022 19:56:47 +0000 Subject: [PATCH 15/18] chore: use gapic-generator-python 0.62.1 (#36) - [ ] Regenerate this pull request now. 
fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../migration_service/async_client.py | 14 ++-- .../services/migration_service/client.py | 14 ++-- .../migration_service/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../types/migration_entities.py | 4 +- .../test_migration_service.py | 79 ++++++++++++++++++- 6 files changed, 100 insertions(+), 21 deletions(-) diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index 6e7f774..efe0d6a 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -260,7 +260,7 @@ async def create_migration_workflow( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, migration_workflow]) if request is not None and has_flattened_params: @@ -335,7 +335,7 @@ async def get_migration_workflow( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -418,7 +418,7 @@ async def list_migration_workflows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -499,7 +499,7 @@ async def delete_migration_workflow( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -568,7 +568,7 @@ async def start_migration_workflow( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -652,7 +652,7 @@ async def get_migration_subtask( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -734,7 +734,7 @@ async def list_migration_subtasks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index 4e30027..a0774b1 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -467,7 +467,7 @@ def create_migration_workflow( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, migration_workflow]) if request is not None and has_flattened_params: @@ -544,7 +544,7 @@ def get_migration_workflow( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -618,7 +618,7 @@ def list_migration_workflows( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -690,7 +690,7 @@ def delete_migration_workflow( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -761,7 +761,7 @@ def start_migration_workflow( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -836,7 +836,7 @@ def get_migration_subtask( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -909,7 +909,7 @@ def list_migration_subtasks( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py index 19f31f8..2e80842 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc.py @@ -160,8 +160,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py index 5eacdd0..c06da35 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/transports/grpc_asyncio.py @@ -205,8 +205,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py index 55daf49..0254518 100644 --- a/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py +++ b/google/cloud/bigquery_migration_v2alpha/types/migration_entities.py @@ -105,8 +105,8 @@ class MigrationTask(proto.Message): This field is a member of `oneof`_ ``task_details``. id (str): Output only. Immutable. The unique identifier - for the migration task. The ID is server- - generated. + for the migration task. The ID is + server-generated. type_ (str): The type of the task. This must be a supported task type. 
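Editor's note: an editorial sketch (not part of the diff) of the scenario this patch fixes. Passing a credentials file through client options no longer raises ``DuplicateCredentialArgs``, because the transports above now pass ``credentials_file=None`` to ``create_channel`` and reuse the credentials they already loaded from the file. The file path is hypothetical.

.. code-block:: python

    from google.api_core.client_options import ClientOptions
    from google.cloud import bigquery_migration_v2alpha

    options = ClientOptions(credentials_file="credentials.json")  # hypothetical path
    client = bigquery_migration_v2alpha.MigrationServiceClient(
        client_options=options
    )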
diff --git a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py index 67b7975..323f238 100644 --- a/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py +++ b/tests/unit/gapic/bigquery_migration_v2alpha/test_migration_service.py @@ -534,21 +534,28 @@ def test_migration_service_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (MigrationServiceClient, transports.MigrationServiceGrpcTransport, "grpc"), + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), ( MigrationServiceAsyncClient, transports.MigrationServiceGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_migration_service_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -584,6 +591,72 @@ def test_migration_service_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + MigrationServiceClient, + transports.MigrationServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + MigrationServiceAsyncClient, + transports.MigrationServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_migration_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "bigquerymigration.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="bigquerymigration.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize( "request_type", [migration_service.CreateMigrationWorkflowRequest, dict,] ) From 13b7ac71ace1cc226d6fa5b43dde345c3ac3e489 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Feb 2022 11:10:40 -0700 Subject: [PATCH 16/18] docs: add generated snippets (#38) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.63.2 docs: add generated snippets PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../migration_service/async_client.py | 130 ++++ .../services/migration_service/client.py | 137 ++++ ...service_create_migration_workflow_async.py | 45 ++ ..._service_create_migration_workflow_sync.py | 45 ++ ...service_delete_migration_workflow_async.py | 43 ++ ..._service_delete_migration_workflow_sync.py | 43 ++ ...ion_service_get_migration_subtask_async.py | 45 ++ ...tion_service_get_migration_subtask_sync.py | 45 ++ ...on_service_get_migration_workflow_async.py | 45 ++ ...ion_service_get_migration_workflow_sync.py | 45 ++ ...n_service_list_migration_subtasks_async.py | 46 ++ ...on_service_list_migration_subtasks_sync.py | 46 ++ ..._service_list_migration_workflows_async.py | 46 ++ ...n_service_list_migration_workflows_sync.py | 46 ++ ..._service_start_migration_workflow_async.py | 43 ++ ...n_service_start_migration_workflow_sync.py | 43 ++ ...t_metadata_bigquery migration_v2alpha.json | 619 ++++++++++++++++++ 17 files changed, 1512 insertions(+) create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_sync.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_sync.py create mode 100644 
samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_sync.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_sync.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_sync.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_sync.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_async.py create mode 100644 samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_sync.py create mode 100644 samples/generated_samples/snippet_metadata_bigquery migration_v2alpha.json diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py index efe0d6a..c7e8bc2 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/async_client.py @@ -227,6 +227,25 @@ async def create_migration_workflow( ) -> migration_entities.MigrationWorkflow: r"""Creates a migration workflow. + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]): The request object. Request to create a migration @@ -309,6 +328,25 @@ async def get_migration_workflow( ) -> migration_entities.MigrationWorkflow: r"""Gets a previously created migration workflow. + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]): The request object. 
A request to get a previously @@ -391,6 +429,26 @@ async def list_migration_workflows( ) -> pagers.ListMigrationWorkflowsAsyncPager: r"""Lists previously created migration workflow. + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]): The request object. A request to list previously created @@ -480,6 +538,22 @@ async def delete_migration_workflow( ) -> None: r"""Deletes a migration workflow by name. + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]): The request object. A request to delete a previously @@ -549,6 +623,23 @@ async def start_migration_workflow( signaled if the state is anything other than DRAFT or RUNNING. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]): The request object. A request to start a previously @@ -623,6 +714,25 @@ async def get_migration_subtask( ) -> migration_entities.MigrationSubtask: r"""Gets a previously created migration subtask. + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]): The request object. A request to get a previously @@ -708,6 +818,26 @@ async def list_migration_subtasks( ) -> pagers.ListMigrationSubtasksAsyncPager: r"""Lists previously created migration subtasks. + .. 
code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]): The request object. A request to list previously created diff --git a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py index a0774b1..69285a5 100644 --- a/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py +++ b/google/cloud/bigquery_migration_v2alpha/services/migration_service/client.py @@ -434,6 +434,26 @@ def create_migration_workflow( ) -> migration_entities.MigrationWorkflow: r"""Creates a migration workflow. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.CreateMigrationWorkflowRequest, dict]): The request object. Request to create a migration @@ -518,6 +538,26 @@ def get_migration_workflow( ) -> migration_entities.MigrationWorkflow: r"""Gets a previously created migration workflow. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationWorkflowRequest, dict]): The request object. A request to get a previously @@ -591,6 +631,27 @@ def list_migration_workflows( ) -> pagers.ListMigrationWorkflowsPager: r"""Lists previously created migration workflow. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationWorkflowsRequest, dict]): The request object. A request to list previously created @@ -671,6 +732,23 @@ def delete_migration_workflow( ) -> None: r"""Deletes a migration workflow by name. + + .. 
code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.DeleteMigrationWorkflowRequest, dict]): The request object. A request to delete a previously @@ -742,6 +820,24 @@ def start_migration_workflow( signaled if the state is anything other than DRAFT or RUNNING. + + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.StartMigrationWorkflowRequest, dict]): The request object. A request to start a previously @@ -807,6 +903,26 @@ def get_migration_subtask( ) -> migration_entities.MigrationSubtask: r"""Gets a previously created migration subtask. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.GetMigrationSubtaskRequest, dict]): The request object. A request to get a previously @@ -883,6 +999,27 @@ def list_migration_subtasks( ) -> pagers.ListMigrationSubtasksPager: r"""Lists previously created migration subtasks. + + .. code-block:: + + from google.cloud import bigquery_migration_v2alpha + + def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.bigquery_migration_v2alpha.types.ListMigrationSubtasksRequest, dict]): The request object. A request to list previously created diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_async.py new file mode 100644 index 0000000..16c630e --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = await client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_sync.py new file mode 100644 index 0000000..830b1bc --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_create_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.CreateMigrationWorkflowRequest( + parent="parent_value", + ) + + # Make the request + response = client.create_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_async.py new file mode 100644 index 0000000..72f05df --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_sync.py new file mode 100644 index 0000000..083b647 --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_delete_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.DeleteMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.delete_migration_workflow(request=request) + + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_async.py new file mode 100644 index 0000000..ce4065e --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_sync.py new file mode 100644 index 0000000..d98a29f --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationSubtask +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_get_migration_subtask(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationSubtaskRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_subtask(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_async.py new file mode 100644 index 0000000..edad29e --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_sync.py new file mode 100644 index 0000000..12359ca --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_get_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.GetMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + response = client.get_migration_workflow(request=request) + + # Handle the response + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_async.py new file mode 100644 index 0000000..c382d66 --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_sync.py new file mode 100644 index 0000000..33159a3 --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationSubtasks +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_list_migration_subtasks(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationSubtasksRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_subtasks(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_async.py new file mode 100644 index 0000000..5dcde4a --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_sync.py new file mode 100644 index 0000000..2b84f35 --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListMigrationWorkflows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_list_migration_workflows(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_migration_workflows(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_sync] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_async.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_async.py new file mode 100644 index 0000000..5e2bd8b --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_async] +from google.cloud import bigquery_migration_v2alpha + + +async def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceAsyncClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + await client.start_migration_workflow(request=request) + + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_async] diff --git a/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_sync.py b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_sync.py new file mode 100644 index 0000000..f96ef9a --- /dev/null +++ b/samples/generated_samples/bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StartMigrationWorkflow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-bigquery-migration + + +# [START bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_sync] +from google.cloud import bigquery_migration_v2alpha + + +def sample_start_migration_workflow(): + # Create a client + client = bigquery_migration_v2alpha.MigrationServiceClient() + + # Initialize request argument(s) + request = bigquery_migration_v2alpha.StartMigrationWorkflowRequest( + name="name_value", + ) + + # Make the request + client.start_migration_workflow(request=request) + + +# [END bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_sync] diff --git a/samples/generated_samples/snippet_metadata_bigquery migration_v2alpha.json b/samples/generated_samples/snippet_metadata_bigquery migration_v2alpha.json new file mode 100644 index 0000000..e6c33b0 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_bigquery migration_v2alpha.json @@ -0,0 +1,619 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "CreateMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_create_migration_workflow_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_CreateMigrationWorkflow_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": 
"MigrationService" + }, + "shortName": "DeleteMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_delete_migration_workflow_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_DeleteMigrationWorkflow_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "GetMigrationSubtask" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_subtask_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationSubtask_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "GetMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_get_migration_workflow_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_GetMigrationWorkflow_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + 
"type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "ListMigrationSubtasks" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_subtasks_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationSubtasks_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "ListMigrationWorkflows" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_list_migration_workflows_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_ListMigrationWorkflows_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + 
"service": { + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_async.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "MigrationService" + }, + "shortName": "StartMigrationWorkflow" + } + }, + "file": "bigquerymigration_generated_bigquery_migration_v2alpha_migration_service_start_migration_workflow_sync.py", + "regionTag": "bigquerymigration_generated_bigquery_migration_v2alpha_MigrationService_StartMigrationWorkflow_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} From 496abc7854985c6f1bfd8463330f2f07a0f3048c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 14 Feb 2022 15:09:19 -0700 Subject: [PATCH 17/18] fix(deps): remove unused dependency libcst (#39) --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index 93d732a..8094521 100644 --- a/setup.py +++ b/setup.py @@ -39,7 +39,6 @@ include_package_data=True, install_requires=( "google-api-core[grpc] >= 1.28.0, < 3.0.0dev", - "libcst >= 0.2.5", "proto-plus >= 1.15.0", ), python_requires=">=3.6", From c08b92e452b27f8fc5eb5a6f7a171fd50bfef4db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 14 Feb 2022 22:12:38 +0000 Subject: [PATCH 18/18] chore(main): release 0.3.0 (#33) :robot: I have created a release *beep* *boop* --- ## [0.3.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.2.1...v0.3.0) (2022-02-14) ### Features * add api key support ([#34](https://github.com/googleapis/python-bigquery-migration/issues/34)) ([124de81](https://github.com/googleapis/python-bigquery-migration/commit/124de81e97b39694433820678704b3f6079ce1e2)) * Add task details and orchestration result details ([#32](https://github.com/googleapis/python-bigquery-migration/issues/32)) ([44c10e1](https://github.com/googleapis/python-bigquery-migration/commit/44c10e17767135b7a5c9a5e22b82260be75459b1)) ### Bug Fixes * **deps:** remove unused dependency libcst ([#39](https://github.com/googleapis/python-bigquery-migration/issues/39)) ([496abc7](https://github.com/googleapis/python-bigquery-migration/commit/496abc7854985c6f1bfd8463330f2f07a0f3048c)) * resolve DuplicateCredentialArgs error when using credentials_file ([b3b1ee2](https://github.com/googleapis/python-bigquery-migration/commit/b3b1ee2c0075adadedeef28a5853a440fc1e6535)) ### Documentation * add generated snippets ([#38](https://github.com/googleapis/python-bigquery-migration/issues/38)) 
([13b7ac7](https://github.com/googleapis/python-bigquery-migration/commit/13b7ac71ace1cc226d6fa5b43dde345c3ac3e489)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). --- CHANGELOG.md | 19 +++++++++++++++++++ setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6356149..25bd5b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,24 @@ # Changelog +## [0.3.0](https://github.com/googleapis/python-bigquery-migration/compare/v0.2.1...v0.3.0) (2022-02-14) + + +### Features + +* add api key support ([#34](https://github.com/googleapis/python-bigquery-migration/issues/34)) ([124de81](https://github.com/googleapis/python-bigquery-migration/commit/124de81e97b39694433820678704b3f6079ce1e2)) +* Add task details and orchestration result details ([#32](https://github.com/googleapis/python-bigquery-migration/issues/32)) ([44c10e1](https://github.com/googleapis/python-bigquery-migration/commit/44c10e17767135b7a5c9a5e22b82260be75459b1)) + + +### Bug Fixes + +* **deps:** remove unused dependency libcst ([#39](https://github.com/googleapis/python-bigquery-migration/issues/39)) ([496abc7](https://github.com/googleapis/python-bigquery-migration/commit/496abc7854985c6f1bfd8463330f2f07a0f3048c)) +* resolve DuplicateCredentialArgs error when using credentials_file ([b3b1ee2](https://github.com/googleapis/python-bigquery-migration/commit/b3b1ee2c0075adadedeef28a5853a440fc1e6535)) + + +### Documentation + +* add generated snippets ([#38](https://github.com/googleapis/python-bigquery-migration/issues/38)) ([13b7ac7](https://github.com/googleapis/python-bigquery-migration/commit/13b7ac71ace1cc226d6fa5b43dde345c3ac3e489)) + ### [0.2.1](https://www.github.com/googleapis/python-bigquery-migration/compare/v0.2.0...v0.2.1) (2021-11-01) diff --git a/setup.py b/setup.py index 8094521..d3c37b6 100644 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ import os import setuptools # type: ignore -version = "0.2.1" +version = "0.3.0" package_root = os.path.abspath(os.path.dirname(__file__))
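
The 0.3.0 release above ships the API key support added in #34. A minimal sketch of constructing the client with an API key through ``client_options`` — assuming a google-api-core version that exposes ``ClientOptions.api_key``, and using a placeholder key and parent path rather than values from this repository:

from google.api_core.client_options import ClientOptions
from google.cloud import bigquery_migration_v2alpha


def list_workflows_with_api_key(api_key: str) -> None:
    # Hypothetical wrapper: `api_key` and PROJECT_ID are placeholders,
    # not values taken from this patch series.
    client = bigquery_migration_v2alpha.MigrationServiceClient(
        client_options=ClientOptions(api_key=api_key)
    )

    request = bigquery_migration_v2alpha.ListMigrationWorkflowsRequest(
        parent="projects/PROJECT_ID/locations/us",
    )

    # Same pager iteration pattern as the generated samples above.
    for workflow in client.list_migration_workflows(request=request):
        print(workflow)

The generated clients reject an API key combined with explicit credentials, so the sketch passes only the key; if neither is supplied, they fall back to Application Default Credentials.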