diff --git a/.coveragerc b/.coveragerc index 1b1bce72..691a23ba 100644 --- a/.coveragerc +++ b/.coveragerc @@ -5,6 +5,7 @@ branch = True show_missing = True omit = google/cloud/bigquery_datatransfer/__init__.py + google/cloud/bigquery_datatransfer/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 889f77df..5fc5daa3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in index cbd7e77f..882178ce 100644 --- a/.kokoro/requirements.in +++ b/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc4672..fa99c129 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ 
- --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + 
--hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage @@ -159,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + 
--hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ diff --git a/.release-please-manifest.json b/.release-please-manifest.json index dac16de6..b5505621 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.10.1" + ".": "3.11.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index c96299e0..40ccb6b7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.11.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.10.1...v3.11.0) (2023-02-28) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([#439](https://github.com/googleapis/python-bigquery-datatransfer/issues/439)) ([b77f671](https://github.com/googleapis/python-bigquery-datatransfer/commit/b77f6716a47ba5bda4e50919e37b41fbc7cb3e20)) + + +### Documentation + +* Minor comment update ([#445](https://github.com/googleapis/python-bigquery-datatransfer/issues/445)) ([27b3271](https://github.com/googleapis/python-bigquery-datatransfer/commit/27b3271682fae002d29c65e057190c01f4f24ed4)) + ## [3.10.1](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.10.0...v3.10.1) (2023-01-20) diff --git a/google/cloud/bigquery_datatransfer/gapic_version.py b/google/cloud/bigquery_datatransfer/gapic_version.py index 9d71162c..6c2e88f2 100644 --- a/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.10.1" # {x-release-please-version} +__version__ = "3.11.0" # {x-release-please-version} diff --git a/google/cloud/bigquery_datatransfer_v1/__init__.py b/google/cloud/bigquery_datatransfer_v1/__init__.py index 393dc1ea..918a64e2 100644 --- a/google/cloud/bigquery_datatransfer_v1/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from google.cloud.bigquery_datatransfer import gapic_version as package_version +from google.cloud.bigquery_datatransfer_v1 import gapic_version as package_version __version__ = package_version.__version__ diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json index 3b914fe7..c349f4cf 100644 --- a/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json +++ b/google/cloud/bigquery_datatransfer_v1/gapic_metadata.json @@ -166,6 +166,86 @@ ] } } + }, + "rest": { + "libraryClient": "DataTransferServiceClient", + "rpcs": { + "CheckValidCreds": { + "methods": [ + "check_valid_creds" + ] + }, + "CreateTransferConfig": { + "methods": [ + "create_transfer_config" + ] + }, + "DeleteTransferConfig": { + "methods": [ + "delete_transfer_config" + ] + }, + "DeleteTransferRun": { + "methods": [ + "delete_transfer_run" + ] + }, + "EnrollDataSources": { + "methods": [ + "enroll_data_sources" + ] + }, + "GetDataSource": { + "methods": [ + "get_data_source" + ] + }, + "GetTransferConfig": { + "methods": [ + "get_transfer_config" + ] + }, + "GetTransferRun": { + "methods": [ + "get_transfer_run" + ] + }, + "ListDataSources": { + "methods": [ + "list_data_sources" + ] + }, + "ListTransferConfigs": { + "methods": [ + "list_transfer_configs" + ] + }, + "ListTransferLogs": { + "methods": [ + "list_transfer_logs" + ] + }, + "ListTransferRuns": { + "methods": [ + "list_transfer_runs" + ] + }, + "ScheduleTransferRuns": { + "methods": 
[ + "schedule_transfer_runs" + ] + }, + "StartManualTransferRuns": { + "methods": [ + "start_manual_transfer_runs" + ] + }, + "UpdateTransferConfig": { + "methods": [ + "update_transfer_config" + ] + } + } } } } diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 9d71162c..6c2e88f2 100644 --- a/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.10.1" # {x-release-please-version} +__version__ = "3.11.0" # {x-release-please-version} diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index fb865c8f..0495d040 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -60,6 +60,7 @@ from .transports.base import DEFAULT_CLIENT_INFO, DataTransferServiceTransport from .transports.grpc import DataTransferServiceGrpcTransport from .transports.grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport +from .transports.rest import DataTransferServiceRestTransport class DataTransferServiceClientMeta(type): @@ -75,6 +76,7 @@ class DataTransferServiceClientMeta(type): ) # type: Dict[str, Type[DataTransferServiceTransport]] _transport_registry["grpc"] = DataTransferServiceGrpcTransport _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataTransferServiceRestTransport def get_transport_class( cls, diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py index 
01d735cb..ea741888 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/__init__.py @@ -19,6 +19,7 @@ from .base import DataTransferServiceTransport from .grpc import DataTransferServiceGrpcTransport from .grpc_asyncio import DataTransferServiceGrpcAsyncIOTransport +from .rest import DataTransferServiceRestInterceptor, DataTransferServiceRestTransport # Compile a registry of transports. _transport_registry = ( @@ -26,9 +27,12 @@ ) # type: Dict[str, Type[DataTransferServiceTransport]] _transport_registry["grpc"] = DataTransferServiceGrpcTransport _transport_registry["grpc_asyncio"] = DataTransferServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataTransferServiceRestTransport __all__ = ( "DataTransferServiceTransport", "DataTransferServiceGrpcTransport", "DataTransferServiceGrpcAsyncIOTransport", + "DataTransferServiceRestTransport", + "DataTransferServiceRestInterceptor", ) diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py new file mode 100644 index 00000000..4ab502f8 --- /dev/null +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/rest.py @@ -0,0 +1,2388 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import dataclasses +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, path_template, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.protobuf import json_format +import grpc # type: ignore +from requests import __version__ as requests_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.bigquery_datatransfer_v1.types import datatransfer, transfer + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import DataTransferServiceTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class DataTransferServiceRestInterceptor: + """Interceptor for DataTransferService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataTransferServiceRestTransport. + + .. 
code-block:: python + class MyCustomDataTransferServiceInterceptor(DataTransferServiceRestInterceptor): + def pre_check_valid_creds(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_check_valid_creds(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_transfer_run(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_enroll_data_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_data_source(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_source(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_transfer_run(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_transfer_run(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_sources(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_sources(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list_transfer_configs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_configs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transfer_logs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_logs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_schedule_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_schedule_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_start_manual_transfer_runs(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_start_manual_transfer_runs(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_transfer_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_transfer_config(self, response): + logging.log(f"Received response: {response}") + return response + + transport = DataTransferServiceRestTransport(interceptor=MyCustomDataTransferServiceInterceptor()) + client = DataTransferServiceClient(transport=transport) + + + """ + + def pre_check_valid_creds( + self, + request: datatransfer.CheckValidCredsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.CheckValidCredsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for check_valid_creds + + Override in a subclass to manipulate the request or metadata + before 
they are sent to the DataTransferService server. + """ + return request, metadata + + def post_check_valid_creds( + self, response: datatransfer.CheckValidCredsResponse + ) -> datatransfer.CheckValidCredsResponse: + """Post-rpc interceptor for check_valid_creds + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_create_transfer_config( + self, + request: datatransfer.CreateTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.CreateTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_create_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for create_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_delete_transfer_config( + self, + request: datatransfer.DeleteTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.DeleteTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. 
+ """ + return request, metadata + + def pre_delete_transfer_run( + self, + request: datatransfer.DeleteTransferRunRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.DeleteTransferRunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_transfer_run + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def pre_enroll_data_sources( + self, + request: datatransfer.EnrollDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.EnrollDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for enroll_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def pre_get_data_source( + self, + request: datatransfer.GetDataSourceRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetDataSourceRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_data_source + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_data_source( + self, response: datatransfer.DataSource + ) -> datatransfer.DataSource: + """Post-rpc interceptor for get_data_source + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_transfer_config( + self, + request: datatransfer.GetTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. 
+ """ + return request, metadata + + def post_get_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for get_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_transfer_run( + self, + request: datatransfer.GetTransferRunRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.GetTransferRunRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_transfer_run + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_transfer_run( + self, response: transfer.TransferRun + ) -> transfer.TransferRun: + """Post-rpc interceptor for get_transfer_run + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_data_sources( + self, + request: datatransfer.ListDataSourcesRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListDataSourcesRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_data_sources( + self, response: datatransfer.ListDataSourcesResponse + ) -> datatransfer.ListDataSourcesResponse: + """Post-rpc interceptor for list_data_sources + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_transfer_configs( + self, + request: datatransfer.ListTransferConfigsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferConfigsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_configs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_transfer_configs( + self, response: datatransfer.ListTransferConfigsResponse + ) -> datatransfer.ListTransferConfigsResponse: + """Post-rpc interceptor for list_transfer_configs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_transfer_logs( + self, + request: datatransfer.ListTransferLogsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferLogsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_logs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_transfer_logs( + self, response: datatransfer.ListTransferLogsResponse + ) -> datatransfer.ListTransferLogsResponse: + """Post-rpc interceptor for list_transfer_logs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_list_transfer_runs( + self, + request: datatransfer.ListTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ListTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. 
+ """ + return request, metadata + + def post_list_transfer_runs( + self, response: datatransfer.ListTransferRunsResponse + ) -> datatransfer.ListTransferRunsResponse: + """Post-rpc interceptor for list_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_schedule_transfer_runs( + self, + request: datatransfer.ScheduleTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.ScheduleTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for schedule_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_schedule_transfer_runs( + self, response: datatransfer.ScheduleTransferRunsResponse + ) -> datatransfer.ScheduleTransferRunsResponse: + """Post-rpc interceptor for schedule_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_start_manual_transfer_runs( + self, + request: datatransfer.StartManualTransferRunsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.StartManualTransferRunsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for start_manual_transfer_runs + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. 
+ """ + return request, metadata + + def post_start_manual_transfer_runs( + self, response: datatransfer.StartManualTransferRunsResponse + ) -> datatransfer.StartManualTransferRunsResponse: + """Post-rpc interceptor for start_manual_transfer_runs + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_update_transfer_config( + self, + request: datatransfer.UpdateTransferConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[datatransfer.UpdateTransferConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_transfer_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_update_transfer_config( + self, response: transfer.TransferConfig + ) -> transfer.TransferConfig: + """Post-rpc interceptor for update_transfer_config + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.GetLocationRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[locations_pb2.ListLocationsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTransferService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the DataTransferService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataTransferServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataTransferServiceRestInterceptor + + +class DataTransferServiceRestTransport(DataTransferServiceTransport): + """REST backend transport for DataTransferService. + + This API allows users to manage their data transfers into + BigQuery. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "bigquerydatatransfer.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DataTransferServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DataTransferServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CheckValidCreds(DataTransferServiceRestStub): + def __hash__(self): + return hash("CheckValidCreds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.CheckValidCredsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.CheckValidCredsResponse: + r"""Call the check valid creds method over HTTP. + + Args: + request (~.datatransfer.CheckValidCredsRequest): + The request object. 
A request to determine whether the + user has valid credentials. This method + is used to limit the number of OAuth + popups in the user interface. The user + id is inferred from the API call + context. If the data source has the + Google+ authorization type, this method + returns false, as it cannot be + determined whether the credentials are + already valid merely based on the user + id. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.CheckValidCredsResponse: + A response indicating whether the + credentials exist and are valid. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/dataSources/*}:checkValidCreds", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_check_valid_creds( + request, metadata + ) + pb_request = datatransfer.CheckValidCredsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + 
"{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.CheckValidCredsResponse() + pb_resp = datatransfer.CheckValidCredsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_check_valid_creds(resp) + return resp + + class _CreateTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("CreateTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.CreateTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the create transfer config method over HTTP. + + Args: + request (~.datatransfer.CreateTransferConfigRequest): + The request object. A request to create a data transfer + configuration. If new credentials are + needed for this transfer configuration, + authorization info must be provided. If + authorization info is provided, the + transfer configuration will be + associated with the user id + corresponding to the authorization info. + Otherwise, the transfer configuration + will be associated with the calling + user. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/transferConfigs", + "body": "transfer_config", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*}/transferConfigs", + "body": "transfer_config", + }, + ] + request, metadata = self._interceptor.pre_create_transfer_config( + request, metadata + ) + pb_request = datatransfer.CreateTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_transfer_config(resp) + return resp + + class _DeleteTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("DeleteTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.DeleteTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete transfer config method over HTTP. + + Args: + request (~.datatransfer.DeleteTransferConfigRequest): + The request object. A request to delete data transfer + information. All associated transfer + runs and log messages will be deleted as + well. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/transferConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_transfer_config( + request, metadata + ) + pb_request = datatransfer.DeleteTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteTransferRun(DataTransferServiceRestStub): + def __hash__(self): + return hash("DeleteTransferRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.DeleteTransferRunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete transfer run method over HTTP. 
+ + Args: + request (~.datatransfer.DeleteTransferRunRequest): + The request object. A request to delete data transfer run + information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/transferConfigs/*/runs/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_transfer_run( + request, metadata + ) + pb_request = datatransfer.DeleteTransferRunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _EnrollDataSources(DataTransferServiceRestStub): + def __hash__(self): + return hash("EnrollDataSources") + + def __call__( + self, + request: datatransfer.EnrollDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the enroll data sources method over HTTP. + + Args: + request (~.datatransfer.EnrollDataSourcesRequest): + The request object. A request to enroll a set of data sources so they are + visible in the BigQuery UI's ``Transfer`` tab. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*}:enrollDataSources", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*}:enrollDataSources", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_enroll_data_sources( + request, metadata + ) + pb_request = datatransfer.EnrollDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" 
+ response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetDataSource(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetDataSource") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetDataSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.DataSource: + r"""Call the get data source method over HTTP. + + Args: + request (~.datatransfer.GetDataSourceRequest): + The request object. A request to get data source info. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.DataSource: + Defines the properties and custom + parameters for a data source. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/dataSources/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/dataSources/*}", + }, + ] + request, metadata = self._interceptor.pre_get_data_source(request, metadata) + pb_request = datatransfer.GetDataSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.DataSource() + pb_resp = datatransfer.DataSource.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_data_source(resp) + return resp + + class _GetTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the get transfer config method over HTTP. + + Args: + request (~.datatransfer.GetTransferConfigRequest): + The request object. A request to get data transfer + information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/transferConfigs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transfer_config( + request, metadata + ) + pb_request = datatransfer.GetTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transfer_config(resp) + return resp + + class _GetTransferRun(DataTransferServiceRestStub): + def __hash__(self): + return hash("GetTransferRun") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.GetTransferRunRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferRun: + r"""Call the get transfer run method over HTTP. + + Args: + request (~.datatransfer.GetTransferRunRequest): + The request object. A request to get data transfer run + information. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferRun: + Represents a data transfer run. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/transferConfigs/*/runs/*}", + }, + ] + request, metadata = self._interceptor.pre_get_transfer_run( + request, metadata + ) + pb_request = datatransfer.GetTransferRunRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferRun() + pb_resp = transfer.TransferRun.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_transfer_run(resp) + return resp + + class _ListDataSources(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListDataSources") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListDataSourcesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListDataSourcesResponse: + r"""Call the list data sources method over HTTP. + + Args: + request (~.datatransfer.ListDataSourcesRequest): + The request object. Request to list supported data + sources and their data transfer + settings. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListDataSourcesResponse: + Returns list of supported data + sources and their metadata. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/dataSources", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*}/dataSources", + }, + ] + request, metadata = self._interceptor.pre_list_data_sources( + request, metadata + ) + pb_request = datatransfer.ListDataSourcesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListDataSourcesResponse() + pb_resp = datatransfer.ListDataSourcesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_data_sources(resp) + return resp + + class _ListTransferConfigs(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferConfigs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferConfigsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferConfigsResponse: + r"""Call the list transfer configs method over HTTP. + + Args: + request (~.datatransfer.ListTransferConfigsRequest): + The request object. A request to list data transfers + configured for a BigQuery project. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferConfigsResponse: + The returned list of pipelines in the + project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/transferConfigs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*}/transferConfigs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_configs( + request, metadata + ) + pb_request = datatransfer.ListTransferConfigsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferConfigsResponse() + pb_resp = datatransfer.ListTransferConfigsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_configs(resp) + return resp + + class _ListTransferLogs(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferLogs") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferLogsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferLogsResponse: + r"""Call the list transfer logs method over HTTP. + + Args: + request (~.datatransfer.ListTransferLogsRequest): + The request object. A request to get user facing log + messages associated with data transfer + run. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferLogsResponse: + The returned list transfer run + messages. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_logs( + request, metadata + ) + pb_request = datatransfer.ListTransferLogsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferLogsResponse() + pb_resp = datatransfer.ListTransferLogsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_logs(resp) + return resp + + class _ListTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("ListTransferRuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ListTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ListTransferRunsResponse: + r"""Call the list transfer runs method over HTTP. + + Args: + request (~.datatransfer.ListTransferRunsRequest): + The request object. A request to list data transfer runs. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ListTransferRunsResponse: + The returned list of pipelines in the + project. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/transferConfigs/*}/runs", + }, + ] + request, metadata = self._interceptor.pre_list_transfer_runs( + request, metadata + ) + pb_request = datatransfer.ListTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ListTransferRunsResponse() + pb_resp = datatransfer.ListTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_transfer_runs(resp) + return resp + + class _ScheduleTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("ScheduleTransferRuns") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.ScheduleTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.ScheduleTransferRunsResponse: + r"""Call the schedule transfer runs method over HTTP. + + Args: + request (~.datatransfer.ScheduleTransferRunsRequest): + The request object. A request to schedule transfer runs + for a time range. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.ScheduleTransferRunsResponse: + A response to schedule transfer runs + for a time range. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_schedule_transfer_runs( + request, metadata + ) + pb_request = datatransfer.ScheduleTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.ScheduleTransferRunsResponse() + pb_resp = datatransfer.ScheduleTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_schedule_transfer_runs(resp) + return resp + + class _StartManualTransferRuns(DataTransferServiceRestStub): + def __hash__(self): + return hash("StartManualTransferRuns") + + def __call__( + self, + request: datatransfer.StartManualTransferRunsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datatransfer.StartManualTransferRunsResponse: + r"""Call the start manual transfer + runs method over HTTP. + + Args: + request (~.datatransfer.StartManualTransferRunsRequest): + The request object. A request to start manual transfer + runs. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.datatransfer.StartManualTransferRunsResponse: + A response to start manual transfer + runs. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/transferConfigs/*}:startManualRuns", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_start_manual_transfer_runs( + request, metadata + ) + pb_request = datatransfer.StartManualTransferRunsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = datatransfer.StartManualTransferRunsResponse() + pb_resp = datatransfer.StartManualTransferRunsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_start_manual_transfer_runs(resp) + return resp + + class _UpdateTransferConfig(DataTransferServiceRestStub): + def __hash__(self): + return hash("UpdateTransferConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: datatransfer.UpdateTransferConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> transfer.TransferConfig: + r"""Call the update transfer config method over HTTP. + + Args: + request (~.datatransfer.UpdateTransferConfigRequest): + The request object. A request to update a transfer + configuration. To update the user id of + the transfer configuration, + authorization info needs to be provided. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.transfer.TransferConfig: + Represents a data transfer configuration. A transfer + configuration contains all metadata needed to perform a + data transfer. For example, ``destination_dataset_id`` + specifies where data should be stored. When a new + transfer configuration is created, the specified + ``destination_dataset_id`` is created when needed and + shared with the appropriate data source service account. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}", + "body": "transfer_config", + }, + { + "method": "patch", + "uri": "/v1/{transfer_config.name=projects/*/transferConfigs/*}", + "body": "transfer_config", + }, + ] + request, metadata = self._interceptor.pre_update_transfer_config( + request, metadata + ) + pb_request = datatransfer.UpdateTransferConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = transfer.TransferConfig() + pb_resp = transfer.TransferConfig.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_transfer_config(resp) + return resp + + @property + def check_valid_creds( + self, + ) -> Callable[ + [datatransfer.CheckValidCredsRequest], datatransfer.CheckValidCredsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CheckValidCreds(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_transfer_config( + self, + ) -> Callable[[datatransfer.CreateTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_transfer_config( + self, + ) -> Callable[[datatransfer.DeleteTransferConfigRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_transfer_run( + self, + ) -> Callable[[datatransfer.DeleteTransferRunRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteTransferRun(self._session, self._host, self._interceptor) # type: ignore + + @property + def enroll_data_sources( + self, + ) -> Callable[[datatransfer.EnrollDataSourcesRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._EnrollDataSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_source( + self, + ) -> Callable[[datatransfer.GetDataSourceRequest], datatransfer.DataSource]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataSource(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transfer_config( + self, + ) -> Callable[[datatransfer.GetTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_transfer_run( + self, + ) -> Callable[[datatransfer.GetTransferRunRequest], transfer.TransferRun]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTransferRun(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_sources( + self, + ) -> Callable[ + [datatransfer.ListDataSourcesRequest], datatransfer.ListDataSourcesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDataSources(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_configs( + self, + ) -> Callable[ + [datatransfer.ListTransferConfigsRequest], + datatransfer.ListTransferConfigsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferConfigs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_logs( + self, + ) -> Callable[ + [datatransfer.ListTransferLogsRequest], datatransfer.ListTransferLogsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferLogs(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ListTransferRunsRequest], datatransfer.ListTransferRunsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def schedule_transfer_runs( + self, + ) -> Callable[ + [datatransfer.ScheduleTransferRunsRequest], + datatransfer.ScheduleTransferRunsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ScheduleTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def start_manual_transfer_runs( + self, + ) -> Callable[ + [datatransfer.StartManualTransferRunsRequest], + datatransfer.StartManualTransferRunsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StartManualTransferRuns(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_transfer_config( + self, + ) -> Callable[[datatransfer.UpdateTransferConfigRequest], transfer.TransferConfig]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTransferConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation(DataTransferServiceRestStub): + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + + request, metadata = self._interceptor.pre_get_location(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.Location() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_location(resp) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations(DataTransferServiceRestStub): + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_list_locations(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DataTransferServiceRestTransport",) diff --git a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py index 4d3a5150..b7dfe591 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/datatransfer.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import duration_pb2 # type: ignore @@ -529,7 +531,7 @@ class CreateTransferConfigRequest(proto.Message): ``service_account_name`` is used to create the transfer config. service_account_name (str): - Optional service account name. If this field is set, the + Optional service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the requesting user calling this API has permissions to act as this service account. @@ -619,7 +621,7 @@ class UpdateTransferConfigRequest(proto.Message): ``service_account_name`` is used to update the transfer config. service_account_name (str): - Optional service account name. If this field is set, the + Optional service account email. If this field is set, the transfer config will be created with this service account's credentials. It requires that the requesting user calling this API has permissions to act as this service account. diff --git a/google/cloud/bigquery_datatransfer_v1/types/transfer.py b/google/cloud/bigquery_datatransfer_v1/types/transfer.py index b99c1423..17aee06d 100644 --- a/google/cloud/bigquery_datatransfer_v1/types/transfer.py +++ b/google/cloud/bigquery_datatransfer_v1/types/transfer.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence from google.protobuf import struct_pb2 # type: ignore diff --git a/noxfile.py b/noxfile.py index e716318b..95e58c52 100644 --- a/noxfile.py +++ b/noxfile.py @@ -189,9 +189,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. 
- # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -346,9 +346,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -378,8 +376,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", diff --git a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index 22d7ce94..ff4d7442 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.10.1" + "version": "3.11.0" }, "snippets": [ { diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 472e3a68..bbc2a702 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -google-cloud-bigquery==3.4.2 -google-cloud-pubsub==2.13.12 +google-cloud-bigquery==3.6.0 
+google-cloud-pubsub==2.15.0 pytest==7.2.1 mock==5.0.1 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index d3fca50f..13e5c5de 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-bigquery-datatransfer==3.10.0 +google-cloud-bigquery-datatransfer==3.10.1 diff --git a/setup.py b/setup.py index f8ca026e..43a822f1 100644 --- a/setup.py +++ b/setup.py @@ -57,9 +57,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud"] setuptools.setup( name=name, diff --git a/tests/system.py b/tests/system.py index 03e24313..920c0db7 100644 --- a/tests/system.py +++ b/tests/system.py @@ -26,8 +26,9 @@ def project_id(): return os.environ["PROJECT_ID"] -def test_list_data_sources(project_id): - client = bigquery_datatransfer.DataTransferServiceClient() +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_list_data_sources(project_id: str, transport: str): + client = bigquery_datatransfer.DataTransferServiceClient(transport=transport) parent = client.common_project_path(project_id) data_sources = list(client.list_data_sources(parent=parent)) diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index 483c9a7f..70ebdf41 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -22,6 +22,8 @@ except ImportError: # pragma: NO COVER import mock +from collections.abc import Iterable +import json import math from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template @@ -34,6 +36,7 @@ from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore 
+from google.protobuf import json_format from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore @@ -42,6 +45,8 @@ from proto.marshal.rules import wrappers from proto.marshal.rules.dates import DurationRule, TimestampRule import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session from google.cloud.bigquery_datatransfer_v1.services.data_transfer_service import ( DataTransferServiceAsyncClient, @@ -102,6 +107,7 @@ def test__get_default_mtls_endpoint(): [ (DataTransferServiceClient, "grpc"), (DataTransferServiceAsyncClient, "grpc_asyncio"), + (DataTransferServiceClient, "rest"), ], ) def test_data_transfer_service_client_from_service_account_info( @@ -117,7 +123,11 @@ def test_data_transfer_service_client_from_service_account_info( assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("bigquerydatatransfer.googleapis.com:443") + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com" + ) @pytest.mark.parametrize( @@ -125,6 +135,7 @@ def test_data_transfer_service_client_from_service_account_info( [ (transports.DataTransferServiceGrpcTransport, "grpc"), (transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataTransferServiceRestTransport, "rest"), ], ) def test_data_transfer_service_client_service_account_always_use_jwt( @@ -150,6 +161,7 @@ def test_data_transfer_service_client_service_account_always_use_jwt( [ (DataTransferServiceClient, "grpc"), (DataTransferServiceAsyncClient, "grpc_asyncio"), + (DataTransferServiceClient, "rest"), ], ) def test_data_transfer_service_client_from_service_account_file( @@ -172,13 +184,18 @@ def test_data_transfer_service_client_from_service_account_file( assert 
client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("bigquerydatatransfer.googleapis.com:443") + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com" + ) def test_data_transfer_service_client_get_transport_class(): transport = DataTransferServiceClient.get_transport_class() available_transports = [ transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceRestTransport, ] assert transport in available_transports @@ -199,6 +216,11 @@ def test_data_transfer_service_client_get_transport_class(): transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + ), ], ) @mock.patch.object( @@ -354,6 +376,18 @@ def test_data_transfer_service_client_client_options( "grpc_asyncio", "false", ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + "true", + ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + "false", + ), ], ) @mock.patch.object( @@ -557,6 +591,11 @@ def test_data_transfer_service_client_get_mtls_endpoint_and_cert_source(client_c transports.DataTransferServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + ), ], ) def test_data_transfer_service_client_client_options_scopes( @@ -597,6 +636,12 @@ def test_data_transfer_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + ( + DataTransferServiceClient, + transports.DataTransferServiceRestTransport, + "rest", + None, + ), ], ) def test_data_transfer_service_client_client_options_credentials_file( @@ -5187,148 +5232,4458 @@ async def test_enroll_data_sources_field_headers_async(): ) in kw["metadata"] -def 
test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataTransferServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetDataSourceRequest, + dict, + ], +) +def test_get_data_source_rest(request_type): + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.DataSource( + name="name_value", + data_source_id="data_source_id_value", + display_name="display_name_value", + description="description_value", + client_id="client_id_value", + scopes=["scopes_value"], + transfer_type=transfer.TransferType.BATCH, + supports_multiple_transfers=True, + update_deadline_seconds=2406, + default_schedule="default_schedule_value", + supports_custom_schedule=True, + help_url="help_url_value", + authorization_type=datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE, + data_refresh_type=datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW, + default_data_refresh_window_days=3379, + manual_runs_disabled=True, ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.DataTransferServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_data_source(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.DataSource) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.client_id == "client_id_value" + assert response.scopes == ["scopes_value"] + assert response.transfer_type == transfer.TransferType.BATCH + assert response.supports_multiple_transfers is True + assert response.update_deadline_seconds == 2406 + assert response.default_schedule == "default_schedule_value" + assert response.supports_custom_schedule is True + assert response.help_url == "help_url_value" + assert ( + response.authorization_type + == datatransfer.DataSource.AuthorizationType.AUTHORIZATION_CODE ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + assert ( + response.data_refresh_type + == datatransfer.DataSource.DataRefreshType.SLIDING_WINDOW + ) + assert response.default_data_refresh_window_days == 3379 + assert response.manual_runs_disabled is True + + +def test_get_data_source_rest_required_fields( + request_type=datatransfer.GetDataSourceRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = 
request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DataTransferServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_source._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.DataSource() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_data_source(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_source_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + unset_fields = transport.get_data_source._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # It is an error to provide scopes and a transport instance. 
- transport = transports.DataTransferServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_source_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = DataTransferServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_get_data_source" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_get_data_source" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.GetDataSourceRequest.pb( + datatransfer.GetDataSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.DataSource.to_json( + datatransfer.DataSource() ) + request = datatransfer.GetDataSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.DataSource() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DataTransferServiceGrpcTransport( + client.get_data_source( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_data_source_rest_bad_request( + transport: str = "rest", request_type=datatransfer.GetDataSourceRequest +): + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = DataTransferServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataTransferServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_data_source(request) + + +def test_get_data_source_rest_flattened(): + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.DataTransferServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.DataSource() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataSources/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.DataSource.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_data_source(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataSources/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_source_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_source( + datatransfer.GetDataSourceRequest(), + name="name_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataTransferServiceGrpcTransport, - transports.DataTransferServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_get_data_source_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + datatransfer.ListDataSourcesRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = DataTransferServiceClient.get_transport_class(transport_name)( +def test_list_data_sources_rest(request_type): + client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = DataTransferServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.DataTransferServiceGrpcTransport, - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.ListDataSourcesResponse( + next_page_token="next_page_token_value", + ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_data_transfer_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.DataTransferServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_data_sources(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataSourcesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_sources_rest_required_fields( + request_type=datatransfer.ListDataSourcesRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) + # verify fields with default values are dropped -def test_data_transfer_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.DataTransferServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_sources._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", ) + ) + jsonified_request.update(unset_fields) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "get_data_source", - "list_data_sources", - "create_transfer_config", - "update_transfer_config", - "delete_transfer_config", + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListDataSourcesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_data_sources(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_sources_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_sources._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_sources_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_list_data_sources" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_list_data_sources" + ) as pre: + 
pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ListDataSourcesRequest.pb( + datatransfer.ListDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ListDataSourcesResponse.to_json( + datatransfer.ListDataSourcesResponse() + ) + + request = datatransfer.ListDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ListDataSourcesResponse() + + client.list_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_data_sources_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ListDataSourcesRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_data_sources(request) + + +def test_list_data_sources_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListDataSourcesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListDataSourcesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_data_sources(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/dataSources" + % client.transport._host, + args[1], + ) + + +def test_list_data_sources_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_sources( + datatransfer.ListDataSourcesRequest(), + parent="parent_value", + ) + + +def test_list_data_sources_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + next_page_token="abc", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[], + next_page_token="def", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + ], + next_page_token="ghi", + ), + datatransfer.ListDataSourcesResponse( + data_sources=[ + datatransfer.DataSource(), + datatransfer.DataSource(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListDataSourcesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_data_sources(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, datatransfer.DataSource) for i in results) + + pages = list(client.list_data_sources(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.CreateTransferConfigRequest, + dict, + ], +) +def test_create_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["transfer_config"] = { + "name": "name_value", + 
"destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": "notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_transfer_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_create_transfer_config_rest_required_fields( + request_type=datatransfer.CreateTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_transfer_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "authorization_code", + "service_account_name", + "version_info", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_transfer_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_transfer_config_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_transfer_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "authorizationCode", + "serviceAccountName", + "versionInfo", + ) + ) + & set( + ( + "parent", + "transferConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_transfer_config_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_create_transfer_config" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_create_transfer_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
pb_message = datatransfer.CreateTransferConfigRequest.pb( + datatransfer.CreateTransferConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = transfer.TransferConfig.to_json( + transfer.TransferConfig() + ) + + request = datatransfer.CreateTransferConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = transfer.TransferConfig() + + client.create_transfer_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_transfer_config_rest_bad_request( + transport: str = "rest", request_type=datatransfer.CreateTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["transfer_config"] = { + "name": "name_value", + "destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": "notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + } + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_transfer_config(request) + + +def test_create_transfer_config_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/transferConfigs" + % client.transport._host, + args[1], + ) + + +def test_create_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_transfer_config( + datatransfer.CreateTransferConfigRequest(), + parent="parent_value", + transfer_config=transfer.TransferConfig(name="name_value"), + ) + + +def test_create_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.UpdateTransferConfigRequest, + dict, + ], +) +def test_update_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "transfer_config": { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + } + request_init["transfer_config"] = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3", + "destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": 
"notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_transfer_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_update_transfer_config_rest_required_fields( + request_type=datatransfer.UpdateTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_transfer_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "authorization_code", + "service_account_name", + "update_mask", + "version_info", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_transfer_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_transfer_config_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_transfer_config._get_unset_required_fields({}) + assert 
set(unset_fields) == ( + set( + ( + "authorizationCode", + "serviceAccountName", + "updateMask", + "versionInfo", + ) + ) + & set( + ( + "transferConfig", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_transfer_config_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_update_transfer_config" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_update_transfer_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.UpdateTransferConfigRequest.pb( + datatransfer.UpdateTransferConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = transfer.TransferConfig.to_json( + transfer.TransferConfig() + ) + + request = datatransfer.UpdateTransferConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = transfer.TransferConfig() + + client.update_transfer_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_transfer_config_rest_bad_request( + transport: str = "rest", request_type=datatransfer.UpdateTransferConfigRequest +): + client = 
DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "transfer_config": { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + } + request_init["transfer_config"] = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3", + "destination_dataset_id": "destination_dataset_id_value", + "display_name": "display_name_value", + "data_source_id": "data_source_id_value", + "params": {"fields": {}}, + "schedule": "schedule_value", + "schedule_options": { + "disable_auto_scheduling": True, + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "data_refresh_window_days": 2543, + "disabled": True, + "update_time": {}, + "next_run_time": {}, + "state": 2, + "user_id": 747, + "dataset_region": "dataset_region_value", + "notification_pubsub_topic": "notification_pubsub_topic_value", + "email_preferences": {"enable_failure_email": True}, + "owner_info": {"email": "email_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_transfer_config(request) + + +def test_update_transfer_config_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = transfer.TransferConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "transfer_config": { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_update_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_transfer_config( + datatransfer.UpdateTransferConfigRequest(), + transfer_config=transfer.TransferConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.DeleteTransferConfigRequest, + dict, + ], +) +def test_delete_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_transfer_config(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_transfer_config_rest_required_fields( + request_type=datatransfer.DeleteTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_transfer_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_transfer_config_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_transfer_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_transfer_config_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_config" + ) as pre: + pre.assert_not_called() + pb_message = datatransfer.DeleteTransferConfigRequest.pb( + datatransfer.DeleteTransferConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = 
datatransfer.DeleteTransferConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_transfer_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_transfer_config_rest_bad_request( + transport: str = "rest", request_type=datatransfer.DeleteTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_transfer_config(request) + + +def test_delete_transfer_config_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_transfer_config( + datatransfer.DeleteTransferConfigRequest(), + name="name_value", + ) + + +def test_delete_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetTransferConfigRequest, + dict, + ], +) +def test_get_transfer_config_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig( + name="name_value", + display_name="display_name_value", + data_source_id="data_source_id_value", + schedule="schedule_value", + data_refresh_window_days=2543, + disabled=True, + state=transfer.TransferState.PENDING, + user_id=747, + dataset_region="dataset_region_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transfer_config(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, transfer.TransferConfig) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.data_source_id == "data_source_id_value" + assert response.schedule == "schedule_value" + assert response.data_refresh_window_days == 2543 + assert response.disabled is True + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.dataset_region == "dataset_region_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_config_rest_required_fields( + request_type=datatransfer.GetTransferConfigRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = transfer.TransferConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_transfer_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_transfer_config_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_transfer_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_transfer_config_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + 
) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_get_transfer_config" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_get_transfer_config" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.GetTransferConfigRequest.pb( + datatransfer.GetTransferConfigRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = transfer.TransferConfig.to_json( + transfer.TransferConfig() + ) + + request = datatransfer.GetTransferConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = transfer.TransferConfig() + + client.get_transfer_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_transfer_config_rest_bad_request( + transport: str = "rest", request_type=datatransfer.GetTransferConfigRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transfer_config(request) + + +def test_get_transfer_config_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferConfig.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transfer_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transfer_config_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_transfer_config( + datatransfer.GetTransferConfigRequest(), + name="name_value", + ) + + +def test_get_transfer_config_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferConfigsRequest, + dict, + ], +) +def test_list_transfer_configs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.ListTransferConfigsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_configs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferConfigsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_configs_rest_required_fields( + request_type=datatransfer.ListTransferConfigsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_configs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_configs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "data_source_ids", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferConfigsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_transfer_configs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_transfer_configs_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_transfer_configs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "dataSourceIds", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_transfer_configs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_list_transfer_configs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_list_transfer_configs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ListTransferConfigsRequest.pb( + 
datatransfer.ListTransferConfigsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ListTransferConfigsResponse.to_json( + datatransfer.ListTransferConfigsResponse() + ) + + request = datatransfer.ListTransferConfigsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ListTransferConfigsResponse() + + client.list_transfer_configs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_transfer_configs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ListTransferConfigsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transfer_configs(request) + + +def test_list_transfer_configs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferConfigsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferConfigsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transfer_configs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/transferConfigs" + % client.transport._host, + args[1], + ) + + +def test_list_transfer_configs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_configs( + datatransfer.ListTransferConfigsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_configs_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[], + next_page_token="def", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferConfigsResponse( + transfer_configs=[ + transfer.TransferConfig(), + transfer.TransferConfig(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListTransferConfigsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_transfer_configs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferConfig) for i in results) + + pages = list(client.list_transfer_configs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ScheduleTransferRunsRequest, + dict, + ], +) +def test_schedule_transfer_runs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + 
request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ScheduleTransferRunsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.schedule_transfer_runs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.ScheduleTransferRunsResponse) + + +def test_schedule_transfer_runs_rest_required_fields( + request_type=datatransfer.ScheduleTransferRunsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).schedule_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).schedule_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in 
jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ScheduleTransferRunsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.schedule_transfer_runs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_schedule_transfer_runs_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.schedule_transfer_runs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "startTime", + "endTime", + ) + ) + ) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_schedule_transfer_runs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_schedule_transfer_runs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_schedule_transfer_runs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ScheduleTransferRunsRequest.pb( + datatransfer.ScheduleTransferRunsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ScheduleTransferRunsResponse.to_json( + datatransfer.ScheduleTransferRunsResponse() + ) + + request = datatransfer.ScheduleTransferRunsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ScheduleTransferRunsResponse() + + client.schedule_transfer_runs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_schedule_transfer_runs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ScheduleTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy 
transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.schedule_transfer_runs(request) + + +def test_schedule_transfer_runs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ScheduleTransferRunsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ScheduleTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.schedule_transfer_runs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns" + % client.transport._host, + args[1], + ) + + +def test_schedule_transfer_runs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.schedule_transfer_runs( + datatransfer.ScheduleTransferRunsRequest(), + parent="parent_value", + start_time=timestamp_pb2.Timestamp(seconds=751), + end_time=timestamp_pb2.Timestamp(seconds=751), + ) + + +def test_schedule_transfer_runs_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.StartManualTransferRunsRequest, + dict, + ], +) +def test_start_manual_transfer_runs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.StartManualTransferRunsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.StartManualTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.start_manual_transfer_runs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.StartManualTransferRunsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_start_manual_transfer_runs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_start_manual_transfer_runs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_start_manual_transfer_runs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.StartManualTransferRunsRequest.pb( + datatransfer.StartManualTransferRunsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = ( + datatransfer.StartManualTransferRunsResponse.to_json( + datatransfer.StartManualTransferRunsResponse() + ) + ) + + request = 
datatransfer.StartManualTransferRunsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.StartManualTransferRunsResponse() + + client.start_manual_transfer_runs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_start_manual_transfer_runs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.StartManualTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_manual_transfer_runs(request) + + +def test_start_manual_transfer_runs_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.GetTransferRunRequest, + dict, + ], +) +def test_get_transfer_run_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = transfer.TransferRun( + name="name_value", + data_source_id="data_source_id_value", + state=transfer.TransferState.PENDING, + user_id=747, + schedule="schedule_value", + notification_pubsub_topic="notification_pubsub_topic_value", + destination_dataset_id="destination_dataset_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_transfer_run(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, transfer.TransferRun) + assert response.name == "name_value" + assert response.data_source_id == "data_source_id_value" + assert response.state == transfer.TransferState.PENDING + assert response.user_id == 747 + assert response.schedule == "schedule_value" + assert response.notification_pubsub_topic == "notification_pubsub_topic_value" + + +def test_get_transfer_run_rest_required_fields( + request_type=datatransfer.GetTransferRunRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields 
with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = transfer.TransferRun() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = transfer.TransferRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_transfer_run(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_transfer_run_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_transfer_run._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_transfer_run_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_get_transfer_run" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_get_transfer_run" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.GetTransferRunRequest.pb( + datatransfer.GetTransferRunRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = transfer.TransferRun.to_json(transfer.TransferRun()) + + request = datatransfer.GetTransferRunRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = transfer.TransferRun() + + client.get_transfer_run( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_transfer_run_rest_bad_request( + transport: str = "rest", request_type=datatransfer.GetTransferRunRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_transfer_run(request) + + +def test_get_transfer_run_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = transfer.TransferRun() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = transfer.TransferRun.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_transfer_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" + % client.transport._host, + args[1], + ) + + +def test_get_transfer_run_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_transfer_run( + datatransfer.GetTransferRunRequest(), + name="name_value", + ) + + +def test_get_transfer_run_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.DeleteTransferRunRequest, + dict, + ], +) +def test_delete_transfer_run_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_transfer_run(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_transfer_run_rest_required_fields( + request_type=datatransfer.DeleteTransferRunRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_transfer_run._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_transfer_run(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_transfer_run_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_transfer_run._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_transfer_run_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_delete_transfer_run" + ) as pre: + pre.assert_not_called() + pb_message = datatransfer.DeleteTransferRunRequest.pb( + datatransfer.DeleteTransferRunRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = datatransfer.DeleteTransferRunRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_transfer_run( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_transfer_run_rest_bad_request( + transport: str = "rest", request_type=datatransfer.DeleteTransferRunRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_transfer_run(request) + + +def test_delete_transfer_run_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_transfer_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_transfer_run_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_transfer_run( + datatransfer.DeleteTransferRunRequest(), + name="name_value", + ) + + +def test_delete_transfer_run_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferRunsRequest, + dict, + ], +) +def test_list_transfer_runs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_runs(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTransferRunsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_runs_rest_required_fields( + request_type=datatransfer.ListTransferRunsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_runs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_runs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "run_attempt", + "states", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_transfer_runs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_transfer_runs_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_transfer_runs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "runAttempt", + "states", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_transfer_runs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, 
mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_list_transfer_runs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_list_transfer_runs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ListTransferRunsRequest.pb( + datatransfer.ListTransferRunsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ListTransferRunsResponse.to_json( + datatransfer.ListTransferRunsResponse() + ) + + request = datatransfer.ListTransferRunsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ListTransferRunsResponse() + + client.list_transfer_runs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_transfer_runs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ListTransferRunsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transfer_runs(request) + + +def test_list_transfer_runs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferRunsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferRunsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transfer_runs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs" + % client.transport._host, + args[1], + ) + + +def test_list_transfer_runs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_runs( + datatransfer.ListTransferRunsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_runs_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + transfer.TransferRun(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[], + next_page_token="def", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferRunsResponse( + transfer_runs=[ + transfer.TransferRun(), + transfer.TransferRun(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListTransferRunsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3" + } + + pager = client.list_transfer_runs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferRun) for i in results) + + pages = list(client.list_transfer_runs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.ListTransferLogsRequest, + dict, + ], +) +def test_list_transfer_logs_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferLogsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_transfer_logs(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTransferLogsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_transfer_logs_rest_required_fields( + request_type=datatransfer.ListTransferLogsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_logs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_transfer_logs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "message_types", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.ListTransferLogsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_transfer_logs(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_transfer_logs_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_transfer_logs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "messageTypes", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_transfer_logs_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_list_transfer_logs" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_list_transfer_logs" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.ListTransferLogsRequest.pb( + datatransfer.ListTransferLogsRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.ListTransferLogsResponse.to_json( + datatransfer.ListTransferLogsResponse() + ) + + request = datatransfer.ListTransferLogsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.ListTransferLogsResponse() + + client.list_transfer_logs( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_transfer_logs_rest_bad_request( + transport: str = "rest", request_type=datatransfer.ListTransferLogsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_transfer_logs(request) + + +def test_list_transfer_logs_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = datatransfer.ListTransferLogsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.ListTransferLogsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_transfer_logs(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs" + % client.transport._host, + args[1], + ) + + +def test_list_transfer_logs_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_transfer_logs( + datatransfer.ListTransferLogsRequest(), + parent="parent_value", + ) + + +def test_list_transfer_logs_rest_pager(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + next_page_token="abc", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[], + next_page_token="def", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + ], + next_page_token="ghi", + ), + datatransfer.ListTransferLogsResponse( + transfer_messages=[ + transfer.TransferMessage(), + transfer.TransferMessage(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + datatransfer.ListTransferLogsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/transferConfigs/sample3/runs/sample4" + } + + pager = client.list_transfer_logs(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, transfer.TransferMessage) for i in results) + + pages = list(client.list_transfer_logs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.CheckValidCredsRequest, + dict, + ], +) +def test_check_valid_creds_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": 
"projects/sample1/locations/sample2/dataSources/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse( + has_valid_creds=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.check_valid_creds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, datatransfer.CheckValidCredsResponse) + assert response.has_valid_creds is True + + +def test_check_valid_creds_rest_required_fields( + request_type=datatransfer.CheckValidCredsRequest, +): + transport_class = transports.DataTransferServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_valid_creds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).check_valid_creds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify 
required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.check_valid_creds(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_check_valid_creds_rest_unset_required_fields(): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.check_valid_creds._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", 
[True, False]) +def test_check_valid_creds_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "post_check_valid_creds" + ) as post, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_check_valid_creds" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = datatransfer.CheckValidCredsRequest.pb( + datatransfer.CheckValidCredsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = datatransfer.CheckValidCredsResponse.to_json( + datatransfer.CheckValidCredsResponse() + ) + + request = datatransfer.CheckValidCredsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = datatransfer.CheckValidCredsResponse() + + client.check_valid_creds( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_check_valid_creds_rest_bad_request( + transport: str = "rest", request_type=datatransfer.CheckValidCredsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/dataSources/sample3"} + request 
= request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.check_valid_creds(request) + + +def test_check_valid_creds_rest_flattened(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = datatransfer.CheckValidCredsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/dataSources/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = datatransfer.CheckValidCredsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.check_valid_creds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds" + % client.transport._host, + args[1], + ) + + +def test_check_valid_creds_rest_flattened_error(transport: str = "rest"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.check_valid_creds( + datatransfer.CheckValidCredsRequest(), + name="name_value", + ) + + +def test_check_valid_creds_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + datatransfer.EnrollDataSourcesRequest, + dict, + ], +) +def test_enroll_data_sources_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.enroll_data_sources(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_enroll_data_sources_rest_interceptors(null_interceptor): + transport = transports.DataTransferServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTransferServiceRestInterceptor(), + ) + client = DataTransferServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.DataTransferServiceRestInterceptor, "pre_enroll_data_sources" + ) as pre: + pre.assert_not_called() + pb_message = datatransfer.EnrollDataSourcesRequest.pb( + datatransfer.EnrollDataSourcesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = datatransfer.EnrollDataSourcesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.enroll_data_sources( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_enroll_data_sources_rest_bad_request( + transport: str = "rest", request_type=datatransfer.EnrollDataSourcesRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enroll_data_sources(request) + + +def test_enroll_data_sources_rest_error(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTransferServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataTransferServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTransferServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTransferServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTransferServiceGrpcTransport, + transports.DataTransferServiceGrpcAsyncIOTransport, + transports.DataTransferServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = DataTransferServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataTransferServiceGrpcTransport, + ) + + +def test_data_transfer_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataTransferServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_transfer_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.bigquery_datatransfer_v1.services.data_transfer_service.transports.DataTransferServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataTransferServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get_data_source", + "list_data_sources", + "create_transfer_config", + "update_transfer_config", + "delete_transfer_config", "get_transfer_config", "list_transfer_configs", "schedule_transfer_runs", @@ -5427,6 +9782,7 @@ def test_data_transfer_service_transport_auth_adc(transport_class): [ transports.DataTransferServiceGrpcTransport, transports.DataTransferServiceGrpcAsyncIOTransport, + transports.DataTransferServiceRestTransport, ], ) def test_data_transfer_service_transport_auth_gdch_credentials(transport_class): @@ -5526,11 +9882,23 @@ def test_data_transfer_service_grpc_transport_client_cert_source_for_mtls( ) +def test_data_transfer_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DataTransferServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_data_transfer_service_host_no_port(transport_name): @@ -5541,7 +9909,11 @@ def test_data_transfer_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("bigquerydatatransfer.googleapis.com:443") + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com" + ) @pytest.mark.parametrize( @@ -5549,6 +9921,7 @@ def test_data_transfer_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_data_transfer_service_host_with_port(transport_name): @@ -5559,7 +9932,75 @@ def test_data_transfer_service_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == 
("bigquerydatatransfer.googleapis.com:8000") + assert client.transport._host == ( + "bigquerydatatransfer.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://bigquerydatatransfer.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_data_transfer_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = DataTransferServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = DataTransferServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get_data_source._session + session2 = client2.transport.get_data_source._session + assert session1 != session2 + session1 = client1.transport.list_data_sources._session + session2 = client2.transport.list_data_sources._session + assert session1 != session2 + session1 = client1.transport.create_transfer_config._session + session2 = client2.transport.create_transfer_config._session + assert session1 != session2 + session1 = client1.transport.update_transfer_config._session + session2 = client2.transport.update_transfer_config._session + assert session1 != session2 + session1 = client1.transport.delete_transfer_config._session + session2 = client2.transport.delete_transfer_config._session + assert session1 != session2 + session1 = client1.transport.get_transfer_config._session + session2 = client2.transport.get_transfer_config._session + assert session1 != session2 + session1 = client1.transport.list_transfer_configs._session + session2 = client2.transport.list_transfer_configs._session + assert session1 != session2 + session1 = client1.transport.schedule_transfer_runs._session + session2 = client2.transport.schedule_transfer_runs._session + assert session1 != session2 + session1 = client1.transport.start_manual_transfer_runs._session + session2 = 
client2.transport.start_manual_transfer_runs._session + assert session1 != session2 + session1 = client1.transport.get_transfer_run._session + session2 = client2.transport.get_transfer_run._session + assert session1 != session2 + session1 = client1.transport.delete_transfer_run._session + session2 = client2.transport.delete_transfer_run._session + assert session1 != session2 + session1 = client1.transport.list_transfer_runs._session + session2 = client2.transport.list_transfer_runs._session + assert session1 != session2 + session1 = client1.transport.list_transfer_logs._session + session2 = client2.transport.list_transfer_logs._session + assert session1 != session2 + session1 = client1.transport.check_valid_creds._session + session2 = client2.transport.check_valid_creds._session + assert session1 != session2 + session1 = client1.transport.enroll_data_sources._session + session2 = client2.transport.enroll_data_sources._session + assert session1 != session2 def test_data_transfer_service_grpc_transport_channel(): @@ -5900,6 +10341,120 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_location_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.GetLocationRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + transport: str = "rest", request_type=locations_pb2.ListLocationsRequest +): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + def test_list_locations(transport: str = "grpc"): client = DataTransferServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6192,6 +10747,7 @@ async def test_get_location_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -6209,6 +10765,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: