From 0cd439b262e423a1883952918dd7b13c77aa27a0 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 16 Dec 2022 03:28:06 +0100 Subject: [PATCH 1/4] chore(deps): update dependency google-cloud-bigquery-datatransfer to v3.8.0 (#414) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index b74d4185..d35ef094 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-bigquery-datatransfer==3.7.3 +google-cloud-bigquery-datatransfer==3.8.0 From b968f532430b0d88055ee171f82e27517f24e8bd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 3 Jan 2023 20:27:08 +0100 Subject: [PATCH 2/4] chore(deps): update dependency mock to v5 (#415) --- samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 90a137d2..40ee72f1 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ google-cloud-bigquery==3.4.1 google-cloud-pubsub==2.13.11 pytest==7.2.0 -mock==4.0.3 +mock==5.0.0 From 6538b29601e91d16e6a2095a256b4b18920d73b9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 4 Jan 2023 10:41:16 -0500 Subject: [PATCH 3/4] feat: Add location methods (#416) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add location methods PiperOrigin-RevId: 499272288 Source-Link: https://github.com/googleapis/googleapis/commit/62c5867e6adcd7910203ae6adc10f2b1533c29e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d1a5f9d4646e4cf78fdead068c3e8c064347ce45 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDFhNWY5ZDQ2NDZlNGNmNzhmZGVhZDA2OGMzZThjMDY0MzQ3Y2U0NSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../data_transfer_service/async_client.py | 109 +++++++ .../services/data_transfer_service/client.py | 109 +++++++ .../data_transfer_service/transports/base.py | 22 ++ .../data_transfer_service/transports/grpc.py | 37 +++ .../transports/grpc_asyncio.py | 37 +++ ...google.cloud.bigquery.datatransfer.v1.json | 2 +- .../test_data_transfer_service.py | 293 ++++++++++++++++++ 7 files changed, 608 insertions(+), 1 deletion(-) diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py index 92eb4a55..309e9dc2 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/async_client.py @@ -43,6 +43,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore @@ -1969,6 +1970,114 @@ async def sample_enroll_data_sources(): metadata=metadata, ) + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = 
gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py index 232d3bc4..8ec16a16 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/client.py @@ -47,6 +47,7 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore @@ -2143,6 +2144,114 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=package_version.__version__ diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py index ac205aeb..4f4b04fc 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/base.py @@ -22,6 +22,7 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore from google.protobuf import empty_pb2 # type: ignore @@ -465,6 +466,27 @@ def enroll_data_sources( ]: raise NotImplementedError() + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py index 94c62596..fb57faf6 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc.py @@ -20,6 +20,7 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore @@ -663,6 +664,42 @@ def enroll_data_sources( def close(self): self.grpc_channel.close() + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py index 83b8943e..91f3492c 100644 --- a/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py +++ b/google/cloud/bigquery_datatransfer_v1/services/data_transfer_service/transports/grpc_asyncio.py @@ -19,6 +19,7 @@ from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -682,5 +683,41 @@ def enroll_data_sources( def close(self): return self.grpc_channel.close() + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("DataTransferServiceGrpcAsyncIOTransport",) diff --git a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index b98866b4..a381c552 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "3.8.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py index b4192079..483c9a7f 100644 --- a/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py +++ b/tests/unit/gapic/bigquery_datatransfer_v1/test_data_transfer_service.py @@ -30,6 +30,7 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore @@ -5338,6 +5339,8 @@ def test_data_transfer_service_base_transport(): "list_transfer_logs", "check_valid_creds", "enroll_data_sources", + "get_location", + "list_locations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5897,6 +5900,296 @@ async def test_transport_close_async(): close.assert_called_once() +def test_list_locations(transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +def test_get_location_from_dict(): + client = DataTransferServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + + response = client.get_location( + request={ + "name": "locations/abc", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_location_from_dict_async(): + client = DataTransferServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + def test_transport_close(): transports = { "grpc": "_grpc_channel", From 308ac794daca7bc460334d20824f35638a0303ae Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 4 Jan 2023 10:52:07 -0500 Subject: [PATCH 4/4] chore(main): release 3.9.0 (#417) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ google/cloud/bigquery_datatransfer/gapic_version.py | 2 +- google/cloud/bigquery_datatransfer_v1/gapic_version.py | 2 +- ...pet_metadata_google.cloud.bigquery.datatransfer.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 573f0901..f1c8ae50 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.8.0" + ".": "3.9.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 4f2842af..6ed3fac7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-bigquery-datatransfer/#history +## [3.9.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.8.0...v3.9.0) (2023-01-04) + + +### Features + +* Add location methods ([#416](https://github.com/googleapis/python-bigquery-datatransfer/issues/416)) ([6538b29](https://github.com/googleapis/python-bigquery-datatransfer/commit/6538b29601e91d16e6a2095a256b4b18920d73b9)) + ## [3.8.0](https://github.com/googleapis/python-bigquery-datatransfer/compare/v3.7.3...v3.8.0) (2022-12-15) diff --git a/google/cloud/bigquery_datatransfer/gapic_version.py b/google/cloud/bigquery_datatransfer/gapic_version.py index 4052fbb0..90b3aae3 100644 --- a/google/cloud/bigquery_datatransfer/gapic_version.py +++ b/google/cloud/bigquery_datatransfer/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language 
governing permissions and # limitations under the License. # -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/google/cloud/bigquery_datatransfer_v1/gapic_version.py b/google/cloud/bigquery_datatransfer_v1/gapic_version.py index 4052fbb0..90b3aae3 100644 --- a/google/cloud/bigquery_datatransfer_v1/gapic_version.py +++ b/google/cloud/bigquery_datatransfer_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.8.0" # {x-release-please-version} +__version__ = "3.9.0" # {x-release-please-version} diff --git a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json index a381c552..08e4e06e 100644 --- a/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json +++ b/samples/generated_samples/snippet_metadata_google.cloud.bigquery.datatransfer.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-bigquery-datatransfer", - "version": "0.1.0" + "version": "3.9.0" }, "snippets": [ {
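
A minimal usage sketch for the location methods added in PATCH 3/4: the patch wires the standard google.cloud.location GetLocation / ListLocations mixins into DataTransferServiceClient and DataTransferServiceAsyncClient. The snippet below assumes Application Default Credentials are configured and uses "your-project-id" as a placeholder; the resource-name formats ("projects/{project}" and "projects/{project}/locations/{location}") follow the generic Cloud Locations API conventions and are not stated in the patches themselves.

    from google.cloud import bigquery_datatransfer_v1
    from google.cloud.location import locations_pb2

    client = bigquery_datatransfer_v1.DataTransferServiceClient()

    # The generated methods accept either a locations_pb2 request object or a
    # plain dict, which they coerce via keyword expansion (see the client.py
    # hunk in PATCH 3/4).
    response = client.list_locations(request={"name": "projects/your-project-id"})
    for location in response.locations:
        print(location.location_id, location.name)

    # Fetch a single location by its full resource name ("us" is a placeholder).
    location = client.get_location(
        request=locations_pb2.GetLocationRequest(
            name="projects/your-project-id/locations/us"
        )
    )
    print(location.display_name)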