diff --git a/CHANGELOG.md b/CHANGELOG.md index 920d7b6..50e0430 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [1.2.0](https://github.com/googleapis/python-datastream/compare/v1.1.1...v1.2.0) (2022-08-24) + + +### Features + +* added support for BigQuery destination and PostgreSQL source types ([#124](https://github.com/googleapis/python-datastream/issues/124)) ([6eb26d1](https://github.com/googleapis/python-datastream/commit/6eb26d19f6c6098152885c46ea3cce29b199dae6)) + ## [1.1.1](https://github.com/googleapis/python-datastream/compare/v1.1.0...v1.1.1) (2022-08-11) diff --git a/google/cloud/datastream/__init__.py b/google/cloud/datastream/__init__.py index 2127eed..c5cba0f 100644 --- a/google/cloud/datastream/__init__.py +++ b/google/cloud/datastream/__init__.py @@ -58,6 +58,8 @@ from google.cloud.datastream_v1.types.datastream_resources import ( AvroFileFormat, BackfillJob, + BigQueryDestinationConfig, + BigQueryProfile, ConnectionProfile, DestinationConfig, Error, @@ -78,6 +80,12 @@ OracleSchema, OracleSourceConfig, OracleTable, + PostgresqlColumn, + PostgresqlProfile, + PostgresqlRdbms, + PostgresqlSchema, + PostgresqlSourceConfig, + PostgresqlTable, PrivateConnection, PrivateConnectivity, Route, @@ -132,6 +140,8 @@ "UpdateStreamRequest", "AvroFileFormat", "BackfillJob", + "BigQueryDestinationConfig", + "BigQueryProfile", "ConnectionProfile", "DestinationConfig", "Error", @@ -152,6 +162,12 @@ "OracleSchema", "OracleSourceConfig", "OracleTable", + "PostgresqlColumn", + "PostgresqlProfile", + "PostgresqlRdbms", + "PostgresqlSchema", + "PostgresqlSourceConfig", + "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", "Route", diff --git a/google/cloud/datastream_v1/__init__.py b/google/cloud/datastream_v1/__init__.py index 4f92d57..e7a015d 100644 --- a/google/cloud/datastream_v1/__init__.py +++ b/google/cloud/datastream_v1/__init__.py @@ -55,6 +55,8 @@ from .types.datastream_resources import ( AvroFileFormat, BackfillJob, + BigQueryDestinationConfig, + BigQueryProfile, ConnectionProfile, DestinationConfig, Error, @@ -75,6 +77,12 @@ OracleSchema, OracleSourceConfig, OracleTable, + PostgresqlColumn, + PostgresqlProfile, + PostgresqlRdbms, + PostgresqlSchema, + PostgresqlSourceConfig, + PostgresqlTable, PrivateConnection, PrivateConnectivity, Route, @@ -93,6 +101,8 @@ "DatastreamAsyncClient", "AvroFileFormat", "BackfillJob", + "BigQueryDestinationConfig", + "BigQueryProfile", "ConnectionProfile", "CreateConnectionProfileRequest", "CreatePrivateConnectionRequest", @@ -143,6 +153,12 @@ "OracleSchema", "OracleSourceConfig", "OracleTable", + "PostgresqlColumn", + "PostgresqlProfile", + "PostgresqlRdbms", + "PostgresqlSchema", + "PostgresqlSourceConfig", + "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", "Route", diff --git a/google/cloud/datastream_v1/services/datastream/async_client.py b/google/cloud/datastream_v1/services/datastream/async_client.py index 2872ed8..32f0019 100644 --- a/google/cloud/datastream_v1/services/datastream/async_client.py +++ b/google/cloud/datastream_v1/services/datastream/async_client.py @@ -33,6 +33,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import 
field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -2985,6 +2989,331 @@ async def sample_delete_route(): # Done; return the response. return response + async def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._client._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self): return self diff --git a/google/cloud/datastream_v1/services/datastream/client.py b/google/cloud/datastream_v1/services/datastream/client.py index 4f34172..5e9913e 100644 --- a/google/cloud/datastream_v1/services/datastream/client.py +++ b/google/cloud/datastream_v1/services/datastream/client.py @@ -36,6 +36,10 @@ from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -3338,6 +3342,331 @@ def __exit__(self, type, value, traceback): """ self.transport.close() + def list_operations( + self, + request: operations_pb2.ListOperationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.list_operations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + request: operations_pb2.GetOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
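+ # For example, request={"name": "projects/my-project/locations/us-central1/operations/sample-op"} (a hypothetical operation name) is expanded into a GetOperationRequest below.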
+ if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_operation( + self, + request: operations_pb2.DeleteOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.delete_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: operations_pb2.CancelOperationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.cancel_operation, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: locations_pb2.GetLocationRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_location, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_locations( + self, + request: locations_pb2.ListLocationsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.list_locations, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/datastream_v1/services/datastream/transports/base.py b/google/cloud/datastream_v1/services/datastream/transports/base.py index 3240729..14f5ed6 100644 --- a/google/cloud/datastream_v1/services/datastream/transports/base.py +++ b/google/cloud/datastream_v1/services/datastream/transports/base.py @@ -22,6 +22,9 @@ from google.api_core import retry as retries import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore from google.oauth2 import service_account # type: ignore import pkg_resources @@ -529,6 +532,60 @@ def delete_route( ]: raise NotImplementedError() + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/google/cloud/datastream_v1/services/datastream/transports/grpc.py b/google/cloud/datastream_v1/services/datastream/transports/grpc.py index 2a93326..d9075ee 100644 --- a/google/cloud/datastream_v1/services/datastream/transports/grpc.py +++ b/google/cloud/datastream_v1/services/datastream/transports/grpc.py @@ -20,6 +20,9 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore @@ -953,6 +956,112 @@ def delete_route( def close(self): self.grpc_channel.close() + @property + def delete_operation( + 
self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
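+ # The stub is created on first access and cached in self._stubs, so later reads of this property reuse the same stub.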
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + @property def kind(self) -> str: return "grpc" diff --git a/google/cloud/datastream_v1/services/datastream/transports/grpc_asyncio.py b/google/cloud/datastream_v1/services/datastream/transports/grpc_asyncio.py index e082a82..6298b87 100644 --- a/google/cloud/datastream_v1/services/datastream/transports/grpc_asyncio.py +++ b/google/cloud/datastream_v1/services/datastream/transports/grpc_asyncio.py @@ -19,6 +19,9 @@ from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -977,5 +980,111 @@ def delete_route( def close(self): return self.grpc_channel.close() + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self.grpc_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_location" not in self._stubs: + self._stubs["get_location"] = self.grpc_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + __all__ = ("DatastreamGrpcAsyncIOTransport",) diff --git a/google/cloud/datastream_v1/types/__init__.py b/google/cloud/datastream_v1/types/__init__.py index ea4dd7f..b559bff 100644 --- a/google/cloud/datastream_v1/types/__init__.py +++ b/google/cloud/datastream_v1/types/__init__.py @@ -53,6 +53,8 @@ from .datastream_resources import ( AvroFileFormat, BackfillJob, + BigQueryDestinationConfig, + BigQueryProfile, ConnectionProfile, DestinationConfig, Error, @@ -73,6 +75,12 @@ OracleSchema, OracleSourceConfig, OracleTable, + PostgresqlColumn, + PostgresqlProfile, + PostgresqlRdbms, + PostgresqlSchema, + PostgresqlSourceConfig, + PostgresqlTable, PrivateConnection, PrivateConnectivity, Route, @@ -125,6 +133,8 @@ "UpdateStreamRequest", "AvroFileFormat", "BackfillJob", + "BigQueryDestinationConfig", + "BigQueryProfile", "ConnectionProfile", "DestinationConfig", "Error", @@ -145,6 +155,12 @@ "OracleSchema", "OracleSourceConfig", "OracleTable", + "PostgresqlColumn", + "PostgresqlProfile", + "PostgresqlRdbms", + "PostgresqlSchema", + "PostgresqlSourceConfig", + "PostgresqlTable", "PrivateConnection", "PrivateConnectivity", "Route", diff --git a/google/cloud/datastream_v1/types/datastream.py b/google/cloud/datastream_v1/types/datastream.py index d1136aa..3889a78 100644 --- a/google/cloud/datastream_v1/types/datastream.py +++ b/google/cloud/datastream_v1/types/datastream.py @@ -104,6 +104,11 @@ class DiscoverConnectionProfileRequest(proto.Message): MySQL RDBMS to enrich with child data objects and metadata. + This field is a member of `oneof`_ ``data_object``. + postgresql_rdbms (google.cloud.datastream_v1.types.PostgresqlRdbms): + PostgreSQL RDBMS to enrich with child data + objects and metadata. + This field is a member of `oneof`_ ``data_object``. """ @@ -144,6 +149,12 @@ class DiscoverConnectionProfileRequest(proto.Message): oneof="data_object", message=datastream_resources.MysqlRdbms, ) + postgresql_rdbms = proto.Field( + proto.MESSAGE, + number=102, + oneof="data_object", + message=datastream_resources.PostgresqlRdbms, + ) class DiscoverConnectionProfileResponse(proto.Message): @@ -164,6 +175,10 @@ class DiscoverConnectionProfileResponse(proto.Message): mysql_rdbms (google.cloud.datastream_v1.types.MysqlRdbms): Enriched MySQL RDBMS object. + This field is a member of `oneof`_ ``data_object``. + postgresql_rdbms (google.cloud.datastream_v1.types.PostgresqlRdbms): + Enriched PostgreSQL RDBMS object. + This field is a member of `oneof`_ ``data_object``. 
""" @@ -179,6 +194,12 @@ class DiscoverConnectionProfileResponse(proto.Message): oneof="data_object", message=datastream_resources.MysqlRdbms, ) + postgresql_rdbms = proto.Field( + proto.MESSAGE, + number=102, + oneof="data_object", + message=datastream_resources.PostgresqlRdbms, + ) class FetchStaticIpsRequest(proto.Message): diff --git a/google/cloud/datastream_v1/types/datastream_resources.py b/google/cloud/datastream_v1/types/datastream_resources.py index 55e3eef..1b53e53 100644 --- a/google/cloud/datastream_v1/types/datastream_resources.py +++ b/google/cloud/datastream_v1/types/datastream_resources.py @@ -22,7 +22,9 @@ manifest={ "OracleProfile", "MysqlProfile", + "PostgresqlProfile", "GcsProfile", + "BigQueryProfile", "StaticServiceIpConnectivity", "ForwardSshTunnelConnectivity", "VpcPeeringConfig", @@ -36,6 +38,11 @@ "OracleSchema", "OracleRdbms", "OracleSourceConfig", + "PostgresqlColumn", + "PostgresqlTable", + "PostgresqlSchema", + "PostgresqlRdbms", + "PostgresqlSourceConfig", "MysqlColumn", "MysqlTable", "MysqlDatabase", @@ -45,6 +52,7 @@ "AvroFileFormat", "JsonFileFormat", "GcsDestinationConfig", + "BigQueryDestinationConfig", "DestinationConfig", "Stream", "StreamObject", @@ -145,6 +153,49 @@ class MysqlProfile(proto.Message): ) +class PostgresqlProfile(proto.Message): + r"""PostgreSQL database profile. + + Attributes: + hostname (str): + Required. Hostname for the PostgreSQL + connection. + port (int): + Port for the PostgreSQL connection, default + value is 5432. + username (str): + Required. Username for the PostgreSQL + connection. + password (str): + Required. Password for the PostgreSQL + connection. + database (str): + Required. Database for the PostgreSQL + connection. + """ + + hostname = proto.Field( + proto.STRING, + number=1, + ) + port = proto.Field( + proto.INT32, + number=2, + ) + username = proto.Field( + proto.STRING, + number=3, + ) + password = proto.Field( + proto.STRING, + number=4, + ) + database = proto.Field( + proto.STRING, + number=5, + ) + + class GcsProfile(proto.Message): r"""Cloud Storage bucket profile. @@ -166,6 +217,10 @@ class GcsProfile(proto.Message): ) +class BigQueryProfile(proto.Message): + r"""BigQuery warehouse profile.""" + + class StaticServiceIpConnectivity(proto.Message): r"""Static IP address connectivity.""" @@ -477,6 +532,14 @@ class ConnectionProfile(proto.Message): mysql_profile (google.cloud.datastream_v1.types.MysqlProfile): MySQL ConnectionProfile configuration. + This field is a member of `oneof`_ ``profile``. + bigquery_profile (google.cloud.datastream_v1.types.BigQueryProfile): + BigQuery Connection Profile configuration. + + This field is a member of `oneof`_ ``profile``. + postgresql_profile (google.cloud.datastream_v1.types.PostgresqlProfile): + PostgreSQL Connection Profile configuration. + This field is a member of `oneof`_ ``profile``. static_service_ip_connectivity (google.cloud.datastream_v1.types.StaticServiceIpConnectivity): Static Service IP connectivity. @@ -533,6 +596,18 @@ class ConnectionProfile(proto.Message): oneof="profile", message="MysqlProfile", ) + bigquery_profile = proto.Field( + proto.MESSAGE, + number=103, + oneof="profile", + message="BigQueryProfile", + ) + postgresql_profile = proto.Field( + proto.MESSAGE, + number=104, + oneof="profile", + message="PostgresqlProfile", + ) static_service_ip_connectivity = proto.Field( proto.MESSAGE, number=200, @@ -626,8 +701,8 @@ class OracleTable(proto.Message): Table name. 
oracle_columns (Sequence[google.cloud.datastream_v1.types.OracleColumn]): Oracle columns in the schema. - When unspecified as part of inclue/exclude - lists, includes/excludes everything. + When unspecified as part of include/exclude + objects, includes/excludes everything. """ table = proto.Field( @@ -681,13 +756,39 @@ class OracleRdbms(proto.Message): class OracleSourceConfig(proto.Message): r"""Oracle data source configuration + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: include_objects (google.cloud.datastream_v1.types.OracleRdbms): Oracle objects to include in the stream. exclude_objects (google.cloud.datastream_v1.types.OracleRdbms): Oracle objects to exclude from the stream. + max_concurrent_cdc_tasks (int): + Maximum number of concurrent CDC tasks. The + number should be non-negative. If not set (or + set to 0), the system's default value will be + used. + drop_large_objects (google.cloud.datastream_v1.types.OracleSourceConfig.DropLargeObjects): + Drop large object values. + + This field is a member of `oneof`_ ``large_objects_handling``. + stream_large_objects (google.cloud.datastream_v1.types.OracleSourceConfig.StreamLargeObjects): + Stream large object values. + + This field is a member of `oneof`_ ``large_objects_handling``. """ + class DropLargeObjects(proto.Message): + r"""Configuration to drop large object values.""" + + class StreamLargeObjects(proto.Message): + r"""Configuration to stream large object values.""" + include_objects = proto.Field( + proto.MESSAGE, + number=1, @@ -698,6 +799,178 @@ class OracleSourceConfig(proto.Message): + number=2, + message="OracleRdbms", + ) + max_concurrent_cdc_tasks = proto.Field( + proto.INT32, + number=3, + ) + drop_large_objects = proto.Field( + proto.MESSAGE, + number=100, + oneof="large_objects_handling", + message=DropLargeObjects, + ) + stream_large_objects = proto.Field( + proto.MESSAGE, + number=102, + oneof="large_objects_handling", + message=StreamLargeObjects, + ) + + +class PostgresqlColumn(proto.Message): + r"""PostgreSQL Column. + + Attributes: + column (str): + Column name. + data_type (str): + The PostgreSQL data type. + length (int): + Column length. + precision (int): + Column precision. + scale (int): + Column scale. + primary_key (bool): + Whether or not the column represents a + primary key. + nullable (bool): + Whether or not the column can accept a null + value. + ordinal_position (int): + The ordinal position of the column in the + table. + """ + + column = proto.Field( + proto.STRING, + number=1, + ) + data_type = proto.Field( + proto.STRING, + number=2, + ) + length = proto.Field( + proto.INT32, + number=3, + ) + precision = proto.Field( + proto.INT32, + number=4, + ) + scale = proto.Field( + proto.INT32, + number=5, + ) + primary_key = proto.Field( + proto.BOOL, + number=7, + ) + nullable = proto.Field( + proto.BOOL, + number=8, + ) + ordinal_position = proto.Field( + proto.INT32, + number=9, + ) + + +class PostgresqlTable(proto.Message): + r"""PostgreSQL table. + + Attributes: + table (str): + Table name. + postgresql_columns (Sequence[google.cloud.datastream_v1.types.PostgresqlColumn]): + PostgreSQL columns in the schema. + When unspecified as part of include/exclude + objects, includes/excludes everything.
+ """ + + table = proto.Field( + proto.STRING, + number=1, + ) + postgresql_columns = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="PostgresqlColumn", + ) + + +class PostgresqlSchema(proto.Message): + r"""PostgreSQL schema. + + Attributes: + schema (str): + Schema name. + postgresql_tables (Sequence[google.cloud.datastream_v1.types.PostgresqlTable]): + Tables in the schema. + """ + + schema = proto.Field( + proto.STRING, + number=1, + ) + postgresql_tables = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="PostgresqlTable", + ) + + +class PostgresqlRdbms(proto.Message): + r"""PostgreSQL database structure. + + Attributes: + postgresql_schemas (Sequence[google.cloud.datastream_v1.types.PostgresqlSchema]): + PostgreSQL schemas in the database server. + """ + + postgresql_schemas = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PostgresqlSchema", + ) + + +class PostgresqlSourceConfig(proto.Message): + r"""PostgreSQL data source configuration + + Attributes: + include_objects (google.cloud.datastream_v1.types.PostgresqlRdbms): + PostgreSQL objects to include in the stream. + exclude_objects (google.cloud.datastream_v1.types.PostgresqlRdbms): + PostgreSQL objects to exclude from the + stream. + replication_slot (str): + Required. The name of the logical replication + slot that's configured with the pgoutput plugin. + publication (str): + Required. The name of the publication that includes the set + of all tables that are defined in the stream's + include_objects. + """ + + include_objects = proto.Field( + proto.MESSAGE, + number=1, + message="PostgresqlRdbms", + ) + exclude_objects = proto.Field( + proto.MESSAGE, + number=2, + message="PostgresqlRdbms", + ) + replication_slot = proto.Field( + proto.STRING, + number=3, + ) + publication = proto.Field( + proto.STRING, + number=4, + ) class MysqlColumn(proto.Message): @@ -764,7 +1037,7 @@ class MysqlTable(proto.Message): mysql_columns (Sequence[google.cloud.datastream_v1.types.MysqlColumn]): MySQL columns in the database. When unspecified as part of include/exclude - lists, includes/excludes everything. + objects, includes/excludes everything. """ table = proto.Field( @@ -822,6 +1095,11 @@ class MysqlSourceConfig(proto.Message): MySQL objects to retrieve from the source. exclude_objects (google.cloud.datastream_v1.types.MysqlRdbms): MySQL objects to exclude from the stream. + max_concurrent_cdc_tasks (int): + Maximum number of concurrent CDC tasks. The + number should be non negative. If not set (or + set to 0), the system's default value will be + used. """ include_objects = proto.Field( @@ -834,6 +1112,10 @@ class MysqlSourceConfig(proto.Message): number=2, message="MysqlRdbms", ) + max_concurrent_cdc_tasks = proto.Field( + proto.INT32, + number=3, + ) class SourceConfig(proto.Message): @@ -851,11 +1133,15 @@ class SourceConfig(proto.Message): Required. Source connection profile resoource. Format: ``projects/{project}/locations/{location}/connectionProfiles/{name}`` oracle_source_config (google.cloud.datastream_v1.types.OracleSourceConfig): - Oracle data source configuration + Oracle data source configuration. This field is a member of `oneof`_ ``source_stream_config``. mysql_source_config (google.cloud.datastream_v1.types.MysqlSourceConfig): - MySQL data source configuration + MySQL data source configuration. + + This field is a member of `oneof`_ ``source_stream_config``. + postgresql_source_config (google.cloud.datastream_v1.types.PostgresqlSourceConfig): + PostgreSQL data source configuration. 
This field is a member of `oneof`_ ``source_stream_config``. """ @@ -876,6 +1162,12 @@ class SourceConfig(proto.Message): oneof="source_stream_config", message="MysqlSourceConfig", ) + postgresql_source_config = proto.Field( + proto.MESSAGE, + number=102, + oneof="source_stream_config", + message="PostgresqlSourceConfig", + ) class AvroFileFormat(proto.Message): @@ -974,9 +1266,127 @@ class GcsDestinationConfig(proto.Message): ) +class BigQueryDestinationConfig(proto.Message): + r""" + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + single_target_dataset (google.cloud.datastream_v1.types.BigQueryDestinationConfig.SingleTargetDataset): + Single destination dataset. + + This field is a member of `oneof`_ ``dataset_config``. + source_hierarchy_datasets (google.cloud.datastream_v1.types.BigQueryDestinationConfig.SourceHierarchyDatasets): + Source hierarchy datasets. + + This field is a member of `oneof`_ ``dataset_config``. + data_freshness (google.protobuf.duration_pb2.Duration): + The guaranteed data freshness (in seconds) + when querying tables created by the stream. + Editing this field will only affect new tables + created in the future, but existing tables will + not be impacted. Lower values mean that queries + will return fresher data, but may result in + higher cost. + """ + + class SingleTargetDataset(proto.Message): + r"""A single target dataset to which all data will be streamed. + + Attributes: + dataset_id (str): + + """ + + dataset_id = proto.Field( + proto.STRING, + number=1, + ) + + class SourceHierarchyDatasets(proto.Message): + r"""Destination datasets are created so that the hierarchy of the + destination data objects matches the source hierarchy. + + Attributes: + dataset_template (google.cloud.datastream_v1.types.BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate): + + """ + + class DatasetTemplate(proto.Message): + r"""Dataset template used for dynamic dataset creation. + + Attributes: + location (str): + Required. The geographic location where the + dataset should reside. See + https://cloud.google.com/bigquery/docs/locations + for supported locations. + dataset_id_prefix (str): + If supplied, every created dataset will have its name + prefixed by the provided value. The prefix and name will be + separated by an underscore, i.e. <prefix>_<dataset_name>. + kms_key_name (str): + Describes the Cloud KMS encryption key that will be used to + protect the destination BigQuery table. The BigQuery Service + Account associated with your project requires access to this + encryption key. i.e. + projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. + See + https://cloud.google.com/bigquery/docs/customer-managed-encryption + for more information.
+ """ + + location = proto.Field( + proto.STRING, + number=1, + ) + dataset_id_prefix = proto.Field( + proto.STRING, + number=2, + ) + kms_key_name = proto.Field( + proto.STRING, + number=3, + ) + + dataset_template = proto.Field( + proto.MESSAGE, + number=2, + message="BigQueryDestinationConfig.SourceHierarchyDatasets.DatasetTemplate", + ) + + single_target_dataset = proto.Field( + proto.MESSAGE, + number=201, + oneof="dataset_config", + message=SingleTargetDataset, + ) + source_hierarchy_datasets = proto.Field( + proto.MESSAGE, + number=202, + oneof="dataset_config", + message=SourceHierarchyDatasets, + ) + data_freshness = proto.Field( + proto.MESSAGE, + number=300, + message=duration_pb2.Duration, + ) + + class DestinationConfig(proto.Message): r"""The configuration of the stream destination. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -987,6 +1397,10 @@ class DestinationConfig(proto.Message): A configuration for how data should be loaded to Cloud Storage. + This field is a member of `oneof`_ ``destination_stream_config``. + bigquery_destination_config (google.cloud.datastream_v1.types.BigQueryDestinationConfig): + BigQuery destination configuration. + This field is a member of `oneof`_ ``destination_stream_config``. """ @@ -1000,6 +1414,12 @@ class DestinationConfig(proto.Message): oneof="destination_stream_config", message="GcsDestinationConfig", ) + bigquery_destination_config = proto.Field( + proto.MESSAGE, + number=101, + oneof="destination_stream_config", + message="BigQueryDestinationConfig", + ) class Stream(proto.Message): @@ -1088,6 +1508,11 @@ class BackfillAllStrategy(proto.Message): MySQL data source objects to avoid backfilling. + This field is a member of `oneof`_ ``excluded_objects``. + postgresql_excluded_objects (google.cloud.datastream_v1.types.PostgresqlRdbms): + PostgreSQL data source objects to avoid + backfilling. + This field is a member of `oneof`_ ``excluded_objects``. """ @@ -1103,6 +1528,12 @@ class BackfillAllStrategy(proto.Message): oneof="excluded_objects", message="MysqlRdbms", ) + postgresql_excluded_objects = proto.Field( + proto.MESSAGE, + number=3, + oneof="excluded_objects", + message="PostgresqlRdbms", + ) class BackfillNoneStrategy(proto.Message): r"""Backfill strategy to disable automatic backfill for the @@ -1247,6 +1678,10 @@ class SourceObjectIdentifier(proto.Message): mysql_identifier (google.cloud.datastream_v1.types.SourceObjectIdentifier.MysqlObjectIdentifier): Mysql data source object identifier. + This field is a member of `oneof`_ ``source_identifier``. + postgresql_identifier (google.cloud.datastream_v1.types.SourceObjectIdentifier.PostgresqlObjectIdentifier): + PostgreSQL data source object identifier. + This field is a member of `oneof`_ ``source_identifier``. """ @@ -1269,6 +1704,25 @@ class OracleObjectIdentifier(proto.Message): number=2, ) + class PostgresqlObjectIdentifier(proto.Message): + r"""PostgreSQL data source object identifier. + + Attributes: + schema (str): + Required. The schema name. + table (str): + Required. The table name. + """ + + schema = proto.Field( + proto.STRING, + number=1, + ) + table = proto.Field( + proto.STRING, + number=2, + ) + class MysqlObjectIdentifier(proto.Message): r"""Mysql data source object identifier. 
@@ -1300,6 +1754,12 @@ class MysqlObjectIdentifier(proto.Message): oneof="source_identifier", message=MysqlObjectIdentifier, ) + postgresql_identifier = proto.Field( + proto.MESSAGE, + number=3, + oneof="source_identifier", + message=PostgresqlObjectIdentifier, + ) class BackfillJob(proto.Message): diff --git a/scripts/fixup_datastream_v1_keywords.py b/scripts/fixup_datastream_v1_keywords.py index c4b4fb5..f390fcd 100644 --- a/scripts/fixup_datastream_v1_keywords.py +++ b/scripts/fixup_datastream_v1_keywords.py @@ -47,7 +47,7 @@ class datastreamCallTransformer(cst.CSTTransformer): 'delete_private_connection': ('name', 'request_id', 'force', ), 'delete_route': ('name', 'request_id', ), 'delete_stream': ('name', 'request_id', ), - 'discover_connection_profile': ('parent', 'connection_profile', 'connection_profile_name', 'full_hierarchy', 'hierarchy_depth', 'oracle_rdbms', 'mysql_rdbms', ), + 'discover_connection_profile': ('parent', 'connection_profile', 'connection_profile_name', 'full_hierarchy', 'hierarchy_depth', 'oracle_rdbms', 'mysql_rdbms', 'postgresql_rdbms', ), 'fetch_static_ips': ('name', 'page_size', 'page_token', ), 'get_connection_profile': ('name', ), 'get_private_connection': ('name', ), diff --git a/setup.py b/setup.py index b4c5ce5..f263d7f 100644 --- a/setup.py +++ b/setup.py @@ -21,13 +21,14 @@ name = "google-cloud-datastream" description = "Datastream client library" -version = "1.1.1" +version = "1.2.0" release_status = "Development Status :: 5 - Production/Stable" url = "https://github.com/googleapis/python-datastream" dependencies = [ "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf >= 3.19.0, <5.0.0dev", + "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", ] package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/tests/unit/gapic/datastream_v1/test_datastream.py b/tests/unit/gapic/datastream_v1/test_datastream.py index a11774b..f374459 100644 --- a/tests/unit/gapic/datastream_v1/test_datastream.py +++ b/tests/unit/gapic/datastream_v1/test_datastream.py @@ -39,6 +39,10 @@ import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore @@ -7970,6 +7974,12 @@ def test_datastream_base_transport(): "get_route", "list_routes", "delete_route", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", ) for method in methods: with pytest.raises(NotImplementedError): @@ -8646,6 +8656,860 @@ async def test_transport_close_async(): close.assert_called_once() +def test_delete_operation(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
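+ # Patch __call__ on the callable's type: Python looks up special methods on the class, so patching the instance would not intercept the call.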
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
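+    # Passing a plain dict as ``request`` exercises the coercion path: because
+    # these requests are raw protobuf types rather than proto-plus wrappers,
+    # the handler expands the dict via
+    # operations_pb2.DeleteOperationRequest(**request) before invoking the stub.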
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
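+    # mock_calls entries unpack to (name, args, kwargs); the routing header
+    # derived from ``request.name`` travels in the ``metadata`` kwarg.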
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
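+    # grpc_helpers_async.FakeUnaryUnaryCall wraps the return value in an
+    # awaitable call object, letting the async client ``await`` the mocked
+    # RPC exactly as it would a real grpc.aio call.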
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
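+    # The client copies ``request.name`` into the x-goog-request-params
+    # metadata entry asserted below, which is how the backend routes the
+    # call to the right resource.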
+ request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = DatastreamClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = DatastreamAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = DatastreamClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations/abc", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_location_field_headers_async(): + client = DatastreamAsyncClient(credentials=ga_credentials.AnonymousCredentials()) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. 
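+    # With the path-like name set above, the expected routing header below
+    # becomes "name=locations/abc" rather than the bare "locations" used in
+    # the operations tests.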
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = DatastreamClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    # Patch the get_location stub so the call below is intercepted rather
+    # than reaching the real transport.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = DatastreamAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        response = await client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
 def test_transport_close():
     transports = {
         "grpc": "_grpc_channel",