- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 0f581298..f54b8f1c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,12 +20,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that cannot read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,24 +39,22 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
+ "recommonmark",
]
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
-# Allow markdown includes (so releases.md can include CHANGLEOG.md)
-# http://www.sphinx-doc.org/en/master/markdown.html
-source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
-
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
@@ -93,7 +95,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -338,10 +345,11 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
- "grpc": ("https://grpc.io/grpc/python/", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/index.rst b/docs/index.rst
index 36e2f2ed..c4c082aa 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,5 +1,7 @@
.. include:: README.rst
+.. include:: multiprocessing.rst
+
API Reference
-------------
.. toctree::
@@ -16,4 +18,4 @@ For a list of all ``google-cloud-recommendations-ai`` releases:
.. toctree::
:maxdepth: 2
- changelog
\ No newline at end of file
+ changelog
diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst
new file mode 100644
index 00000000..1cb29d4c
--- /dev/null
+++ b/docs/multiprocessing.rst
@@ -0,0 +1,7 @@
+.. note::
+
+    Because this client uses the :mod:`grpcio` library, it is safe to
+ share instances across threads. In multiprocessing scenarios, the best
+ practice is to create client instances *after* the invocation of
+ :func:`os.fork` by :class:`multiprocessing.Pool` or
+ :class:`multiprocessing.Process`.
diff --git a/docs/recommendationengine_v1beta1/catalog_service.rst b/docs/recommendationengine_v1beta1/catalog_service.rst
new file mode 100644
index 00000000..7d05e892
--- /dev/null
+++ b/docs/recommendationengine_v1beta1/catalog_service.rst
@@ -0,0 +1,11 @@
+CatalogService
+--------------------------------
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.catalog_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.catalog_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/recommendationengine_v1beta1/prediction_api_key_registry.rst b/docs/recommendationengine_v1beta1/prediction_api_key_registry.rst
new file mode 100644
index 00000000..059e2e7d
--- /dev/null
+++ b/docs/recommendationengine_v1beta1/prediction_api_key_registry.rst
@@ -0,0 +1,11 @@
+PredictionApiKeyRegistry
+------------------------------------------
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/recommendationengine_v1beta1/prediction_service.rst b/docs/recommendationengine_v1beta1/prediction_service.rst
new file mode 100644
index 00000000..93a16f30
--- /dev/null
+++ b/docs/recommendationengine_v1beta1/prediction_service.rst
@@ -0,0 +1,11 @@
+PredictionService
+-----------------------------------
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.prediction_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.prediction_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/recommendationengine_v1beta1/services.rst b/docs/recommendationengine_v1beta1/services.rst
index f2a72ea2..ced8732d 100644
--- a/docs/recommendationengine_v1beta1/services.rst
+++ b/docs/recommendationengine_v1beta1/services.rst
@@ -1,6 +1,9 @@
-Client for Google Cloud Recommendationengine API
-================================================
+Services for Google Cloud Recommendationengine v1beta1 API
+==========================================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.recommendationengine_v1beta1
- :members:
- :inherited-members:
+ catalog_service
+ prediction_api_key_registry
+ prediction_service
+ user_event_service
diff --git a/docs/recommendationengine_v1beta1/types.rst b/docs/recommendationengine_v1beta1/types.rst
index 4aa6ca6f..679552ed 100644
--- a/docs/recommendationengine_v1beta1/types.rst
+++ b/docs/recommendationengine_v1beta1/types.rst
@@ -1,5 +1,7 @@
-Types for Google Cloud Recommendationengine API
-===============================================
+Types for Google Cloud Recommendationengine v1beta1 API
+=======================================================
.. automodule:: google.cloud.recommendationengine_v1beta1.types
:members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/docs/recommendationengine_v1beta1/user_event_service.rst b/docs/recommendationengine_v1beta1/user_event_service.rst
new file mode 100644
index 00000000..665351c9
--- /dev/null
+++ b/docs/recommendationengine_v1beta1/user_event_service.rst
@@ -0,0 +1,11 @@
+UserEventService
+----------------------------------
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.user_event_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.recommendationengine_v1beta1.services.user_event_service.pagers
+ :members:
+ :inherited-members:
diff --git a/google/cloud/recommendationengine/__init__.py b/google/cloud/recommendationengine/__init__.py
index 84017128..80454bde 100644
--- a/google/cloud/recommendationengine/__init__.py
+++ b/google/cloud/recommendationengine/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,16 +15,27 @@
# limitations under the License.
#
-
+from google.cloud.recommendationengine_v1beta1.services.catalog_service.async_client import (
+ CatalogServiceAsyncClient,
+)
from google.cloud.recommendationengine_v1beta1.services.catalog_service.client import (
CatalogServiceClient,
)
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.async_client import (
+ PredictionApiKeyRegistryAsyncClient,
+)
from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.client import (
PredictionApiKeyRegistryClient,
)
+from google.cloud.recommendationengine_v1beta1.services.prediction_service.async_client import (
+ PredictionServiceAsyncClient,
+)
from google.cloud.recommendationengine_v1beta1.services.prediction_service.client import (
PredictionServiceClient,
)
+from google.cloud.recommendationengine_v1beta1.services.user_event_service.async_client import (
+ UserEventServiceAsyncClient,
+)
from google.cloud.recommendationengine_v1beta1.services.user_event_service.client import (
UserEventServiceClient,
)
@@ -129,6 +140,7 @@
__all__ = (
"CatalogInlineSource",
"CatalogItem",
+ "CatalogServiceAsyncClient",
"CatalogServiceClient",
"CollectUserEventRequest",
"CreateCatalogItemRequest",
@@ -156,7 +168,9 @@
"PredictRequest",
"PredictResponse",
"PredictionApiKeyRegistration",
+ "PredictionApiKeyRegistryAsyncClient",
"PredictionApiKeyRegistryClient",
+ "PredictionServiceAsyncClient",
"PredictionServiceClient",
"ProductCatalogItem",
"ProductDetail",
@@ -169,6 +183,7 @@
"UserEvent",
"UserEventImportSummary",
"UserEventInlineSource",
+ "UserEventServiceAsyncClient",
"UserEventServiceClient",
"UserInfo",
"WriteUserEventRequest",
diff --git a/google/cloud/recommendationengine_v1beta1/__init__.py b/google/cloud/recommendationengine_v1beta1/__init__.py
index 7e841aab..60f9ea99 100644
--- a/google/cloud/recommendationengine_v1beta1/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,6 @@
# limitations under the License.
#
-
from .services.catalog_service import CatalogServiceClient
from .services.prediction_api_key_registry import PredictionApiKeyRegistryClient
from .services.prediction_service import PredictionServiceClient
@@ -74,6 +73,7 @@
__all__ = (
"CatalogInlineSource",
"CatalogItem",
+ "CatalogServiceClient",
"CollectUserEventRequest",
"CreateCatalogItemRequest",
"CreatePredictionApiKeyRegistrationRequest",
@@ -113,8 +113,7 @@
"UserEvent",
"UserEventImportSummary",
"UserEventInlineSource",
- "UserEventServiceClient",
"UserInfo",
"WriteUserEventRequest",
- "CatalogServiceClient",
+ "UserEventServiceClient",
)
diff --git a/google/cloud/recommendationengine_v1beta1/services/__init__.py b/google/cloud/recommendationengine_v1beta1/services/__init__.py
index 2c56c537..42ffdf2b 100644
--- a/google/cloud/recommendationengine_v1beta1/services/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/__init__.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/__init__.py
index 7e42fe71..d5e1eecf 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,5 +16,9 @@
#
from .client import CatalogServiceClient
+from .async_client import CatalogServiceAsyncClient
-__all__ = ("CatalogServiceClient",)
+__all__ = (
+ "CatalogServiceClient",
+ "CatalogServiceAsyncClient",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py
new file mode 100644
index 00000000..611957fd
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/async_client.py
@@ -0,0 +1,778 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
+from google.cloud.recommendationengine_v1beta1.services.catalog_service import pagers
+from google.cloud.recommendationengine_v1beta1.types import catalog
+from google.cloud.recommendationengine_v1beta1.types import catalog_service
+from google.cloud.recommendationengine_v1beta1.types import common
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+
+from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport
+from .client import CatalogServiceClient
+
+
+class CatalogServiceAsyncClient:
+ """Service for ingesting catalog information of the customer's
+ website.
+ """
+
+ _client: CatalogServiceClient
+
+ DEFAULT_ENDPOINT = CatalogServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = CatalogServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ catalog_path = staticmethod(CatalogServiceClient.catalog_path)
+ parse_catalog_path = staticmethod(CatalogServiceClient.parse_catalog_path)
+
+ common_billing_account_path = staticmethod(
+ CatalogServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ CatalogServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(CatalogServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ CatalogServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ CatalogServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ CatalogServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(CatalogServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ CatalogServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(CatalogServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ CatalogServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CatalogServiceAsyncClient: The constructed client.
+ """
+ return CatalogServiceClient.from_service_account_info.__func__(CatalogServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CatalogServiceAsyncClient: The constructed client.
+ """
+ return CatalogServiceClient.from_service_account_file.__func__(CatalogServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> CatalogServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ CatalogServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(CatalogServiceClient).get_transport_class, type(CatalogServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, CatalogServiceTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the catalog service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.CatalogServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = CatalogServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def create_catalog_item(
+ self,
+ request: catalog_service.CreateCatalogItemRequest = None,
+ *,
+ parent: str = None,
+ catalog_item: catalog.CatalogItem = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> catalog.CatalogItem:
+ r"""Creates a catalog item.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.CreateCatalogItemRequest`):
+ The request object. Request message for
+ CreateCatalogItem method.
+ parent (:class:`str`):
+ Required. The parent catalog resource name, such as
+ ``projects/*/locations/global/catalogs/default_catalog``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ catalog_item (:class:`google.cloud.recommendationengine_v1beta1.types.CatalogItem`):
+ Required. The catalog item to create.
+ This corresponds to the ``catalog_item`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
+ CatalogItem captures all metadata
+ information of items to be recommended.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, catalog_item])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = catalog_service.CreateCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if catalog_item is not None:
+ request.catalog_item = catalog_item
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def get_catalog_item(
+ self,
+ request: catalog_service.GetCatalogItemRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> catalog.CatalogItem:
+ r"""Gets a specific catalog item.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.GetCatalogItemRequest`):
+ The request object. Request message for GetCatalogItem
+ method.
+ name (:class:`str`):
+ Required. Full resource name of catalog item, such as
+ ``projects/*/locations/global/catalogs/default_catalog/catalogitems/some_catalog_item_id``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
+ CatalogItem captures all metadata
+ information of items to be recommended.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = catalog_service.GetCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_catalog_items(
+ self,
+ request: catalog_service.ListCatalogItemsRequest = None,
+ *,
+ parent: str = None,
+ filter: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListCatalogItemsAsyncPager:
+ r"""Gets a list of catalog items.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsRequest`):
+ The request object. Request message for ListCatalogItems
+ method.
+ parent (:class:`str`):
+ Required. The parent catalog resource name, such as
+ ``projects/*/locations/global/catalogs/default_catalog``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ Optional. A filter to apply on the
+ list results.
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.services.catalog_service.pagers.ListCatalogItemsAsyncPager:
+ Response message for ListCatalogItems
+ method.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = catalog_service.ListCatalogItemsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_catalog_items,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListCatalogItemsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def update_catalog_item(
+ self,
+ request: catalog_service.UpdateCatalogItemRequest = None,
+ *,
+ name: str = None,
+ catalog_item: catalog.CatalogItem = None,
+ update_mask: field_mask.FieldMask = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> catalog.CatalogItem:
+ r"""Updates a catalog item. Partial updating is
+ supported. Non-existing items will be created.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.UpdateCatalogItemRequest`):
+ The request object. Request message for
+ UpdateCatalogItem method.
+ name (:class:`str`):
+ Required. Full resource name of catalog item, such as
+ ``projects/*/locations/global/catalogs/default_catalog/catalogItems/some_catalog_item_id``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ catalog_item (:class:`google.cloud.recommendationengine_v1beta1.types.CatalogItem`):
+ Required. The catalog item to update/create. The
+ 'catalog_item_id' field has to match that in the 'name'.
+
+ This corresponds to the ``catalog_item`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
+ Optional. Indicates which fields in
+ the provided 'item' to update. If not
+ set, will by default update all fields.
+
+ This corresponds to the ``update_mask`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
+ CatalogItem captures all metadata
+ information of items to be recommended.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name, catalog_item, update_mask])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = catalog_service.UpdateCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if catalog_item is not None:
+ request.catalog_item = catalog_item
+ if update_mask is not None:
+ request.update_mask = update_mask
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.update_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_catalog_item(
+ self,
+ request: catalog_service.DeleteCatalogItemRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a catalog item.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.DeleteCatalogItemRequest`):
+ The request object. Request message for
+ DeleteCatalogItem method.
+ name (:class:`str`):
+ Required. Full resource name of catalog item, such as
+ ``projects/*/locations/global/catalogs/default_catalog/catalogItems/some_catalog_item_id``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = catalog_service.DeleteCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ async def import_catalog_items(
+ self,
+ request: import_.ImportCatalogItemsRequest = None,
+ *,
+ parent: str = None,
+ request_id: str = None,
+ input_config: import_.InputConfig = None,
+ errors_config: import_.ImportErrorsConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Bulk import of multiple catalog items. Request
+ processing may be synchronous. No partial updating
+ supported. Non-existing items will be created.
+
+ Operation.response is of type ImportResponse. Note that
+ it is possible for a subset of the items to be
+ successfully updated.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.ImportCatalogItemsRequest`):
+ The request object. Request message for Import methods.
+ parent (:class:`str`):
+ Required.
+ "projects/1234/locations/global/catalogs/default_catalog"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ request_id (:class:`str`):
+ Optional. Unique identifier provided
+ by client, within the ancestor dataset
+ scope. Ensures idempotency and used for
+ request deduplication. Server-generated
+ if unspecified. Up to 128 characters
+ long. This is returned as
+ google.longrunning.Operation.name in the
+ response.
+
+ This corresponds to the ``request_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`google.cloud.recommendationengine_v1beta1.types.InputConfig`):
+ Required. The desired input location
+ of the data.
+
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ errors_config (:class:`google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig`):
+ Optional. The desired location of
+ errors incurred during the Import.
+
+ This corresponds to the ``errors_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.ImportCatalogItemsResponse` Response of the ImportCatalogItemsRequest. If the long running
+ operation is done, then this message is returned by
+ the google.longrunning.Operations.response field if
+ the operation was successful.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, request_id, input_config, errors_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = import_.ImportCatalogItemsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if request_id is not None:
+ request.request_id = request_id
+ if input_config is not None:
+ request.input_config = input_config
+ if errors_config is not None:
+ request.errors_config = errors_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.import_catalog_items,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ import_.ImportCatalogItemsResponse,
+ metadata_type=import_.ImportMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("CatalogServiceAsyncClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py
index 73720841..b040285e 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,17 +16,24 @@
#
from collections import OrderedDict
-from typing import Dict, Sequence, Tuple, Type, Union
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
-from google.api_core import operation
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
from google.cloud.recommendationengine_v1beta1.services.catalog_service import pagers
from google.cloud.recommendationengine_v1beta1.types import catalog
from google.cloud.recommendationengine_v1beta1.types import catalog_service
@@ -34,8 +41,9 @@
from google.cloud.recommendationengine_v1beta1.types import import_
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import CatalogServiceTransport
+from .transports.base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import CatalogServiceGrpcTransport
+from .transports.grpc_asyncio import CatalogServiceGrpcAsyncIOTransport
class CatalogServiceClientMeta(type):
@@ -50,8 +58,9 @@ class CatalogServiceClientMeta(type):
OrderedDict()
) # type: Dict[str, Type[CatalogServiceTransport]]
_transport_registry["grpc"] = CatalogServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport
- def get_transport_class(cls, label: str = None) -> Type[CatalogServiceTransport]:
+ def get_transport_class(cls, label: str = None,) -> Type[CatalogServiceTransport]:
"""Return an appropriate transport class.
Args:
@@ -75,10 +84,56 @@ class CatalogServiceClient(metaclass=CatalogServiceClientMeta):
website.
"""
- DEFAULT_OPTIONS = ClientOptions.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "recommendationengine.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ CatalogServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -91,7 +146,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ CatalogServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -99,12 +154,97 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> CatalogServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ CatalogServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def catalog_path(project: str, location: str, catalog: str,) -> str:
+ """Return a fully-qualified catalog string."""
+ return "projects/{project}/locations/{location}/catalogs/{catalog}".format(
+ project=project, location=location, catalog=catalog,
+ )
+
+ @staticmethod
+ def parse_catalog_path(path: str) -> Dict[str, str]:
+ """Parse a catalog path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, CatalogServiceTransport] = None,
- client_options: ClientOptions = DEFAULT_OPTIONS,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, CatalogServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the catalog service client.
@@ -114,36 +254,109 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.CatalogServiceTransport]): The
+ transport (Union[str, CatalogServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, CatalogServiceTransport):
- if credentials:
+ # transport is a CatalogServiceTransport instance.
+ if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
- host=client_options.api_endpoint
- or "recommendationengine.googleapis.com",
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def create_catalog_item(
self,
request: catalog_service.CreateCatalogItemRequest = None,
*,
+ parent: str = None,
+ catalog_item: catalog.CatalogItem = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -151,9 +364,21 @@ def create_catalog_item(
r"""Creates a catalog item.
Args:
- request (:class:`~.catalog_service.CreateCatalogItemRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.CreateCatalogItemRequest):
The request object. Request message for
CreateCatalogItem method.
+ parent (str):
+ Required. The parent catalog resource name, such as
+ ``projects/*/locations/global/catalogs/default_catalog``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ catalog_item (google.cloud.recommendationengine_v1beta1.types.CatalogItem):
+ Required. The catalog item to create.
+ This corresponds to the ``catalog_item`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -162,25 +387,48 @@ def create_catalog_item(
sent along with the request as metadata.
Returns:
- ~.catalog.CatalogItem:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
CatalogItem captures all metadata
information of items to be recommended.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, catalog_item])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a catalog_service.CreateCatalogItemRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, catalog_service.CreateCatalogItemRequest):
+ request = catalog_service.CreateCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- request = catalog_service.CreateCatalogItemRequest(request)
+ if parent is not None:
+ request.parent = parent
+ if catalog_item is not None:
+ request.catalog_item = catalog_item
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.create_catalog_item,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.create_catalog_item]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -197,12 +445,13 @@ def get_catalog_item(
r"""Gets a specific catalog item.
Args:
- request (:class:`~.catalog_service.GetCatalogItemRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.GetCatalogItemRequest):
The request object. Request message for GetCatalogItem
method.
- name (:class:`str`):
+ name (str):
Required. Full resource name of catalog item, such as
``projects/*/locations/global/catalogs/default_catalog/catalogitems/some_catalog_item_id``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -214,7 +463,7 @@ def get_catalog_item(
sent along with the request as metadata.
Returns:
- ~.catalog.CatalogItem:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
CatalogItem captures all metadata
information of items to be recommended.
@@ -222,26 +471,29 @@ def get_catalog_item(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = catalog_service.GetCatalogItemRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a catalog_service.GetCatalogItemRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, catalog_service.GetCatalogItemRequest):
+ request = catalog_service.GetCatalogItemRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_catalog_item,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_catalog_item]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -250,7 +502,7 @@ def get_catalog_item(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -259,6 +511,8 @@ def list_catalog_items(
self,
request: catalog_service.ListCatalogItemsRequest = None,
*,
+ parent: str = None,
+ filter: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -266,9 +520,23 @@ def list_catalog_items(
r"""Gets a list of catalog items.
Args:
- request (:class:`~.catalog_service.ListCatalogItemsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsRequest):
The request object. Request message for ListCatalogItems
method.
+ parent (str):
+ Required. The parent catalog resource name, such as
+ ``projects/*/locations/global/catalogs/default_catalog``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (str):
+ Optional. A filter to apply on the
+ list results.
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -277,7 +545,7 @@ def list_catalog_items(
sent along with the request as metadata.
Returns:
- ~.pagers.ListCatalogItemsPager:
+ google.cloud.recommendationengine_v1beta1.services.catalog_service.pagers.ListCatalogItemsPager:
Response message for ListCatalogItems
method.
Iterating over this object will yield
@@ -286,16 +554,33 @@ def list_catalog_items(
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = catalog_service.ListCatalogItemsRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a catalog_service.ListCatalogItemsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, catalog_service.ListCatalogItemsRequest):
+ request = catalog_service.ListCatalogItemsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_catalog_items,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_catalog_items]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -304,12 +589,12 @@ def list_catalog_items(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListCatalogItemsPager(
- method=rpc, request=request, response=response
+ method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
@@ -319,6 +604,7 @@ def update_catalog_item(
self,
request: catalog_service.UpdateCatalogItemRequest = None,
*,
+ name: str = None,
catalog_item: catalog.CatalogItem = None,
update_mask: field_mask.FieldMask = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -329,19 +615,28 @@ def update_catalog_item(
supported. Non-existing items will be created.
Args:
- request (:class:`~.catalog_service.UpdateCatalogItemRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.UpdateCatalogItemRequest):
The request object. Request message for
UpdateCatalogItem method.
- catalog_item (:class:`~.catalog.CatalogItem`):
+ name (str):
+ Required. Full resource name of catalog item, such as
+ ``projects/*/locations/global/catalogs/default_catalog/catalogItems/some_catalog_item_id``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ catalog_item (google.cloud.recommendationengine_v1beta1.types.CatalogItem):
Required. The catalog item to update/create. The
'catalog_item_id' field has to match that in the 'name'.
+
This corresponds to the ``catalog_item`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.field_mask.FieldMask`):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Indicates which fields in
the provided 'item' to update. If not
set, will by default update all fields.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -353,7 +648,7 @@ def update_catalog_item(
sent along with the request as metadata.
Returns:
- ~.catalog.CatalogItem:
+ google.cloud.recommendationengine_v1beta1.types.CatalogItem:
CatalogItem captures all metadata
information of items to be recommended.
@@ -361,33 +656,42 @@ def update_catalog_item(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([catalog_item, update_mask]):
+ has_flattened_params = any([name, catalog_item, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = catalog_service.UpdateCatalogItemRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a catalog_service.UpdateCatalogItemRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, catalog_service.UpdateCatalogItemRequest):
+ request = catalog_service.UpdateCatalogItemRequest(request)
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if catalog_item is not None:
- request.catalog_item = catalog_item
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if update_mask is not None:
- request.update_mask = update_mask
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if catalog_item is not None:
+ request.catalog_item = catalog_item
+ if update_mask is not None:
+ request.update_mask = update_mask
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.update_catalog_item,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.update_catalog_item]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -404,12 +708,13 @@ def delete_catalog_item(
r"""Deletes a catalog item.
Args:
- request (:class:`~.catalog_service.DeleteCatalogItemRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.DeleteCatalogItemRequest):
The request object. Request message for
DeleteCatalogItem method.
- name (:class:`str`):
+ name (str):
Required. Full resource name of catalog item, such as
``projects/*/locations/global/catalogs/default_catalog/catalogItems/some_catalog_item_id``
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -423,34 +728,49 @@ def delete_catalog_item(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
- request = catalog_service.DeleteCatalogItemRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a catalog_service.DeleteCatalogItemRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, catalog_service.DeleteCatalogItemRequest):
+ request = catalog_service.DeleteCatalogItemRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
+ if name is not None:
+ request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_catalog_item,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.delete_catalog_item]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
def import_catalog_items(
self,
request: import_.ImportCatalogItemsRequest = None,
*,
+ parent: str = None,
+ request_id: str = None,
+ input_config: import_.InputConfig = None,
+ errors_config: import_.ImportErrorsConfig = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -464,8 +784,42 @@ def import_catalog_items(
successfully updated.
Args:
- request (:class:`~.import_.ImportCatalogItemsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ImportCatalogItemsRequest):
The request object. Request message for Import methods.
+ parent (str):
+ Required.
+ "projects/1234/locations/global/catalogs/default_catalog"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ request_id (str):
+ Optional. Unique identifier provided
+ by client, within the ancestor dataset
+ scope. Ensures idempotency and used for
+ request deduplication. Server-generated
+ if unspecified. Up to 128 characters
+ long. This is returned as
+ google.longrunning.Operation.name in the
+ response.
+
+ This corresponds to the ``request_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (google.cloud.recommendationengine_v1beta1.types.InputConfig):
+ Required. The desired input location
+ of the data.
+
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
+ Optional. The desired location of
+ errors incurred during the Import.
+
+ This corresponds to the ``errors_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -474,31 +828,56 @@ def import_catalog_items(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.import_.ImportCatalogItemsResponse``:
- Response of the ImportCatalogItemsRequest. If the long
- running operation is done, then this message is returned
- by the google.longrunning.Operations.response field if
- the operation was successful.
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.ImportCatalogItemsResponse` Response of the ImportCatalogItemsRequest. If the long running
+ operation is done, then this message is returned by
+ the google.longrunning.Operations.response field if
+ the operation was successful.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, request_id, input_config, errors_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = import_.ImportCatalogItemsRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a import_.ImportCatalogItemsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, import_.ImportCatalogItemsRequest):
+ request = import_.ImportCatalogItemsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if request_id is not None:
+ request.request_id = request_id
+ if input_config is not None:
+ request.input_config = input_config
+ if errors_config is not None:
+ request.errors_config = errors_config
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.import_catalog_items,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.import_catalog_items]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
@@ -513,13 +892,13 @@ def import_catalog_items(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
- "google-cloud-recommendations-ai"
- ).version
+ "google-cloud-recommendations-ai",
+ ).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("CatalogServiceClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/pagers.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/pagers.py
index 172d9945..f1242353 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/pagers.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/pagers.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, Callable, Iterable
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.recommendationengine_v1beta1.types import catalog
from google.cloud.recommendationengine_v1beta1.types import catalog_service
@@ -25,7 +34,7 @@ class ListCatalogItemsPager:
"""A pager for iterating through ``list_catalog_items`` requests.
This class thinly wraps an initial
- :class:`~.catalog_service.ListCatalogItemsResponse` object, and
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse` object, and
provides an ``__iter__`` method to iterate through its
``catalog_items`` field.
@@ -34,33 +43,35 @@ class ListCatalogItemsPager:
through the ``catalog_items`` field on the
corresponding responses.
- All the usual :class:`~.catalog_service.ListCatalogItemsResponse`
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
- method: Callable[
- [catalog_service.ListCatalogItemsRequest],
- catalog_service.ListCatalogItemsResponse,
- ],
+ method: Callable[..., catalog_service.ListCatalogItemsResponse],
request: catalog_service.ListCatalogItemsRequest,
response: catalog_service.ListCatalogItemsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.catalog_service.ListCatalogItemsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsRequest):
The initial request object.
- response (:class:`~.catalog_service.ListCatalogItemsResponse`):
+ response (google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse):
The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
"""
self._method = method
self._request = catalog_service.ListCatalogItemsRequest(request)
self._response = response
+ self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@@ -70,7 +81,7 @@ def pages(self) -> Iterable[catalog_service.ListCatalogItemsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request)
+ self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[catalog.CatalogItem]:
@@ -79,3 +90,69 @@ def __iter__(self) -> Iterable[catalog.CatalogItem]:
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListCatalogItemsAsyncPager:
+ """A pager for iterating through ``list_catalog_items`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``catalog_items`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListCatalogItems`` requests and continue to iterate
+ through the ``catalog_items`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[catalog_service.ListCatalogItemsResponse]],
+ request: catalog_service.ListCatalogItemsRequest,
+ response: catalog_service.ListCatalogItemsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsRequest):
+ The initial request object.
+ response (google.cloud.recommendationengine_v1beta1.types.ListCatalogItemsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = catalog_service.ListCatalogItemsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[catalog_service.ListCatalogItemsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[catalog.CatalogItem]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.catalog_items:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/__init__.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/__init__.py
index 132080b9..dac1279d 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,16 @@
from .base import CatalogServiceTransport
from .grpc import CatalogServiceGrpcTransport
+from .grpc_asyncio import CatalogServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[CatalogServiceTransport]]
_transport_registry["grpc"] = CatalogServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = CatalogServiceGrpcAsyncIOTransport
-
-__all__ = ("CatalogServiceTransport", "CatalogServiceGrpcTransport")
+__all__ = (
+ "CatalogServiceTransport",
+ "CatalogServiceGrpcTransport",
+ "CatalogServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/base.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/base.py
index 7e87d539..f52726a5 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/base.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/base.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,8 +17,12 @@
import abc
import typing
+import pkg_resources
-from google import auth
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials # type: ignore
@@ -29,7 +33,17 @@
from google.protobuf import empty_pb2 as empty # type: ignore
-class CatalogServiceTransport(metaclass=abc.ABCMeta):
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class CatalogServiceTransport(abc.ABC):
"""Abstract transport class for CatalogService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
@@ -39,6 +53,11 @@ def __init__(
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
) -> None:
"""Instantiate the transport.
@@ -49,67 +68,196 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
- if credentials is None:
- credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=self._scopes, quota_project_id=quota_project_id
+ )
# Save the credentials.
self._credentials = credentials
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_catalog_item: gapic_v1.method.wrap_method(
+ self.create_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.get_catalog_item: gapic_v1.method.wrap_method(
+ self.get_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.list_catalog_items: gapic_v1.method.wrap_method(
+ self.list_catalog_items,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.update_catalog_item: gapic_v1.method.wrap_method(
+ self.update_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.delete_catalog_item: gapic_v1.method.wrap_method(
+ self.delete_catalog_item,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.import_catalog_items: gapic_v1.method.wrap_method(
+ self.import_catalog_items,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ }
+
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Return the client designed to process long-running operations."""
- raise NotImplementedError
+ raise NotImplementedError()
@property
def create_catalog_item(
- self
+ self,
) -> typing.Callable[
- [catalog_service.CreateCatalogItemRequest], catalog.CatalogItem
+ [catalog_service.CreateCatalogItemRequest],
+ typing.Union[catalog.CatalogItem, typing.Awaitable[catalog.CatalogItem]],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def get_catalog_item(
- self
- ) -> typing.Callable[[catalog_service.GetCatalogItemRequest], catalog.CatalogItem]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [catalog_service.GetCatalogItemRequest],
+ typing.Union[catalog.CatalogItem, typing.Awaitable[catalog.CatalogItem]],
+ ]:
+ raise NotImplementedError()
@property
def list_catalog_items(
- self
+ self,
) -> typing.Callable[
[catalog_service.ListCatalogItemsRequest],
- catalog_service.ListCatalogItemsResponse,
+ typing.Union[
+ catalog_service.ListCatalogItemsResponse,
+ typing.Awaitable[catalog_service.ListCatalogItemsResponse],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def update_catalog_item(
- self
+ self,
) -> typing.Callable[
- [catalog_service.UpdateCatalogItemRequest], catalog.CatalogItem
+ [catalog_service.UpdateCatalogItemRequest],
+ typing.Union[catalog.CatalogItem, typing.Awaitable[catalog.CatalogItem]],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def delete_catalog_item(
- self
- ) -> typing.Callable[[catalog_service.DeleteCatalogItemRequest], empty.Empty]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [catalog_service.DeleteCatalogItemRequest],
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
+ ]:
+ raise NotImplementedError()
@property
def import_catalog_items(
- self
- ) -> typing.Callable[[import_.ImportCatalogItemsRequest], operations.Operation]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [import_.ImportCatalogItemsRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
__all__ = ("CatalogServiceTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc.py
index 51597138..3a25e184 100644
--- a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc.py
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,11 +15,15 @@
# limitations under the License.
#
-from typing import Callable, Dict
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
@@ -29,7 +33,7 @@
from google.longrunning import operations_pb2 as operations # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import CatalogServiceTransport
+from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
class CatalogServiceGrpcTransport(CatalogServiceTransport):
@@ -46,12 +50,22 @@ class CatalogServiceGrpcTransport(CatalogServiceTransport):
top of HTTP/2); the ``grpcio`` package must be installed.
"""
+ _stubs: Dict[str, Callable]
+
def __init__(
self,
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- channel: grpc.Channel = None
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -63,61 +77,155 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
- if channel:
- credentials = False
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
- # Run the base constructor.
- super().__init__(host=host, credentials=credentials)
- self._stubs = {} # type: Dict[str, Callable]
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- # If a channel was explicitly provided, set it.
if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- **kwargs
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
+ scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
- host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
@@ -128,17 +236,15 @@ def operations_client(self) -> operations_v1.OperationsClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsClient(
- self.grpc_channel
- )
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def create_catalog_item(
- self
+ self,
) -> Callable[[catalog_service.CreateCatalogItemRequest], catalog.CatalogItem]:
r"""Return a callable for the create catalog item method over gRPC.
@@ -164,7 +270,7 @@ def create_catalog_item(
@property
def get_catalog_item(
- self
+ self,
) -> Callable[[catalog_service.GetCatalogItemRequest], catalog.CatalogItem]:
r"""Return a callable for the get catalog item method over gRPC.
@@ -190,7 +296,7 @@ def get_catalog_item(
@property
def list_catalog_items(
- self
+ self,
) -> Callable[
[catalog_service.ListCatalogItemsRequest],
catalog_service.ListCatalogItemsResponse,
@@ -219,7 +325,7 @@ def list_catalog_items(
@property
def update_catalog_item(
- self
+ self,
) -> Callable[[catalog_service.UpdateCatalogItemRequest], catalog.CatalogItem]:
r"""Return a callable for the update catalog item method over gRPC.
@@ -246,7 +352,7 @@ def update_catalog_item(
@property
def delete_catalog_item(
- self
+ self,
) -> Callable[[catalog_service.DeleteCatalogItemRequest], empty.Empty]:
r"""Return a callable for the delete catalog item method over gRPC.
@@ -272,7 +378,7 @@ def delete_catalog_item(
@property
def import_catalog_items(
- self
+ self,
) -> Callable[[import_.ImportCatalogItemsRequest], operations.Operation]:
r"""Return a callable for the import catalog items method over gRPC.
diff --git a/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc_asyncio.py b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc_asyncio.py
new file mode 100644
index 00000000..f63a2e53
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/catalog_service/transports/grpc_asyncio.py
@@ -0,0 +1,424 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.recommendationengine_v1beta1.types import catalog
+from google.cloud.recommendationengine_v1beta1.types import catalog_service
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.longrunning import operations_pb2 as operations # type: ignore
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import CatalogServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import CatalogServiceGrpcTransport
+
+
+class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport):
+ """gRPC AsyncIO backend transport for CatalogService.
+
+ Service for ingesting catalog information of the customer's
+ website.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self._operations_client
+
+ @property
+ def create_catalog_item(
+ self,
+ ) -> Callable[
+ [catalog_service.CreateCatalogItemRequest], Awaitable[catalog.CatalogItem]
+ ]:
+ r"""Return a callable for the create catalog item method over gRPC.
+
+ Creates a catalog item.
+
+ Returns:
+ Callable[[~.CreateCatalogItemRequest],
+ Awaitable[~.CatalogItem]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_catalog_item" not in self._stubs:
+ self._stubs["create_catalog_item"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/CreateCatalogItem",
+ request_serializer=catalog_service.CreateCatalogItemRequest.serialize,
+ response_deserializer=catalog.CatalogItem.deserialize,
+ )
+ return self._stubs["create_catalog_item"]
+
+ @property
+ def get_catalog_item(
+ self,
+ ) -> Callable[
+ [catalog_service.GetCatalogItemRequest], Awaitable[catalog.CatalogItem]
+ ]:
+ r"""Return a callable for the get catalog item method over gRPC.
+
+ Gets a specific catalog item.
+
+ Returns:
+ Callable[[~.GetCatalogItemRequest],
+ Awaitable[~.CatalogItem]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_catalog_item" not in self._stubs:
+ self._stubs["get_catalog_item"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/GetCatalogItem",
+ request_serializer=catalog_service.GetCatalogItemRequest.serialize,
+ response_deserializer=catalog.CatalogItem.deserialize,
+ )
+ return self._stubs["get_catalog_item"]
+
+ @property
+ def list_catalog_items(
+ self,
+ ) -> Callable[
+ [catalog_service.ListCatalogItemsRequest],
+ Awaitable[catalog_service.ListCatalogItemsResponse],
+ ]:
+ r"""Return a callable for the list catalog items method over gRPC.
+
+ Gets a list of catalog items.
+
+ Returns:
+ Callable[[~.ListCatalogItemsRequest],
+ Awaitable[~.ListCatalogItemsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_catalog_items" not in self._stubs:
+ self._stubs["list_catalog_items"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/ListCatalogItems",
+ request_serializer=catalog_service.ListCatalogItemsRequest.serialize,
+ response_deserializer=catalog_service.ListCatalogItemsResponse.deserialize,
+ )
+ return self._stubs["list_catalog_items"]
+
+ @property
+ def update_catalog_item(
+ self,
+ ) -> Callable[
+ [catalog_service.UpdateCatalogItemRequest], Awaitable[catalog.CatalogItem]
+ ]:
+ r"""Return a callable for the update catalog item method over gRPC.
+
+ Updates a catalog item. Partial updating is
+ supported. Non-existing items will be created.
+
+ Returns:
+ Callable[[~.UpdateCatalogItemRequest],
+ Awaitable[~.CatalogItem]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "update_catalog_item" not in self._stubs:
+ self._stubs["update_catalog_item"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/UpdateCatalogItem",
+ request_serializer=catalog_service.UpdateCatalogItemRequest.serialize,
+ response_deserializer=catalog.CatalogItem.deserialize,
+ )
+ return self._stubs["update_catalog_item"]
+
+ @property
+ def delete_catalog_item(
+ self,
+ ) -> Callable[[catalog_service.DeleteCatalogItemRequest], Awaitable[empty.Empty]]:
+ r"""Return a callable for the delete catalog item method over gRPC.
+
+ Deletes a catalog item.
+
+ Returns:
+ Callable[[~.DeleteCatalogItemRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_catalog_item" not in self._stubs:
+ self._stubs["delete_catalog_item"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/DeleteCatalogItem",
+ request_serializer=catalog_service.DeleteCatalogItemRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_catalog_item"]
+
+ @property
+ def import_catalog_items(
+ self,
+ ) -> Callable[[import_.ImportCatalogItemsRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the import catalog items method over gRPC.
+
+ Bulk import of multiple catalog items. Request
+ processing may be synchronous. No partial updating
+ supported. Non-existing items will be created.
+
+ Operation.response is of type ImportResponse. Note that
+ it is possible for a subset of the items to be
+ successfully updated.
+
+ Returns:
+ Callable[[~.ImportCatalogItemsRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_catalog_items" not in self._stubs:
+ self._stubs["import_catalog_items"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.CatalogService/ImportCatalogItems",
+ request_serializer=import_.ImportCatalogItemsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_catalog_items"]
+
+
+__all__ = ("CatalogServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/__init__.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/__init__.py
index 929b694f..19d1478c 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,5 +16,9 @@
#
from .client import PredictionApiKeyRegistryClient
+from .async_client import PredictionApiKeyRegistryAsyncClient
-__all__ = ("PredictionApiKeyRegistryClient",)
+__all__ = (
+ "PredictionApiKeyRegistryClient",
+ "PredictionApiKeyRegistryAsyncClient",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py
new file mode 100644
index 00000000..b3c3251d
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/async_client.py
@@ -0,0 +1,470 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
+ pagers,
+)
+from google.cloud.recommendationengine_v1beta1.types import (
+ prediction_apikey_registry_service,
+)
+
+from .transports.base import PredictionApiKeyRegistryTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import PredictionApiKeyRegistryGrpcAsyncIOTransport
+from .client import PredictionApiKeyRegistryClient
+
+
+class PredictionApiKeyRegistryAsyncClient:
+ """Service for registering API keys for use with the ``predict``
+ method. If you use an API key to request predictions, you must first
+ register the API key. Otherwise, your prediction request is
+ rejected. If you use OAuth to authenticate your ``predict`` method
+ call, you do not need to register an API key. You can register up to
+ 20 API keys per project.
+ """
+
+ _client: PredictionApiKeyRegistryClient
+
+ DEFAULT_ENDPOINT = PredictionApiKeyRegistryClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = PredictionApiKeyRegistryClient.DEFAULT_MTLS_ENDPOINT
+
+ event_store_path = staticmethod(PredictionApiKeyRegistryClient.event_store_path)
+ parse_event_store_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_event_store_path
+ )
+ prediction_api_key_registration_path = staticmethod(
+ PredictionApiKeyRegistryClient.prediction_api_key_registration_path
+ )
+ parse_prediction_api_key_registration_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_prediction_api_key_registration_path
+ )
+
+ common_billing_account_path = staticmethod(
+ PredictionApiKeyRegistryClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(PredictionApiKeyRegistryClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ PredictionApiKeyRegistryClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(
+ PredictionApiKeyRegistryClient.common_project_path
+ )
+ parse_common_project_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(
+ PredictionApiKeyRegistryClient.common_location_path
+ )
+ parse_common_location_path = staticmethod(
+ PredictionApiKeyRegistryClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionApiKeyRegistryAsyncClient: The constructed client.
+ """
+ return PredictionApiKeyRegistryClient.from_service_account_info.__func__(PredictionApiKeyRegistryAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionApiKeyRegistryAsyncClient: The constructed client.
+ """
+ return PredictionApiKeyRegistryClient.from_service_account_file.__func__(PredictionApiKeyRegistryAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> PredictionApiKeyRegistryTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PredictionApiKeyRegistryTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(PredictionApiKeyRegistryClient).get_transport_class,
+ type(PredictionApiKeyRegistryClient),
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, PredictionApiKeyRegistryTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the prediction api key registry client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.PredictionApiKeyRegistryTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = PredictionApiKeyRegistryClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def create_prediction_api_key_registration(
+ self,
+ request: prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest = None,
+ *,
+ parent: str = None,
+ prediction_api_key_registration: prediction_apikey_registry_service.PredictionApiKeyRegistration = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> prediction_apikey_registry_service.PredictionApiKeyRegistration:
+ r"""Register an API key for use with predict method.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.CreatePredictionApiKeyRegistrationRequest`):
+ The request object. Request message for the
+ `CreatePredictionApiKeyRegistration` method.
+ parent (:class:`str`):
+ Required. The parent resource path.
+ ``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ prediction_api_key_registration (:class:`google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration`):
+ Required. The prediction API key
+ registration.
+
+ This corresponds to the ``prediction_api_key_registration`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration:
+ Registered API key.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, prediction_api_key_registration])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if prediction_api_key_registration is not None:
+ request.prediction_api_key_registration = prediction_api_key_registration
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_prediction_api_key_registration,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_prediction_api_key_registrations(
+ self,
+ request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListPredictionApiKeyRegistrationsAsyncPager:
+ r"""List the registered apiKeys for use with predict
+ method.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest`):
+ The request object. Request message for the
+ `ListPredictionApiKeyRegistrations`.
+ parent (:class:`str`):
+ Required. The parent placement resource name such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.pagers.ListPredictionApiKeyRegistrationsAsyncPager:
+ Response message for the
+ ListPredictionApiKeyRegistrations.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_prediction_api_key_registrations,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListPredictionApiKeyRegistrationsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def delete_prediction_api_key_registration(
+ self,
+ request: prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Unregister an apiKey from using for predict method.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.DeletePredictionApiKeyRegistrationRequest`):
+ The request object. Request message for
+ `DeletePredictionApiKeyRegistration` method.
+ name (:class:`str`):
+ Required. The API key to unregister including full
+ resource path.
+                ``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/predictionApiKeyRegistrations/<YOUR_API_KEY>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_prediction_api_key_registration,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("PredictionApiKeyRegistryAsyncClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py
index 97473b64..8a2f5c5a 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,14 +16,20 @@
#
from collections import OrderedDict
-from typing import Dict, Sequence, Tuple, Type, Union
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
@@ -33,8 +39,9 @@
prediction_apikey_registry_service,
)
-from .transports.base import PredictionApiKeyRegistryTransport
+from .transports.base import PredictionApiKeyRegistryTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import PredictionApiKeyRegistryGrpcTransport
+from .transports.grpc_asyncio import PredictionApiKeyRegistryGrpcAsyncIOTransport
class PredictionApiKeyRegistryClientMeta(type):
@@ -49,9 +56,10 @@ class PredictionApiKeyRegistryClientMeta(type):
OrderedDict()
) # type: Dict[str, Type[PredictionApiKeyRegistryTransport]]
_transport_registry["grpc"] = PredictionApiKeyRegistryGrpcTransport
+ _transport_registry["grpc_asyncio"] = PredictionApiKeyRegistryGrpcAsyncIOTransport
def get_transport_class(
- cls, label: str = None
+ cls, label: str = None,
) -> Type[PredictionApiKeyRegistryTransport]:
"""Return an appropriate transport class.
@@ -80,10 +88,56 @@ class PredictionApiKeyRegistryClient(metaclass=PredictionApiKeyRegistryClientMet
20 API keys per project.
"""
- DEFAULT_OPTIONS = ClientOptions.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "recommendationengine.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionApiKeyRegistryClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -96,7 +150,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ PredictionApiKeyRegistryClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -104,12 +158,128 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PredictionApiKeyRegistryTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PredictionApiKeyRegistryTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def event_store_path(
+ project: str, location: str, catalog: str, event_store: str,
+ ) -> str:
+ """Return a fully-qualified event_store string."""
+ return "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}".format(
+ project=project,
+ location=location,
+ catalog=catalog,
+ event_store=event_store,
+ )
+
+ @staticmethod
+ def parse_event_store_path(path: str) -> Dict[str, str]:
+ """Parse a event_store path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/eventStores/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def prediction_api_key_registration_path(
+ project: str,
+ location: str,
+ catalog: str,
+ event_store: str,
+ prediction_api_key_registration: str,
+ ) -> str:
+ """Return a fully-qualified prediction_api_key_registration string."""
+ return "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}/predictionApiKeyRegistrations/{prediction_api_key_registration}".format(
+ project=project,
+ location=location,
+ catalog=catalog,
+ event_store=event_store,
+ prediction_api_key_registration=prediction_api_key_registration,
+ )
+
+ @staticmethod
+ def parse_prediction_api_key_registration_path(path: str) -> Dict[str, str]:
+ """Parse a prediction_api_key_registration path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/eventStores/(?P.+?)/predictionApiKeyRegistrations/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, PredictionApiKeyRegistryTransport] = None,
- client_options: ClientOptions = DEFAULT_OPTIONS,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, PredictionApiKeyRegistryTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the prediction api key registry client.
@@ -119,36 +289,109 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.PredictionApiKeyRegistryTransport]): The
+ transport (Union[str, PredictionApiKeyRegistryTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, PredictionApiKeyRegistryTransport):
- if credentials:
+ # transport is a PredictionApiKeyRegistryTransport instance.
+ if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
- host=client_options.api_endpoint
- or "recommendationengine.googleapis.com",
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def create_prediction_api_key_registration(
self,
request: prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest = None,
*,
+ parent: str = None,
+ prediction_api_key_registration: prediction_apikey_registry_service.PredictionApiKeyRegistration = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -156,9 +399,23 @@ def create_prediction_api_key_registration(
r"""Register an API key for use with predict method.
Args:
- request (:class:`~.prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.CreatePredictionApiKeyRegistrationRequest):
The request object. Request message for the
`CreatePredictionApiKeyRegistration` method.
+ parent (str):
+ Required. The parent resource path.
+ ``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ prediction_api_key_registration (google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration):
+ Required. The prediction API key
+ registration.
+
+ This corresponds to the ``prediction_api_key_registration`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -167,25 +424,55 @@ def create_prediction_api_key_registration(
sent along with the request as metadata.
Returns:
- ~.prediction_apikey_registry_service.PredictionApiKeyRegistration:
+ google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration:
Registered Api Key.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, prediction_api_key_registration])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest(
- request
- )
+ # Minor optimization to avoid making a copy if the user passes
+ # in a prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(
+ request,
+ prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest,
+ ):
+ request = prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if prediction_api_key_registration is not None:
+ request.prediction_api_key_registration = (
+ prediction_api_key_registration
+ )
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.create_prediction_api_key_registration,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[
+ self._transport.create_prediction_api_key_registration
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -194,6 +481,7 @@ def list_prediction_api_key_registrations(
self,
request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest = None,
*,
+ parent: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -202,9 +490,16 @@ def list_prediction_api_key_registrations(
method.
Args:
- request (:class:`~.prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest):
The request object. Request message for the
`ListPredictionApiKeyRegistrations`.
+ parent (str):
+ Required. The parent placement resource name such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -213,27 +508,47 @@ def list_prediction_api_key_registrations(
sent along with the request as metadata.
Returns:
- ~.pagers.ListPredictionApiKeyRegistrationsPager:
+ google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.pagers.ListPredictionApiKeyRegistrationsPager:
Response message for the
- ``ListPredictionApiKeyRegistrations``.
+ ListPredictionApiKeyRegistrations.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
- request
- )
+ # Minor optimization to avoid making a copy if the user passes
+ # in a prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(
+ request,
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
+ ):
+ request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_prediction_api_key_registrations,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[
+ self._transport.list_prediction_api_key_registrations
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -242,12 +557,12 @@ def list_prediction_api_key_registrations(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListPredictionApiKeyRegistrationsPager(
- method=rpc, request=request, response=response
+ method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
@@ -257,6 +572,7 @@ def delete_prediction_api_key_registration(
self,
request: prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest = None,
*,
+ name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -264,9 +580,17 @@ def delete_prediction_api_key_registration(
r"""Unregister an apiKey from using for predict method.
Args:
- request (:class:`~.prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.DeletePredictionApiKeyRegistrationRequest):
The request object. Request message for
`DeletePredictionApiKeyRegistration` method.
+ name (str):
+ Required. The API key to unregister including full
+ resource path.
+                ``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/predictionApiKeyRegistrations/<YOUR_API_KEY>``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -275,31 +599,59 @@ def delete_prediction_api_key_registration(
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest(
- request
- )
+ # Minor optimization to avoid making a copy if the user passes
+ # in a prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(
+ request,
+ prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest,
+ ):
+ request = prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest(
+ request
+ )
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_prediction_api_key_registration,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[
+ self._transport.delete_prediction_api_key_registration
+ ]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
- "google-cloud-recommendations-ai"
- ).version
+ "google-cloud-recommendations-ai",
+ ).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PredictionApiKeyRegistryClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/pagers.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/pagers.py
index 0b87c655..1f565842 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/pagers.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/pagers.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, Callable, Iterable
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.recommendationengine_v1beta1.types import (
prediction_apikey_registry_service,
@@ -26,7 +35,7 @@ class ListPredictionApiKeyRegistrationsPager:
"""A pager for iterating through ``list_prediction_api_key_registrations`` requests.
This class thinly wraps an initial
- :class:`~.prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse` object, and
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse` object, and
provides an ``__iter__`` method to iterate through its
``prediction_api_key_registrations`` field.
@@ -35,7 +44,7 @@ class ListPredictionApiKeyRegistrationsPager:
through the ``prediction_api_key_registrations`` field on the
corresponding responses.
- All the usual :class:`~.prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse`
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -43,50 +52,132 @@ class ListPredictionApiKeyRegistrationsPager:
def __init__(
self,
method: Callable[
- [
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest
- ],
+ ...,
prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
],
request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
response: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest):
The initial request object.
- response (:class:`~.prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse`):
+ response (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse):
The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
"""
self._method = method
self._request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
request
)
self._response = response
+ self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@property
def pages(
- self
+ self,
) -> Iterable[
prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse
]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request)
+ self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(
- self
+ self,
) -> Iterable[prediction_apikey_registry_service.PredictionApiKeyRegistration]:
for page in self.pages:
yield from page.prediction_api_key_registrations
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListPredictionApiKeyRegistrationsAsyncPager:
+ """A pager for iterating through ``list_prediction_api_key_registrations`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``prediction_api_key_registrations`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListPredictionApiKeyRegistrations`` requests and continue to iterate
+ through the ``prediction_api_key_registrations`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[
+ ...,
+ Awaitable[
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse
+ ],
+ ],
+ request: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
+ response: prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsRequest):
+ The initial request object.
+ response (google.cloud.recommendationengine_v1beta1.types.ListPredictionApiKeyRegistrationsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
+ request
+ )
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(
+ self,
+ ) -> AsyncIterable[
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse
+ ]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(
+ self,
+ ) -> AsyncIterable[prediction_apikey_registry_service.PredictionApiKeyRegistration]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.prediction_api_key_registrations:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/__init__.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/__init__.py
index b9be7297..75ef7aa6 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,6 +20,7 @@
from .base import PredictionApiKeyRegistryTransport
from .grpc import PredictionApiKeyRegistryGrpcTransport
+from .grpc_asyncio import PredictionApiKeyRegistryGrpcAsyncIOTransport
# Compile a registry of transports.
@@ -27,6 +28,10 @@
OrderedDict()
) # type: Dict[str, Type[PredictionApiKeyRegistryTransport]]
_transport_registry["grpc"] = PredictionApiKeyRegistryGrpcTransport
+_transport_registry["grpc_asyncio"] = PredictionApiKeyRegistryGrpcAsyncIOTransport
-
-__all__ = ("PredictionApiKeyRegistryTransport", "PredictionApiKeyRegistryGrpcTransport")
+__all__ = (
+ "PredictionApiKeyRegistryTransport",
+ "PredictionApiKeyRegistryGrpcTransport",
+ "PredictionApiKeyRegistryGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/base.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/base.py
index dd0a800e..891c14b7 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/base.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/base.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,8 +17,12 @@
import abc
import typing
+import pkg_resources
-from google import auth
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.cloud.recommendationengine_v1beta1.types import (
@@ -27,7 +31,17 @@
from google.protobuf import empty_pb2 as empty # type: ignore
-class PredictionApiKeyRegistryTransport(metaclass=abc.ABCMeta):
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class PredictionApiKeyRegistryTransport(abc.ABC):
"""Abstract transport class for PredictionApiKeyRegistry."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
@@ -37,6 +51,11 @@ def __init__(
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
) -> None:
"""Instantiate the transport.
@@ -47,46 +66,129 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scope (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
- if credentials is None:
- credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=self._scopes, quota_project_id=quota_project_id
+ )
# Save the credentials.
self._credentials = credentials
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_prediction_api_key_registration: gapic_v1.method.wrap_method(
+ self.create_prediction_api_key_registration,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.list_prediction_api_key_registrations: gapic_v1.method.wrap_method(
+ self.list_prediction_api_key_registrations,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.delete_prediction_api_key_registration: gapic_v1.method.wrap_method(
+ self.delete_prediction_api_key_registration,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ }
+
@property
def create_prediction_api_key_registration(
- self
+ self,
) -> typing.Callable[
[prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest],
- prediction_apikey_registry_service.PredictionApiKeyRegistration,
+ typing.Union[
+ prediction_apikey_registry_service.PredictionApiKeyRegistration,
+ typing.Awaitable[
+ prediction_apikey_registry_service.PredictionApiKeyRegistration
+ ],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def list_prediction_api_key_registrations(
- self
+ self,
) -> typing.Callable[
[prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest],
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
+ typing.Union[
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
+ typing.Awaitable[
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse
+ ],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def delete_prediction_api_key_registration(
- self
+ self,
) -> typing.Callable[
[prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest],
- empty.Empty,
+ typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
]:
- raise NotImplementedError
+ raise NotImplementedError()
__all__ = ("PredictionApiKeyRegistryTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc.py
index 27ec0d7e..8e7918cb 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,10 +15,14 @@
# limitations under the License.
#
-from typing import Callable, Dict
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
@@ -27,7 +31,7 @@
)
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import PredictionApiKeyRegistryTransport
+from .base import PredictionApiKeyRegistryTransport, DEFAULT_CLIENT_INFO
class PredictionApiKeyRegistryGrpcTransport(PredictionApiKeyRegistryTransport):
@@ -48,12 +52,22 @@ class PredictionApiKeyRegistryGrpcTransport(PredictionApiKeyRegistryTransport):
top of HTTP/2); the ``grpcio`` package must be installed.
"""
+ _stubs: Dict[str, Callable]
+
def __init__(
self,
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- channel: grpc.Channel = None
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -65,66 +79,159 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+            ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
- if channel:
- credentials = False
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
- # Run the base constructor.
- super().__init__(host=host, credentials=credentials)
- self._stubs = {} # type: Dict[str, Callable]
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- # If a channel was explicitly provided, set it.
if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- **kwargs
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
+ scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
- host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
def create_prediction_api_key_registration(
- self
+ self,
) -> Callable[
[prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest],
prediction_apikey_registry_service.PredictionApiKeyRegistration,
@@ -156,7 +263,7 @@ def create_prediction_api_key_registration(
@property
def list_prediction_api_key_registrations(
- self
+ self,
) -> Callable[
[prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest],
prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse,
@@ -189,7 +296,7 @@ def list_prediction_api_key_registrations(
@property
def delete_prediction_api_key_registration(
- self
+ self,
) -> Callable[
[prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest],
empty.Empty,
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc_asyncio.py b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc_asyncio.py
new file mode 100644
index 00000000..8ecd591b
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_api_key_registry/transports/grpc_asyncio.py
@@ -0,0 +1,336 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.recommendationengine_v1beta1.types import (
+ prediction_apikey_registry_service,
+)
+from google.protobuf import empty_pb2 as empty # type: ignore
+
+from .base import PredictionApiKeyRegistryTransport, DEFAULT_CLIENT_INFO
+from .grpc import PredictionApiKeyRegistryGrpcTransport
+
+
+class PredictionApiKeyRegistryGrpcAsyncIOTransport(PredictionApiKeyRegistryTransport):
+ """gRPC AsyncIO backend transport for PredictionApiKeyRegistry.
+
+ Service for registering API keys for use with the ``predict``
+ method. If you use an API key to request predictions, you must first
+ register the API key. Otherwise, your prediction request is
+ rejected. If you use OAuth to authenticate your ``predict`` method
+ call, you do not need to register an API key. You can register up to
+ 20 API keys per project.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def create_prediction_api_key_registration(
+ self,
+ ) -> Callable[
+ [prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest],
+ Awaitable[prediction_apikey_registry_service.PredictionApiKeyRegistration],
+ ]:
+ r"""Return a callable for the create prediction api key
+ registration method over gRPC.
+
+ Register an API key for use with predict method.
+
+ Returns:
+ Callable[[~.CreatePredictionApiKeyRegistrationRequest],
+ Awaitable[~.PredictionApiKeyRegistration]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_prediction_api_key_registration" not in self._stubs:
+ self._stubs[
+ "create_prediction_api_key_registration"
+ ] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistry/CreatePredictionApiKeyRegistration",
+ request_serializer=prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest.serialize,
+ response_deserializer=prediction_apikey_registry_service.PredictionApiKeyRegistration.deserialize,
+ )
+ return self._stubs["create_prediction_api_key_registration"]
+
+ @property
+ def list_prediction_api_key_registrations(
+ self,
+ ) -> Callable[
+ [prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest],
+ Awaitable[
+ prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse
+ ],
+ ]:
+ r"""Return a callable for the list prediction api key
+ registrations method over gRPC.
+
+ List the registered apiKeys for use with predict
+ method.
+
+ Returns:
+ Callable[[~.ListPredictionApiKeyRegistrationsRequest],
+ Awaitable[~.ListPredictionApiKeyRegistrationsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_prediction_api_key_registrations" not in self._stubs:
+ self._stubs[
+ "list_prediction_api_key_registrations"
+ ] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistry/ListPredictionApiKeyRegistrations",
+ request_serializer=prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest.serialize,
+ response_deserializer=prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse.deserialize,
+ )
+ return self._stubs["list_prediction_api_key_registrations"]
+
+ @property
+ def delete_prediction_api_key_registration(
+ self,
+ ) -> Callable[
+ [prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest],
+ Awaitable[empty.Empty],
+ ]:
+ r"""Return a callable for the delete prediction api key
+ registration method over gRPC.
+
+        Unregister an apiKey from use with the predict method.
+
+ Returns:
+ Callable[[~.DeletePredictionApiKeyRegistrationRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_prediction_api_key_registration" not in self._stubs:
+ self._stubs[
+ "delete_prediction_api_key_registration"
+ ] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistry/DeletePredictionApiKeyRegistration",
+ request_serializer=prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest.serialize,
+ response_deserializer=empty.Empty.FromString,
+ )
+ return self._stubs["delete_prediction_api_key_registration"]
+
+
+__all__ = ("PredictionApiKeyRegistryGrpcAsyncIOTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/__init__.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/__init__.py
index 526c7536..0c847693 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,5 +16,9 @@
#
from .client import PredictionServiceClient
+from .async_client import PredictionServiceAsyncClient
-__all__ = ("PredictionServiceClient",)
+__all__ = (
+ "PredictionServiceClient",
+ "PredictionServiceAsyncClient",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py
new file mode 100644
index 00000000..f00d87e4
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/async_client.py
@@ -0,0 +1,327 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.recommendationengine_v1beta1.services.prediction_service import pagers
+from google.cloud.recommendationengine_v1beta1.types import prediction_service
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
+
+from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport
+from .client import PredictionServiceClient
+
+
+class PredictionServiceAsyncClient:
+ """Service for making recommendation prediction."""
+
+ _client: PredictionServiceClient
+
+ DEFAULT_ENDPOINT = PredictionServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = PredictionServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ placement_path = staticmethod(PredictionServiceClient.placement_path)
+ parse_placement_path = staticmethod(PredictionServiceClient.parse_placement_path)
+
+ common_billing_account_path = staticmethod(
+ PredictionServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ PredictionServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(PredictionServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ PredictionServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ PredictionServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ PredictionServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(PredictionServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ PredictionServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(PredictionServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ PredictionServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionServiceAsyncClient: The constructed client.
+ """
+ return PredictionServiceClient.from_service_account_info.__func__(PredictionServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionServiceAsyncClient: The constructed client.
+ """
+ return PredictionServiceClient.from_service_account_file.__func__(PredictionServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> PredictionServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PredictionServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(PredictionServiceClient).get_transport_class, type(PredictionServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, PredictionServiceTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the prediction service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.PredictionServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = PredictionServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def predict(
+ self,
+ request: prediction_service.PredictRequest = None,
+ *,
+ name: str = None,
+ user_event: gcr_user_event.UserEvent = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.PredictAsyncPager:
+ r"""Makes a recommendation prediction. If using API Key based
+ authentication, the API Key must be registered using the
+ [PredictionApiKeyRegistry][google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistry]
+ service. `Learn
+        more </recommendations-ai/docs/setting-up#register-key>`__.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.PredictRequest`):
+ The request object. Request message for Predict method.
+ name (:class:`str`):
+ Required. Full resource name of the format:
+ {name=projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/placements/*}
+ The id of the recommendation engine placement. This id
+ is used to identify the set of models that will be used
+ to make the prediction.
+
+ We currently support three placements with the following
+ IDs by default:
+
+ - ``shopping_cart``: Predicts items frequently bought
+ together with one or more catalog items in the same
+ shopping session. Commonly displayed after
+ ``add-to-cart`` events, on product detail pages, or
+ on the shopping cart page.
+
+ - ``home_page``: Predicts the next product that a user
+ will most likely engage with or purchase based on the
+ shopping or viewing history of the specified
+ ``userId`` or ``visitorId``. For example -
+ Recommendations for you.
+
+ - ``product_detail``: Predicts the next product that a
+ user will most likely engage with or purchase. The
+ prediction is based on the shopping or viewing
+ history of the specified ``userId`` or ``visitorId``
+ and its relevance to a specified ``CatalogItem``.
+ Typically used on product detail pages. For example -
+ More items like this.
+
+ - ``recently_viewed_default``: Returns up to 75 items
+ recently viewed by the specified ``userId`` or
+ ``visitorId``, most recent ones first. Returns
+ nothing if neither of them has viewed any items yet.
+ For example - Recently viewed.
+
+ The full list of available placements can be seen at
+ https://console.cloud.google.com/recommendation/datafeeds/default_catalog/dashboard
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (:class:`google.cloud.recommendationengine_v1beta1.types.UserEvent`):
+ Required. Context about the user,
+ what they are looking at and what action
+ they took to trigger the predict
+ request. Note that this user event
+ detail won't be ingested to userEvent
+ logs. Thus, a separate userEvent write
+ request is required for event logging.
+
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.services.prediction_service.pagers.PredictAsyncPager:
+ Response message for predict method.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name, user_event])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = prediction_service.PredictRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if user_event is not None:
+ request.user_event = user_event
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.predict,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.PredictAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("PredictionServiceAsyncClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py
index 22fd3199..75617c51 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,21 +16,29 @@
#
from collections import OrderedDict
-from typing import Dict, Sequence, Tuple, Type, Union
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.recommendationengine_v1beta1.services.prediction_service import pagers
from google.cloud.recommendationengine_v1beta1.types import prediction_service
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
-from .transports.base import PredictionServiceTransport
+from .transports.base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import PredictionServiceGrpcTransport
+from .transports.grpc_asyncio import PredictionServiceGrpcAsyncIOTransport
class PredictionServiceClientMeta(type):
@@ -45,8 +53,11 @@ class PredictionServiceClientMeta(type):
OrderedDict()
) # type: Dict[str, Type[PredictionServiceTransport]]
_transport_registry["grpc"] = PredictionServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport
- def get_transport_class(cls, label: str = None) -> Type[PredictionServiceTransport]:
+ def get_transport_class(
+ cls, label: str = None,
+ ) -> Type[PredictionServiceTransport]:
"""Return an appropriate transport class.
Args:
@@ -68,10 +79,56 @@ def get_transport_class(cls, label: str = None) -> Type[PredictionServiceTranspo
class PredictionServiceClient(metaclass=PredictionServiceClientMeta):
"""Service for making recommendation prediction."""
- DEFAULT_OPTIONS = ClientOptions.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "recommendationengine.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ PredictionServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -84,7 +141,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ PredictionServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -92,12 +149,103 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PredictionServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PredictionServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def placement_path(
+ project: str, location: str, catalog: str, event_store: str, placement: str,
+ ) -> str:
+ """Return a fully-qualified placement string."""
+ return "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}/placements/{placement}".format(
+ project=project,
+ location=location,
+ catalog=catalog,
+ event_store=event_store,
+ placement=placement,
+ )
+
+ @staticmethod
+ def parse_placement_path(path: str) -> Dict[str, str]:
+ """Parse a placement path into its component segments."""
+ m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/catalogs/(?P<catalog>.+?)/eventStores/(?P<event_store>.+?)/placements/(?P<placement>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, PredictionServiceTransport] = None,
- client_options: ClientOptions = DEFAULT_OPTIONS,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, PredictionServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the prediction service client.
@@ -107,36 +255,109 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.PredictionServiceTransport]): The
+ transport (Union[str, PredictionServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, PredictionServiceTransport):
- if credentials:
+ # transport is a PredictionServiceTransport instance.
+ if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
- host=client_options.api_endpoint
- or "recommendationengine.googleapis.com",
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def predict(
self,
request: prediction_service.PredictRequest = None,
*,
+ name: str = None,
+ user_event: gcr_user_event.UserEvent = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -148,8 +369,62 @@ def predict(
more `__.
Args:
- request (:class:`~.prediction_service.PredictRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.PredictRequest):
The request object. Request message for Predict method.
+ name (str):
+ Required. Full resource name of the format:
+ {name=projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/placements/*}
+ The id of the recommendation engine placement. This id
+ is used to identify the set of models that will be used
+ to make the prediction.
+
+ We currently support three placements with the following
+ IDs by default:
+
+ - ``shopping_cart``: Predicts items frequently bought
+ together with one or more catalog items in the same
+ shopping session. Commonly displayed after
+ ``add-to-cart`` events, on product detail pages, or
+ on the shopping cart page.
+
+ - ``home_page``: Predicts the next product that a user
+ will most likely engage with or purchase based on the
+ shopping or viewing history of the specified
+ ``userId`` or ``visitorId``. For example -
+ Recommendations for you.
+
+ - ``product_detail``: Predicts the next product that a
+ user will most likely engage with or purchase. The
+ prediction is based on the shopping or viewing
+ history of the specified ``userId`` or ``visitorId``
+ and its relevance to a specified ``CatalogItem``.
+ Typically used on product detail pages. For example -
+ More items like this.
+
+ - ``recently_viewed_default``: Returns up to 75 items
+ recently viewed by the specified ``userId`` or
+ ``visitorId``, most recent ones first. Returns
+ nothing if neither of them has viewed any items yet.
+ For example - Recently viewed.
+
+ The full list of available placements can be seen at
+ https://console.cloud.google.com/recommendation/datafeeds/default_catalog/dashboard
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
+ Required. Context about the user,
+ what they are looking at and what action
+ they took to trigger the predict
+ request. Note that this user event
+ detail won't be ingested to userEvent
+ logs. Thus, a separate userEvent write
+ request is required for event logging.
+
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -158,7 +433,7 @@ def predict(
sent along with the request as metadata.
Returns:
- ~.pagers.PredictPager:
+ google.cloud.recommendationengine_v1beta1.services.prediction_service.pagers.PredictPager:
Response message for predict method.
Iterating over this object will yield
results and resolve additional pages
@@ -166,34 +441,61 @@ def predict(
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name, user_event])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a prediction_service.PredictRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, prediction_service.PredictRequest):
+ request = prediction_service.PredictRequest(request)
- request = prediction_service.PredictRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if name is not None:
+ request.name = name
+ if user_event is not None:
+ request.user_event = user_event
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.predict, default_timeout=None, client_info=_client_info
+ rpc = self._transport._wrapped_methods[self._transport.predict]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
- response = pagers.PredictPager(method=rpc, request=request, response=response)
+ response = pagers.PredictPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
# Done; return the response.
return response
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
- "google-cloud-recommendations-ai"
- ).version
+ "google-cloud-recommendations-ai",
+ ).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PredictionServiceClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/pagers.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/pagers.py
index e13699b2..05d9251e 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/pagers.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/pagers.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, Callable, Iterable
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.recommendationengine_v1beta1.types import prediction_service
@@ -24,7 +33,7 @@ class PredictPager:
"""A pager for iterating through ``predict`` requests.
This class thinly wraps an initial
- :class:`~.prediction_service.PredictResponse` object, and
+ :class:`google.cloud.recommendationengine_v1beta1.types.PredictResponse` object, and
provides an ``__iter__`` method to iterate through its
``results`` field.
@@ -33,32 +42,35 @@ class PredictPager:
through the ``results`` field on the
corresponding responses.
- All the usual :class:`~.prediction_service.PredictResponse`
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.PredictResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
- method: Callable[
- [prediction_service.PredictRequest], prediction_service.PredictResponse
- ],
+ method: Callable[..., prediction_service.PredictResponse],
request: prediction_service.PredictRequest,
response: prediction_service.PredictResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.prediction_service.PredictRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.PredictRequest):
The initial request object.
- response (:class:`~.prediction_service.PredictResponse`):
+ response (google.cloud.recommendationengine_v1beta1.types.PredictResponse):
The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
"""
self._method = method
self._request = prediction_service.PredictRequest(request)
self._response = response
+ self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@@ -68,7 +80,7 @@ def pages(self) -> Iterable[prediction_service.PredictResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request)
+ self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[prediction_service.PredictResponse.PredictionResult]:
@@ -77,3 +89,71 @@ def __iter__(self) -> Iterable[prediction_service.PredictResponse.PredictionResu
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class PredictAsyncPager:
+ """A pager for iterating through ``predict`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.recommendationengine_v1beta1.types.PredictResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``results`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``Predict`` requests and continue to iterate
+ through the ``results`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.PredictResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[prediction_service.PredictResponse]],
+ request: prediction_service.PredictRequest,
+ response: prediction_service.PredictResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.recommendationengine_v1beta1.types.PredictRequest):
+ The initial request object.
+ response (google.cloud.recommendationengine_v1beta1.types.PredictResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = prediction_service.PredictRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[prediction_service.PredictResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(
+ self,
+ ) -> AsyncIterable[prediction_service.PredictResponse.PredictionResult]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.results:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/__init__.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/__init__.py
index c5713f19..9ec1369a 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,16 @@
from .base import PredictionServiceTransport
from .grpc import PredictionServiceGrpcTransport
+from .grpc_asyncio import PredictionServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[PredictionServiceTransport]]
_transport_registry["grpc"] = PredictionServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = PredictionServiceGrpcAsyncIOTransport
-
-__all__ = ("PredictionServiceTransport", "PredictionServiceGrpcTransport")
+__all__ = (
+ "PredictionServiceTransport",
+ "PredictionServiceGrpcTransport",
+ "PredictionServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/base.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/base.py
index be6f1803..9b3d901a 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/base.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/base.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,14 +17,28 @@
import abc
import typing
+import pkg_resources
-from google import auth
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.cloud.recommendationengine_v1beta1.types import prediction_service
-class PredictionServiceTransport(metaclass=abc.ABCMeta):
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class PredictionServiceTransport(abc.ABC):
"""Abstract transport class for PredictionService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
@@ -34,6 +48,11 @@ def __init__(
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
) -> None:
"""Instantiate the transport.
@@ -44,27 +63,76 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
- if credentials is None:
- credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=self._scopes, quota_project_id=quota_project_id
+ )
# Save the credentials.
self._credentials = credentials
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.predict: gapic_v1.method.wrap_method(
+ self.predict,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ }
+
@property
def predict(
- self
+ self,
) -> typing.Callable[
- [prediction_service.PredictRequest], prediction_service.PredictResponse
+ [prediction_service.PredictRequest],
+ typing.Union[
+ prediction_service.PredictResponse,
+ typing.Awaitable[prediction_service.PredictResponse],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
__all__ = ("PredictionServiceTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc.py
index 8c35dfd6..0c18a57c 100644
--- a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc.py
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,16 +15,20 @@
# limitations under the License.
#
-from typing import Callable, Dict
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.recommendationengine_v1beta1.types import prediction_service
-from .base import PredictionServiceTransport
+from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
class PredictionServiceGrpcTransport(PredictionServiceTransport):
@@ -40,12 +44,22 @@ class PredictionServiceGrpcTransport(PredictionServiceTransport):
top of HTTP/2); the ``grpcio`` package must be installed.
"""
+ _stubs: Dict[str, Callable]
+
def __init__(
self,
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- channel: grpc.Channel = None
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -57,66 +71,159 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
- if channel:
- credentials = False
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
- # Run the base constructor.
- super().__init__(host=host, credentials=credentials)
- self._stubs = {} # type: Dict[str, Callable]
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- # If a channel was explicitly provided, set it.
if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- **kwargs
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
+ scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
- host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
def predict(
- self
+ self,
) -> Callable[
[prediction_service.PredictRequest], prediction_service.PredictResponse
]:
diff --git a/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc_asyncio.py b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc_asyncio.py
new file mode 100644
index 00000000..56dd10e7
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/prediction_service/transports/grpc_asyncio.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.recommendationengine_v1beta1.types import prediction_service
+
+from .base import PredictionServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import PredictionServiceGrpcTransport
+
+
+class PredictionServiceGrpcAsyncIOTransport(PredictionServiceTransport):
+ """gRPC AsyncIO backend transport for PredictionService.
+
+ Service for making recommendation prediction.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def predict(
+ self,
+ ) -> Callable[
+ [prediction_service.PredictRequest],
+ Awaitable[prediction_service.PredictResponse],
+ ]:
+ r"""Return a callable for the predict method over gRPC.
+
+ Makes a recommendation prediction. If using API Key based
+ authentication, the API Key must be registered using the
+ [PredictionApiKeyRegistry][google.cloud.recommendationengine.v1beta1.PredictionApiKeyRegistry]
+ service. `Learn
+ more `__.
+
+ Returns:
+ Callable[[~.PredictRequest],
+ Awaitable[~.PredictResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "predict" not in self._stubs:
+ self._stubs["predict"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.PredictionService/Predict",
+ request_serializer=prediction_service.PredictRequest.serialize,
+ response_deserializer=prediction_service.PredictResponse.deserialize,
+ )
+ return self._stubs["predict"]
+
+
+__all__ = ("PredictionServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/__init__.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/__init__.py
index 3946471a..33435f73 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,5 +16,9 @@
#
from .client import UserEventServiceClient
+from .async_client import UserEventServiceAsyncClient
-__all__ = ("UserEventServiceClient",)
+__all__ = (
+ "UserEventServiceClient",
+ "UserEventServiceAsyncClient",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py
new file mode 100644
index 00000000..fe2b306c
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/async_client.py
@@ -0,0 +1,864 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.api import httpbody_pb2 as httpbody # type: ignore
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
+from google.cloud.recommendationengine_v1beta1.services.user_event_service import pagers
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event_service
+from google.protobuf import any_pb2 as gp_any # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+from .transports.base import UserEventServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import UserEventServiceGrpcAsyncIOTransport
+from .client import UserEventServiceClient
+
+
+class UserEventServiceAsyncClient:
+ """Service for ingesting end user actions on the customer
+ website.
+ """
+
+ _client: UserEventServiceClient
+
+ DEFAULT_ENDPOINT = UserEventServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = UserEventServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ event_store_path = staticmethod(UserEventServiceClient.event_store_path)
+ parse_event_store_path = staticmethod(UserEventServiceClient.parse_event_store_path)
+
+ common_billing_account_path = staticmethod(
+ UserEventServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ UserEventServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(UserEventServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ UserEventServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ UserEventServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ UserEventServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(UserEventServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ UserEventServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(UserEventServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ UserEventServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ UserEventServiceAsyncClient: The constructed client.
+ """
+ return UserEventServiceClient.from_service_account_info.__func__(UserEventServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ UserEventServiceAsyncClient: The constructed client.
+ """
+ return UserEventServiceClient.from_service_account_file.__func__(UserEventServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> UserEventServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ UserEventServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(UserEventServiceClient).get_transport_class, type(UserEventServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: credentials.Credentials = None,
+ transport: Union[str, UserEventServiceTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the user event service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.UserEventServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+
+ self._client = UserEventServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def write_user_event(
+ self,
+ request: user_event_service.WriteUserEventRequest = None,
+ *,
+ parent: str = None,
+ user_event: gcr_user_event.UserEvent = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> gcr_user_event.UserEvent:
+ r"""Writes a single user event.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.WriteUserEventRequest`):
+ The request object. Request message for WriteUserEvent
+ method.
+ parent (:class:`str`):
+ Required. The parent eventStore resource name, such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store".
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (:class:`google.cloud.recommendationengine_v1beta1.types.UserEvent`):
+ Required. User event to write.
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.types.UserEvent:
+ UserEvent captures all metadata
+ information recommendation engine needs
+ to know about how end users interact
+ with customers' website.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, user_event])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = user_event_service.WriteUserEventRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if user_event is not None:
+ request.user_event = user_event
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.write_user_event,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def collect_user_event(
+ self,
+ request: user_event_service.CollectUserEventRequest = None,
+ *,
+ parent: str = None,
+ user_event: str = None,
+ uri: str = None,
+ ets: int = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> httpbody.HttpBody:
+ r"""Writes a single user event from the browser. This
+        uses a GET request due to browser restriction of
+ POST-ing to a 3rd party domain.
+ This method is used only by the Recommendations AI
+ JavaScript pixel. Users should not call this method
+ directly.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.CollectUserEventRequest`):
+ The request object. Request message for CollectUserEvent
+ method.
+ parent (:class:`str`):
+ Required. The parent eventStore name, such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store".
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (:class:`str`):
+ Required. URL encoded UserEvent
+ proto.
+
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ uri (:class:`str`):
+                Optional. The url including
+                cgi-parameters but excluding the hash
+ fragment. The URL must be truncated to
+ 1.5K bytes to conservatively be under
+ the 2K bytes. This is often more useful
+ than the referer url, because many
+ browsers only send the domain for 3rd
+ party requests.
+
+ This corresponds to the ``uri`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ ets (:class:`int`):
+ Optional. The event timestamp in
+ milliseconds. This prevents browser
+ caching of otherwise identical get
+ requests. The name is abbreviated to
+ reduce the payload bytes.
+
+ This corresponds to the ``ets`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api.httpbody_pb2.HttpBody:
+ Message that represents an arbitrary HTTP body. It should only be used for
+ payload formats that can't be represented as JSON,
+ such as raw binary or an HTML page.
+
+ This message can be used both in streaming and
+ non-streaming API methods in the request as well as
+ the response.
+
+ It can be used as a top-level request field, which is
+ convenient if one wants to extract parameters from
+ either the URL or HTTP template into the request
+ fields and also want access to the raw HTTP body.
+
+ Example:
+
+ message GetResourceRequest {
+ // A unique request id. string request_id = 1;
+
+ // The raw HTTP body is bound to this field.
+ google.api.HttpBody http_body = 2;
+
+ }
+
+ service ResourceService {
+ rpc GetResource(GetResourceRequest) returns
+ (google.api.HttpBody); rpc
+ UpdateResource(google.api.HttpBody) returns
+ (google.protobuf.Empty);
+
+ }
+
+ Example with streaming methods:
+
+ service CaldavService {
+ rpc GetCalendar(stream google.api.HttpBody)
+ returns (stream google.api.HttpBody);
+
+ rpc UpdateCalendar(stream google.api.HttpBody)
+ returns (stream google.api.HttpBody);
+
+ }
+
+ Use of this type only changes how the request and
+ response bodies are handled, all other features will
+ continue to work unchanged.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, user_event, uri, ets])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = user_event_service.CollectUserEventRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if user_event is not None:
+ request.user_event = user_event
+ if uri is not None:
+ request.uri = uri
+ if ets is not None:
+ request.ets = ets
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.collect_user_event,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_user_events(
+ self,
+ request: user_event_service.ListUserEventsRequest = None,
+ *,
+ parent: str = None,
+ filter: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListUserEventsAsyncPager:
+ r"""Gets a list of user events within a time range, with
+ potential filtering.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest`):
+ The request object. Request message for ListUserEvents
+ method.
+ parent (:class:`str`):
+ Required. The parent eventStore resource name, such as
+ ``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ Optional. Filtering expression to specify restrictions
+ over returned events. This is a sequence of terms, where
+ each term applies some kind of a restriction to the
+ returned user events. Use this expression to restrict
+ results to a specific time range, or filter events by
+ eventType. eg: eventTime > "2012-04-23T18:25:43.511Z"
+ eventsMissingCatalogItems
+ eventTime<"2012-04-23T18:25:43.511Z" eventType=search
+
+ We expect only 3 types of fields:
+
+ ::
+
+ * eventTime: this can be specified a maximum of 2 times, once with a
+ less than operator and once with a greater than operator. The
+ eventTime restrict should result in one contiguous valid eventTime
+ range.
+
+ * eventType: only 1 eventType restriction can be specified.
+
+                  * eventsMissingCatalogItems: specifying this will restrict results
+ to events for which catalog items were not found in the catalog. The
+ default behavior is to return only those events for which catalog
+ items were found.
+
+ Some examples of valid filters expressions:
+
+ - Example 1: eventTime > "2012-04-23T18:25:43.511Z"
+ eventTime < "2012-04-23T18:30:43.511Z"
+ - Example 2: eventTime > "2012-04-23T18:25:43.511Z"
+ eventType = detail-page-view
+ - Example 3: eventsMissingCatalogItems eventType =
+ search eventTime < "2018-04-23T18:30:43.511Z"
+ - Example 4: eventTime > "2012-04-23T18:25:43.511Z"
+ - Example 5: eventType = search
+ - Example 6: eventsMissingCatalogItems
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.recommendationengine_v1beta1.services.user_event_service.pagers.ListUserEventsAsyncPager:
+ Response message for ListUserEvents
+ method.
+ Iterating over this object will yield
+ results and resolve additional pages
+ automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = user_event_service.ListUserEventsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListUserEventsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def purge_user_events(
+ self,
+ request: user_event_service.PurgeUserEventsRequest = None,
+ *,
+ parent: str = None,
+ filter: str = None,
+ force: bool = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Deletes permanently all user events specified by the
+ filter provided. Depending on the number of events
+ specified by the filter, this operation could take hours
+ or days to complete. To test a filter, use the list
+ command first.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsRequest`):
+ The request object. Request message for PurgeUserEvents
+ method.
+ parent (:class:`str`):
+ Required. The resource name of the event_store under
+ which the events are created. The format is
+ "projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ Required. The filter string to specify the events to be
+ deleted. Empty string filter is not allowed. This filter
+ can also be used with ListUserEvents API to list events
+ that will be deleted. The eligible fields for filtering
+ are:
+
+ - eventType - UserEvent.eventType field of type string.
+ - eventTime - in ISO 8601 "zulu" format.
+ - visitorId - field of type string. Specifying this
+ will delete all events associated with a visitor.
+ - userId - field of type string. Specifying this will
+ delete all events associated with a user. Example 1:
+ Deleting all events in a time range.
+ ``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"``
+ Example 2: Deleting specific eventType in time range.
+ ``eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"``
+ Example 3: Deleting all events for a specific visitor
+ ``visitorId = visitor1024`` The filtering fields are
+ assumed to have an implicit AND.
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ force (:class:`bool`):
+ Optional. The default value is false.
+ Override this flag to true to actually
+ perform the purge. If the field is not
+ set to true, a sampling of events to be
+ deleted will be returned.
+
+ This corresponds to the ``force`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is
+ successfully done, then this message is returned by
+ the google.longrunning.Operations.response field.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter, force])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = user_event_service.PurgeUserEventsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
+ if force is not None:
+ request.force = force
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.purge_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ user_event_service.PurgeUserEventsResponse,
+ metadata_type=user_event_service.PurgeUserEventsMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def import_user_events(
+ self,
+ request: import_.ImportUserEventsRequest = None,
+ *,
+ parent: str = None,
+ request_id: str = None,
+ input_config: import_.InputConfig = None,
+ errors_config: import_.ImportErrorsConfig = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> operation_async.AsyncOperation:
+ r"""Bulk import of User events. Request processing might
+ be synchronous. Events that already exist are skipped.
+ Use this method for backfilling historical user events.
+ Operation.response is of type ImportResponse. Note that
+ it is possible for a subset of the items to be
+ successfully inserted. Operation.metadata is of type
+ ImportMetadata.
+
+ Args:
+ request (:class:`google.cloud.recommendationengine_v1beta1.types.ImportUserEventsRequest`):
+ The request object. Request message for the
+ ImportUserEvents request.
+ parent (:class:`str`):
+ Required.
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ request_id (:class:`str`):
+ Optional. Unique identifier provided by client, within
+ the ancestor dataset scope. Ensures idempotency for
+ expensive long running operations. Server-generated if
+ unspecified. Up to 128 characters long. This is returned
+ as google.longrunning.Operation.name in the response.
+ Note that this field must not be set if the desired
+ input config is catalog_inline_source.
+
+ This corresponds to the ``request_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (:class:`google.cloud.recommendationengine_v1beta1.types.InputConfig`):
+ Required. The desired input location
+ of the data.
+
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ errors_config (:class:`google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig`):
+ Optional. The desired location of
+ errors incurred during the Import.
+
+ This corresponds to the ``errors_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.api_core.operation_async.AsyncOperation:
+ An object representing a long-running operation.
+
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.ImportUserEventsResponse` Response of the ImportUserEventsRequest. If the long running
+ operation was successful, then this message is
+ returned by the
+ google.longrunning.Operations.response field if the
+ operation was successful.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, request_id, input_config, errors_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = import_.ImportUserEventsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if request_id is not None:
+ request.request_id = request_id
+ if input_config is not None:
+ request.input_config = input_config
+ if errors_config is not None:
+ request.errors_config = errors_config
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.import_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Wrap the response in an operation future.
+ response = operation_async.from_gapic(
+ response,
+ self._client._transport.operations_client,
+ import_.ImportUserEventsResponse,
+ metadata_type=import_.ImportMetadata,
+ )
+
+ # Done; return the response.
+ return response
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("UserEventServiceAsyncClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
index f5b57a4c..c00bcae6 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,27 +16,36 @@
#
from collections import OrderedDict
-from typing import Dict, Sequence, Tuple, Type, Union
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.api import httpbody_pb2 as httpbody # type: ignore
-from google.api_core import operation
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
from google.cloud.recommendationengine_v1beta1.services.user_event_service import pagers
from google.cloud.recommendationengine_v1beta1.types import import_
from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
from google.cloud.recommendationengine_v1beta1.types import user_event_service
-from google.protobuf import any_pb2 as any # type: ignore
+from google.protobuf import any_pb2 as gp_any # type: ignore
from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from .transports.base import UserEventServiceTransport
+from .transports.base import UserEventServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import UserEventServiceGrpcTransport
+from .transports.grpc_asyncio import UserEventServiceGrpcAsyncIOTransport
class UserEventServiceClientMeta(type):
@@ -51,8 +60,9 @@ class UserEventServiceClientMeta(type):
OrderedDict()
) # type: Dict[str, Type[UserEventServiceTransport]]
_transport_registry["grpc"] = UserEventServiceGrpcTransport
+ _transport_registry["grpc_asyncio"] = UserEventServiceGrpcAsyncIOTransport
- def get_transport_class(cls, label: str = None) -> Type[UserEventServiceTransport]:
+ def get_transport_class(cls, label: str = None,) -> Type[UserEventServiceTransport]:
"""Return an appropriate transport class.
Args:
@@ -76,10 +86,56 @@ class UserEventServiceClient(metaclass=UserEventServiceClientMeta):
website.
"""
- DEFAULT_OPTIONS = ClientOptions.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Convert api endpoint to mTLS endpoint.
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+ DEFAULT_ENDPOINT = "recommendationengine.googleapis.com"
+ DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
+ DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ UserEventServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -92,7 +148,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ UserEventServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -100,12 +156,102 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> UserEventServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ UserEventServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def event_store_path(
+ project: str, location: str, catalog: str, event_store: str,
+ ) -> str:
+ """Return a fully-qualified event_store string."""
+ return "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}".format(
+ project=project,
+ location=location,
+ catalog=catalog,
+ event_store=event_store,
+ )
+
+ @staticmethod
+ def parse_event_store_path(path: str) -> Dict[str, str]:
+ """Parse a event_store path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/catalogs/(?P.+?)/eventStores/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, UserEventServiceTransport] = None,
- client_options: ClientOptions = DEFAULT_OPTIONS,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, UserEventServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the user event service client.
@@ -115,46 +261,131 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.UserEventServiceTransport]): The
+ transport (Union[str, UserEventServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
+ if client_options is None:
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ client_cert_source_func = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
+ else:
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+ if use_mtls_env == "never":
+ api_endpoint = self.DEFAULT_ENDPOINT
+ elif use_mtls_env == "always":
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ elif use_mtls_env == "auto":
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
+ )
+ else:
+ raise MutualTLSChannelError(
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
+ )
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, UserEventServiceTransport):
- if credentials:
+ # transport is a UserEventServiceTransport instance.
+ if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
+ if client_options.scopes:
+ raise ValueError(
+ "When providing a transport instance, "
+ "provide its scopes directly."
+ )
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
- host=client_options.api_endpoint
- or "recommendationengine.googleapis.com",
+ credentials_file=client_options.credentials_file,
+ host=api_endpoint,
+ scopes=client_options.scopes,
+ client_cert_source_for_mtls=client_cert_source_func,
+ quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def write_user_event(
self,
request: user_event_service.WriteUserEventRequest = None,
*,
+ parent: str = None,
+ user_event: gcr_user_event.UserEvent = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
- ) -> user_event.UserEvent:
+ ) -> gcr_user_event.UserEvent:
r"""Writes a single user event.
Args:
- request (:class:`~.user_event_service.WriteUserEventRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.WriteUserEventRequest):
The request object. Request message for WriteUserEvent
method.
+ parent (str):
+ Required. The parent eventStore resource name, such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store".
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
+ Required. User event to write.
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -163,7 +394,7 @@ def write_user_event(
sent along with the request as metadata.
Returns:
- ~.user_event.UserEvent:
+ google.cloud.recommendationengine_v1beta1.types.UserEvent:
UserEvent captures all metadata
information recommendation engine needs
to know about how end users interact
@@ -171,19 +402,42 @@ def write_user_event(
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, user_event])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = user_event_service.WriteUserEventRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a user_event_service.WriteUserEventRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, user_event_service.WriteUserEventRequest):
+ request = user_event_service.WriteUserEventRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if user_event is not None:
+ request.user_event = user_event
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.write_user_event,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.write_user_event]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -192,6 +446,10 @@ def collect_user_event(
self,
request: user_event_service.CollectUserEventRequest = None,
*,
+ parent: str = None,
+ user_event: str = None,
+ uri: str = None,
+ ets: int = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -204,9 +462,46 @@ def collect_user_event(
directly.
Args:
- request (:class:`~.user_event_service.CollectUserEventRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.CollectUserEventRequest):
The request object. Request message for CollectUserEvent
method.
+ parent (str):
+ Required. The parent eventStore name, such as
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store".
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ user_event (str):
+ Required. URL encoded UserEvent
+ proto.
+
+ This corresponds to the ``user_event`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ uri (str):
+ Optional. The url including cgi-
+ parameters but excluding the hash
+ fragment. The URL must be truncated to
+ 1.5K bytes to conservatively be under
+ the 2K bytes. This is often more useful
+ than the referer url, because many
+ browsers only send the domain for 3rd
+ party requests.
+
+ This corresponds to the ``uri`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ ets (int):
+ Optional. The event timestamp in
+ milliseconds. This prevents browser
+ caching of otherwise identical get
+ requests. The name is abbreviated to
+ reduce the payload bytes.
+
+ This corresponds to the ``ets`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -215,64 +510,86 @@ def collect_user_event(
sent along with the request as metadata.
Returns:
- ~.httpbody.HttpBody:
- Message that represents an arbitrary HTTP body. It
- should only be used for payload formats that can't be
- represented as JSON, such as raw binary or an HTML page.
+ google.api.httpbody_pb2.HttpBody:
+ Message that represents an arbitrary HTTP body. It should only be used for
+ payload formats that can't be represented as JSON,
+ such as raw binary or an HTML page.
- This message can be used both in streaming and
- non-streaming API methods in the request as well as the
- response.
+ This message can be used both in streaming and
+ non-streaming API methods in the request as well as
+ the response.
- It can be used as a top-level request field, which is
- convenient if one wants to extract parameters from
- either the URL or HTTP template into the request fields
- and also want access to the raw HTTP body.
+ It can be used as a top-level request field, which is
+ convenient if one wants to extract parameters from
+ either the URL or HTTP template into the request
+ fields and also want access to the raw HTTP body.
- Example:
+ Example:
- ::
+ message GetResourceRequest {
+ // A unique request id. string request_id = 1;
- message GetResourceRequest {
- // A unique request id.
- string request_id = 1;
+ // The raw HTTP body is bound to this field.
+ google.api.HttpBody http_body = 2;
- // The raw HTTP body is bound to this field.
- google.api.HttpBody http_body = 2;
- }
+ }
- service ResourceService {
- rpc GetResource(GetResourceRequest) returns (google.api.HttpBody);
- rpc UpdateResource(google.api.HttpBody) returns (google.protobuf.Empty);
- }
+ service ResourceService {
+ rpc GetResource(GetResourceRequest) returns
+ (google.api.HttpBody); rpc
+ UpdateResource(google.api.HttpBody) returns
+ (google.protobuf.Empty);
- Example with streaming methods:
+ }
- ::
+ Example with streaming methods:
+
+ service CaldavService {
+ rpc GetCalendar(stream google.api.HttpBody)
+ returns (stream google.api.HttpBody);
+
+ rpc UpdateCalendar(stream google.api.HttpBody)
+ returns (stream google.api.HttpBody);
- service CaldavService {
- rpc GetCalendar(stream google.api.HttpBody)
- returns (stream google.api.HttpBody);
- rpc UpdateCalendar(stream google.api.HttpBody)
- returns (stream google.api.HttpBody);
- }
+ }
- Use of this type only changes how the request and
- response bodies are handled, all other features will
- continue to work unchanged.
+ Use of this type only changes how the request and
+ response bodies are handled, all other features will
+ continue to work unchanged.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, user_event, uri, ets])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = user_event_service.CollectUserEventRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a user_event_service.CollectUserEventRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, user_event_service.CollectUserEventRequest):
+ request = user_event_service.CollectUserEventRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if user_event is not None:
+ request.user_event = user_event
+ if uri is not None:
+ request.uri = uri
+ if ets is not None:
+ request.ets = ets
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.collect_user_event,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.collect_user_event]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -281,7 +598,7 @@ def collect_user_event(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
@@ -290,6 +607,8 @@ def list_user_events(
self,
request: user_event_service.ListUserEventsRequest = None,
*,
+ parent: str = None,
+ filter: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -298,9 +617,57 @@ def list_user_events(
potential filtering.
Args:
- request (:class:`~.user_event_service.ListUserEventsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest):
The request object. Request message for ListUserEvents
method.
+ parent (str):
+ Required. The parent eventStore resource name, such as
+ ``projects/*/locations/*/catalogs/default_catalog/eventStores/default_event_store``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (str):
+ Optional. Filtering expression to specify restrictions
+ over returned events. This is a sequence of terms, where
+ each term applies some kind of a restriction to the
+ returned user events. Use this expression to restrict
+ results to a specific time range, or filter events by
+ eventType. eg: eventTime > "2012-04-23T18:25:43.511Z"
+ eventsMissingCatalogItems
+ eventTime<"2012-04-23T18:25:43.511Z" eventType=search
+
+ We expect only 3 types of fields:
+
+ ::
+
+ * eventTime: this can be specified a maximum of 2 times, once with a
+ less than operator and once with a greater than operator. The
+ eventTime restrict should result in one contiguous valid eventTime
+ range.
+
+ * eventType: only 1 eventType restriction can be specified.
+
+ * eventsMissingCatalogItems: specifying this will restrict results
+ to events for which catalog items were not found in the catalog. The
+ default behavior is to return only those events for which catalog
+ items were found.
+
+ Some examples of valid filters expressions:
+
+ - Example 1: eventTime > "2012-04-23T18:25:43.511Z"
+ eventTime < "2012-04-23T18:30:43.511Z"
+ - Example 2: eventTime > "2012-04-23T18:25:43.511Z"
+ eventType = detail-page-view
+ - Example 3: eventsMissingCatalogItems eventType =
+ search eventTime < "2018-04-23T18:30:43.511Z"
+ - Example 4: eventTime > "2012-04-23T18:25:43.511Z"
+ - Example 5: eventType = search
+ - Example 6: eventsMissingCatalogItems
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -309,7 +676,7 @@ def list_user_events(
sent along with the request as metadata.
Returns:
- ~.pagers.ListUserEventsPager:
+ google.cloud.recommendationengine_v1beta1.services.user_event_service.pagers.ListUserEventsPager:
Response message for ListUserEvents
method.
Iterating over this object will yield
@@ -318,16 +685,33 @@ def list_user_events(
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a user_event_service.ListUserEventsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, user_event_service.ListUserEventsRequest):
+ request = user_event_service.ListUserEventsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
- request = user_event_service.ListUserEventsRequest(request)
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_user_events,
- default_timeout=None,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_user_events]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -336,12 +720,12 @@ def list_user_events(
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListUserEventsPager(
- method=rpc, request=request, response=response
+ method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
@@ -351,6 +735,9 @@ def purge_user_events(
self,
request: user_event_service.PurgeUserEventsRequest = None,
*,
+ parent: str = None,
+ filter: str = None,
+ force: bool = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -362,9 +749,51 @@ def purge_user_events(
command first.
Args:
- request (:class:`~.user_event_service.PurgeUserEventsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsRequest):
The request object. Request message for PurgeUserEvents
method.
+ parent (str):
+ Required. The resource name of the event_store under
+ which the events are created. The format is
+ "projects/${projectId}/locations/global/catalogs/${catalogId}/eventStores/${eventStoreId}"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (str):
+ Required. The filter string to specify the events to be
+ deleted. Empty string filter is not allowed. This filter
+ can also be used with ListUserEvents API to list events
+ that will be deleted. The eligible fields for filtering
+ are:
+
+ - eventType - UserEvent.eventType field of type string.
+ - eventTime - in ISO 8601 "zulu" format.
+ - visitorId - field of type string. Specifying this
+ will delete all events associated with a visitor.
+ - userId - field of type string. Specifying this will
+ delete all events associated with a user. Example 1:
+ Deleting all events in a time range.
+ ``eventTime > "2012-04-23T18:25:43.511Z" eventTime < "2012-04-23T18:30:43.511Z"``
+ Example 2: Deleting specific eventType in time range.
+ ``eventTime > "2012-04-23T18:25:43.511Z" eventType = "detail-page-view"``
+ Example 3: Deleting all events for a specific visitor
+ ``visitorId = visitor1024`` The filtering fields are
+ assumed to have an implicit AND.
+
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ force (bool):
+ Optional. The default value is false.
+ Override this flag to true to actually
+ perform the purge. If the field is not
+ set to true, a sampling of events to be
+ deleted will be returned.
+
+ This corresponds to the ``force`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -373,31 +802,53 @@ def purge_user_events(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.user_event_service.PurgeUserEventsResponse``:
- Response of the PurgeUserEventsRequest. If the long
- running operation is successfully done, then this
- message is returned by the
- google.longrunning.Operations.response field.
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.PurgeUserEventsResponse` Response of the PurgeUserEventsRequest. If the long running operation is
+ successfully done, then this message is returned by
+ the google.longrunning.Operations.response field.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter, force])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a user_event_service.PurgeUserEventsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, user_event_service.PurgeUserEventsRequest):
+ request = user_event_service.PurgeUserEventsRequest(request)
- request = user_event_service.PurgeUserEventsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
+ if force is not None:
+ request.force = force
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.purge_user_events,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.purge_user_events]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
@@ -414,6 +865,10 @@ def import_user_events(
self,
request: import_.ImportUserEventsRequest = None,
*,
+ parent: str = None,
+ request_id: str = None,
+ input_config: import_.InputConfig = None,
+ errors_config: import_.ImportErrorsConfig = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
@@ -427,9 +882,42 @@ def import_user_events(
ImportMetadata.
Args:
- request (:class:`~.import_.ImportUserEventsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ImportUserEventsRequest):
The request object. Request message for the
ImportUserEvents request.
+ parent (str):
+ Required.
+ "projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store"
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ request_id (str):
+ Optional. Unique identifier provided by client, within
+ the ancestor dataset scope. Ensures idempotency for
+ expensive long running operations. Server-generated if
+ unspecified. Up to 128 characters long. This is returned
+ as google.longrunning.Operation.name in the response.
+ Note that this field must not be set if the desired
+ input config is catalog_inline_source.
+
+ This corresponds to the ``request_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ input_config (google.cloud.recommendationengine_v1beta1.types.InputConfig):
+ Required. The desired input location
+ of the data.
+
+ This corresponds to the ``input_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
+ Optional. The desired location of
+ errors incurred during the Import.
+
+ This corresponds to the ``errors_config`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
@@ -438,31 +926,57 @@ def import_user_events(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.import_.ImportUserEventsResponse``: Response
- of the ImportUserEventsRequest. If the long running
- operation was successful, then this message is returned
- by the google.longrunning.Operations.response field if
- the operation was successful.
+ The result type for the operation will be :class:`google.cloud.recommendationengine_v1beta1.types.ImportUserEventsResponse` Response of the ImportUserEventsRequest. If the long running
+ operation was successful, then this message is
+ returned by the
+ google.longrunning.Operations.response field if the
+ operation was successful.
"""
# Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, request_id, input_config, errors_config])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
- request = import_.ImportUserEventsRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a import_.ImportUserEventsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, import_.ImportUserEventsRequest):
+ request = import_.ImportUserEventsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+
+ if parent is not None:
+ request.parent = parent
+ if request_id is not None:
+ request.request_id = request_id
+ if input_config is not None:
+ request.input_config = input_config
+ if errors_config is not None:
+ request.errors_config = errors_config
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.import_user_events,
- default_timeout=None,
- client_info=_client_info,
+ rpc = self._transport._wrapped_methods[self._transport.import_user_events]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata)
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Wrap the response in an operation future.
response = operation.from_gapic(
@@ -477,13 +991,13 @@ def import_user_events(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
- "google-cloud-recommendations-ai"
- ).version
+ "google-cloud-recommendations-ai",
+ ).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("UserEventServiceClient",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/pagers.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/pagers.py
index 82fdac72..1e81f9d7 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/pagers.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/pagers.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, Callable, Iterable
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.recommendationengine_v1beta1.types import user_event
from google.cloud.recommendationengine_v1beta1.types import user_event_service
@@ -25,7 +34,7 @@ class ListUserEventsPager:
"""A pager for iterating through ``list_user_events`` requests.
This class thinly wraps an initial
- :class:`~.user_event_service.ListUserEventsResponse` object, and
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse` object, and
provides an ``__iter__`` method to iterate through its
``user_events`` field.
@@ -34,33 +43,35 @@ class ListUserEventsPager:
through the ``user_events`` field on the
corresponding responses.
- All the usual :class:`~.user_event_service.ListUserEventsResponse`
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
def __init__(
self,
- method: Callable[
- [user_event_service.ListUserEventsRequest],
- user_event_service.ListUserEventsResponse,
- ],
+ method: Callable[..., user_event_service.ListUserEventsResponse],
request: user_event_service.ListUserEventsRequest,
response: user_event_service.ListUserEventsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
):
"""Instantiate the pager.
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.user_event_service.ListUserEventsRequest`):
+ request (google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest):
The initial request object.
- response (:class:`~.user_event_service.ListUserEventsResponse`):
+ response (google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse):
The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
"""
self._method = method
self._request = user_event_service.ListUserEventsRequest(request)
self._response = response
+ self._metadata = metadata
def __getattr__(self, name: str) -> Any:
return getattr(self._response, name)
@@ -70,7 +81,7 @@ def pages(self) -> Iterable[user_event_service.ListUserEventsResponse]:
yield self._response
while self._response.next_page_token:
self._request.page_token = self._response.next_page_token
- self._response = self._method(self._request)
+ self._response = self._method(self._request, metadata=self._metadata)
yield self._response
def __iter__(self) -> Iterable[user_event.UserEvent]:
@@ -79,3 +90,69 @@ def __iter__(self) -> Iterable[user_event.UserEvent]:
def __repr__(self) -> str:
return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListUserEventsAsyncPager:
+ """A pager for iterating through ``list_user_events`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``user_events`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListUserEvents`` requests and continue to iterate
+ through the ``user_events`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[user_event_service.ListUserEventsResponse]],
+ request: user_event_service.ListUserEventsRequest,
+ response: user_event_service.ListUserEventsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.recommendationengine_v1beta1.types.ListUserEventsRequest):
+ The initial request object.
+ response (google.cloud.recommendationengine_v1beta1.types.ListUserEventsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = user_event_service.ListUserEventsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[user_event_service.ListUserEventsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[user_event.UserEvent]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.user_events:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/__init__.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/__init__.py
index c09f367c..c9246aa4 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -20,11 +20,16 @@
from .base import UserEventServiceTransport
from .grpc import UserEventServiceGrpcTransport
+from .grpc_asyncio import UserEventServiceGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[UserEventServiceTransport]]
_transport_registry["grpc"] = UserEventServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = UserEventServiceGrpcAsyncIOTransport
-
-__all__ = ("UserEventServiceTransport", "UserEventServiceGrpcTransport")
+__all__ = (
+ "UserEventServiceTransport",
+ "UserEventServiceGrpcTransport",
+ "UserEventServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/base.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/base.py
index 28440877..3db40469 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/base.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/base.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,19 +17,33 @@
import abc
import typing
+import pkg_resources
-from google import auth
+from google import auth # type: ignore
+from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials # type: ignore
from google.api import httpbody_pb2 as httpbody # type: ignore
from google.cloud.recommendationengine_v1beta1.types import import_
-from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
from google.cloud.recommendationengine_v1beta1.types import user_event_service
from google.longrunning import operations_pb2 as operations # type: ignore
-class UserEventServiceTransport(metaclass=abc.ABCMeta):
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-recommendations-ai",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class UserEventServiceTransport(abc.ABC):
"""Abstract transport class for UserEventService."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
@@ -39,6 +53,11 @@ def __init__(
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
+ credentials_file: typing.Optional[str] = None,
+ scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+ quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ **kwargs,
) -> None:
"""Instantiate the transport.
@@ -49,63 +68,175 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
- if credentials is None:
- credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+ if credentials and credentials_file:
+ raise exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = auth.load_credentials_from_file(
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = auth.default(
+ scopes=self._scopes, quota_project_id=quota_project_id
+ )
# Save the credentials.
self._credentials = credentials
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.write_user_event: gapic_v1.method.wrap_method(
+ self.write_user_event,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.collect_user_event: gapic_v1.method.wrap_method(
+ self.collect_user_event,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.list_user_events: gapic_v1.method.wrap_method(
+ self.list_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.purge_user_events: gapic_v1.method.wrap_method(
+ self.purge_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ self.import_user_events: gapic_v1.method.wrap_method(
+ self.import_user_events,
+ default_retry=retries.Retry(
+ initial=0.1,
+ maximum=60.0,
+ multiplier=1.3,
+ predicate=retries.if_exception_type(
+ exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+ ),
+ deadline=600.0,
+ ),
+ default_timeout=600.0,
+ client_info=client_info,
+ ),
+ }
+
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Return the client designed to process long-running operations."""
- raise NotImplementedError
+ raise NotImplementedError()
@property
def write_user_event(
- self
+ self,
) -> typing.Callable[
- [user_event_service.WriteUserEventRequest], user_event.UserEvent
+ [user_event_service.WriteUserEventRequest],
+ typing.Union[
+ gcr_user_event.UserEvent, typing.Awaitable[gcr_user_event.UserEvent]
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def collect_user_event(
- self
+ self,
) -> typing.Callable[
- [user_event_service.CollectUserEventRequest], httpbody.HttpBody
+ [user_event_service.CollectUserEventRequest],
+ typing.Union[httpbody.HttpBody, typing.Awaitable[httpbody.HttpBody]],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def list_user_events(
- self
+ self,
) -> typing.Callable[
[user_event_service.ListUserEventsRequest],
- user_event_service.ListUserEventsResponse,
+ typing.Union[
+ user_event_service.ListUserEventsResponse,
+ typing.Awaitable[user_event_service.ListUserEventsResponse],
+ ],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def purge_user_events(
- self
+ self,
) -> typing.Callable[
- [user_event_service.PurgeUserEventsRequest], operations.Operation
+ [user_event_service.PurgeUserEventsRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
]:
- raise NotImplementedError
+ raise NotImplementedError()
@property
def import_user_events(
- self
- ) -> typing.Callable[[import_.ImportUserEventsRequest], operations.Operation]:
- raise NotImplementedError
+ self,
+ ) -> typing.Callable[
+ [import_.ImportUserEventsRequest],
+ typing.Union[operations.Operation, typing.Awaitable[operations.Operation]],
+ ]:
+ raise NotImplementedError()
__all__ = ("UserEventServiceTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc.py
index 23f00e06..fbfe1a2c 100644
--- a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc.py
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,21 +15,25 @@
# limitations under the License.
#
-from typing import Callable, Dict
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.api import httpbody_pb2 as httpbody # type: ignore
from google.cloud.recommendationengine_v1beta1.types import import_
-from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
from google.cloud.recommendationengine_v1beta1.types import user_event_service
from google.longrunning import operations_pb2 as operations # type: ignore
-from .base import UserEventServiceTransport
+from .base import UserEventServiceTransport, DEFAULT_CLIENT_INFO
class UserEventServiceGrpcTransport(UserEventServiceTransport):
@@ -46,12 +50,22 @@ class UserEventServiceGrpcTransport(UserEventServiceTransport):
top of HTTP/2); the ``grpcio`` package must be installed.
"""
+ _stubs: Dict[str, Callable]
+
def __init__(
self,
*,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- channel: grpc.Channel = None
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -63,61 +77,155 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
- if channel:
- credentials = False
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
- # Run the base constructor.
- super().__init__(host=host, credentials=credentials)
- self._stubs = {} # type: Dict[str, Callable]
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
- # If a channel was explicitly provided, set it.
if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "recommendationengine.googleapis.com",
credentials: credentials.Credentials = None,
- **kwargs
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optionsl[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
"""
+ scopes = scopes or cls.AUTH_SCOPES
return grpc_helpers.create_channel(
- host, credentials=credentials, scopes=cls.AUTH_SCOPES, **kwargs
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
@@ -128,18 +236,16 @@ def operations_client(self) -> operations_v1.OperationsClient:
client.
"""
# Sanity check: Only create a new client if we do not already have one.
- if "operations_client" not in self.__dict__:
- self.__dict__["operations_client"] = operations_v1.OperationsClient(
- self.grpc_channel
- )
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsClient(self.grpc_channel)
# Return the client from cache.
- return self.__dict__["operations_client"]
+ return self._operations_client
@property
def write_user_event(
- self
- ) -> Callable[[user_event_service.WriteUserEventRequest], user_event.UserEvent]:
+ self,
+ ) -> Callable[[user_event_service.WriteUserEventRequest], gcr_user_event.UserEvent]:
r"""Return a callable for the write user event method over gRPC.
Writes a single user event.
@@ -158,13 +264,13 @@ def write_user_event(
self._stubs["write_user_event"] = self.grpc_channel.unary_unary(
"/google.cloud.recommendationengine.v1beta1.UserEventService/WriteUserEvent",
request_serializer=user_event_service.WriteUserEventRequest.serialize,
- response_deserializer=user_event.UserEvent.deserialize,
+ response_deserializer=gcr_user_event.UserEvent.deserialize,
)
return self._stubs["write_user_event"]
@property
def collect_user_event(
- self
+ self,
) -> Callable[[user_event_service.CollectUserEventRequest], httpbody.HttpBody]:
r"""Return a callable for the collect user event method over gRPC.
@@ -195,7 +301,7 @@ def collect_user_event(
@property
def list_user_events(
- self
+ self,
) -> Callable[
[user_event_service.ListUserEventsRequest],
user_event_service.ListUserEventsResponse,
@@ -225,7 +331,7 @@ def list_user_events(
@property
def purge_user_events(
- self
+ self,
) -> Callable[[user_event_service.PurgeUserEventsRequest], operations.Operation]:
r"""Return a callable for the purge user events method over gRPC.
@@ -255,7 +361,7 @@ def purge_user_events(
@property
def import_user_events(
- self
+ self,
) -> Callable[[import_.ImportUserEventsRequest], operations.Operation]:
r"""Return a callable for the import user events method over gRPC.
diff --git a/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc_asyncio.py b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc_asyncio.py
new file mode 100644
index 00000000..af3fb16c
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/services/user_event_service/transports/grpc_asyncio.py
@@ -0,0 +1,407 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
+from google.auth import credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.api import httpbody_pb2 as httpbody # type: ignore
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event_service
+from google.longrunning import operations_pb2 as operations # type: ignore
+
+from .base import UserEventServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import UserEventServiceGrpcTransport
+
+
+class UserEventServiceGrpcAsyncIOTransport(UserEventServiceTransport):
+ """gRPC AsyncIO backend transport for UserEventService.
+
+ Service for ingesting end user actions on the customer
+ website.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+ scopes = scopes or cls.AUTH_SCOPES
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "recommendationengine.googleapis.com",
+ credentials: credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]): The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def operations_client(self) -> operations_v1.OperationsAsyncClient:
+ """Create the client designed to process long-running operations.
+
+ This property caches on the instance; repeated calls return the same
+ client.
+ """
+ # Sanity check: Only create a new client if we do not already have one.
+ if self._operations_client is None:
+ self._operations_client = operations_v1.OperationsAsyncClient(
+ self.grpc_channel
+ )
+
+ # Return the client from cache.
+ return self._operations_client
+
+ @property
+ def write_user_event(
+ self,
+ ) -> Callable[
+ [user_event_service.WriteUserEventRequest], Awaitable[gcr_user_event.UserEvent]
+ ]:
+ r"""Return a callable for the write user event method over gRPC.
+
+ Writes a single user event.
+
+ Returns:
+ Callable[[~.WriteUserEventRequest],
+ Awaitable[~.UserEvent]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "write_user_event" not in self._stubs:
+ self._stubs["write_user_event"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.UserEventService/WriteUserEvent",
+ request_serializer=user_event_service.WriteUserEventRequest.serialize,
+ response_deserializer=gcr_user_event.UserEvent.deserialize,
+ )
+ return self._stubs["write_user_event"]
+
+ @property
+ def collect_user_event(
+ self,
+ ) -> Callable[
+ [user_event_service.CollectUserEventRequest], Awaitable[httpbody.HttpBody]
+ ]:
+ r"""Return a callable for the collect user event method over gRPC.
+
+ Writes a single user event from the browser. This
+ uses a GET request due to browser restriction of
+ POST-ing to a 3rd party domain.
+ This method is used only by the Recommendations AI
+ JavaScript pixel. Users should not call this method
+ directly.
+
+ Returns:
+ Callable[[~.CollectUserEventRequest],
+ Awaitable[~.HttpBody]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "collect_user_event" not in self._stubs:
+ self._stubs["collect_user_event"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.UserEventService/CollectUserEvent",
+ request_serializer=user_event_service.CollectUserEventRequest.serialize,
+ response_deserializer=httpbody.HttpBody.FromString,
+ )
+ return self._stubs["collect_user_event"]
+
+ @property
+ def list_user_events(
+ self,
+ ) -> Callable[
+ [user_event_service.ListUserEventsRequest],
+ Awaitable[user_event_service.ListUserEventsResponse],
+ ]:
+ r"""Return a callable for the list user events method over gRPC.
+
+ Gets a list of user events within a time range, with
+ potential filtering.
+
+ Returns:
+ Callable[[~.ListUserEventsRequest],
+ Awaitable[~.ListUserEventsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_user_events" not in self._stubs:
+ self._stubs["list_user_events"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.UserEventService/ListUserEvents",
+ request_serializer=user_event_service.ListUserEventsRequest.serialize,
+ response_deserializer=user_event_service.ListUserEventsResponse.deserialize,
+ )
+ return self._stubs["list_user_events"]
+
+ @property
+ def purge_user_events(
+ self,
+ ) -> Callable[
+ [user_event_service.PurgeUserEventsRequest], Awaitable[operations.Operation]
+ ]:
+ r"""Return a callable for the purge user events method over gRPC.
+
+ Deletes permanently all user events specified by the
+ filter provided. Depending on the number of events
+ specified by the filter, this operation could take hours
+ or days to complete. To test a filter, use the list
+ command first.
+
+ Returns:
+ Callable[[~.PurgeUserEventsRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "purge_user_events" not in self._stubs:
+ self._stubs["purge_user_events"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.UserEventService/PurgeUserEvents",
+ request_serializer=user_event_service.PurgeUserEventsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["purge_user_events"]
+
+ @property
+ def import_user_events(
+ self,
+ ) -> Callable[[import_.ImportUserEventsRequest], Awaitable[operations.Operation]]:
+ r"""Return a callable for the import user events method over gRPC.
+
+ Bulk import of User events. Request processing might
+ be synchronous. Events that already exist are skipped.
+ Use this method for backfilling historical user events.
+ Operation.response is of type ImportResponse. Note that
+ it is possible for a subset of the items to be
+ successfully inserted. Operation.metadata is of type
+ ImportMetadata.
+
+ Returns:
+ Callable[[~.ImportUserEventsRequest],
+ Awaitable[~.Operation]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "import_user_events" not in self._stubs:
+ self._stubs["import_user_events"] = self.grpc_channel.unary_unary(
+ "/google.cloud.recommendationengine.v1beta1.UserEventService/ImportUserEvents",
+ request_serializer=import_.ImportUserEventsRequest.serialize,
+ response_deserializer=operations.Operation.FromString,
+ )
+ return self._stubs["import_user_events"]
+
+
+__all__ = ("UserEventServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/recommendationengine_v1beta1/types/__init__.py b/google/cloud/recommendationengine_v1beta1/types/__init__.py
index e3f846c7..fc832b8c 100644
--- a/google/cloud/recommendationengine_v1beta1/types/__init__.py
+++ b/google/cloud/recommendationengine_v1beta1/types/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -15,96 +15,102 @@
# limitations under the License.
#
-from .common import FeatureMap
-from .catalog import CatalogItem, ProductCatalogItem, Image
-from .user_event import (
- UserEvent,
- UserInfo,
- EventDetail,
- ProductEventDetail,
- PurchaseTransaction,
- ProductDetail,
+from .catalog import (
+ CatalogItem,
+ Image,
+ ProductCatalogItem,
)
-from .prediction_service import PredictRequest, PredictResponse
+from .catalog_service import (
+ CreateCatalogItemRequest,
+ DeleteCatalogItemRequest,
+ GetCatalogItemRequest,
+ ListCatalogItemsRequest,
+ ListCatalogItemsResponse,
+ UpdateCatalogItemRequest,
+)
+from .common import FeatureMap
from .import_ import (
- GcsSource,
CatalogInlineSource,
- UserEventInlineSource,
- ImportErrorsConfig,
+ GcsSource,
ImportCatalogItemsRequest,
- ImportUserEventsRequest,
- InputConfig,
- ImportMetadata,
ImportCatalogItemsResponse,
+ ImportErrorsConfig,
+ ImportMetadata,
+ ImportUserEventsRequest,
ImportUserEventsResponse,
+ InputConfig,
UserEventImportSummary,
-)
-from .user_event_service import (
- PurgeUserEventsRequest,
- PurgeUserEventsMetadata,
- PurgeUserEventsResponse,
- WriteUserEventRequest,
- CollectUserEventRequest,
- ListUserEventsRequest,
- ListUserEventsResponse,
+ UserEventInlineSource,
)
from .prediction_apikey_registry_service import (
- PredictionApiKeyRegistration,
CreatePredictionApiKeyRegistrationRequest,
+ DeletePredictionApiKeyRegistrationRequest,
ListPredictionApiKeyRegistrationsRequest,
ListPredictionApiKeyRegistrationsResponse,
- DeletePredictionApiKeyRegistrationRequest,
+ PredictionApiKeyRegistration,
)
-from .catalog_service import (
- CreateCatalogItemRequest,
- GetCatalogItemRequest,
- ListCatalogItemsRequest,
- ListCatalogItemsResponse,
- UpdateCatalogItemRequest,
- DeleteCatalogItemRequest,
+from .prediction_service import (
+ PredictRequest,
+ PredictResponse,
+)
+from .user_event import (
+ EventDetail,
+ ProductDetail,
+ ProductEventDetail,
+ PurchaseTransaction,
+ UserEvent,
+ UserInfo,
+)
+from .user_event_service import (
+ CollectUserEventRequest,
+ ListUserEventsRequest,
+ ListUserEventsResponse,
+ PurgeUserEventsMetadata,
+ PurgeUserEventsRequest,
+ PurgeUserEventsResponse,
+ WriteUserEventRequest,
)
-
__all__ = (
- "FeatureMap",
"CatalogItem",
- "ProductCatalogItem",
"Image",
- "UserEvent",
- "UserInfo",
- "EventDetail",
- "ProductEventDetail",
- "PurchaseTransaction",
- "ProductDetail",
- "PredictRequest",
- "PredictResponse",
- "GcsSource",
+ "ProductCatalogItem",
+ "CreateCatalogItemRequest",
+ "DeleteCatalogItemRequest",
+ "GetCatalogItemRequest",
+ "ListCatalogItemsRequest",
+ "ListCatalogItemsResponse",
+ "UpdateCatalogItemRequest",
+ "FeatureMap",
"CatalogInlineSource",
- "UserEventInlineSource",
- "ImportErrorsConfig",
+ "GcsSource",
"ImportCatalogItemsRequest",
- "ImportUserEventsRequest",
- "InputConfig",
- "ImportMetadata",
"ImportCatalogItemsResponse",
+ "ImportErrorsConfig",
+ "ImportMetadata",
+ "ImportUserEventsRequest",
"ImportUserEventsResponse",
+ "InputConfig",
"UserEventImportSummary",
- "PurgeUserEventsRequest",
- "PurgeUserEventsMetadata",
- "PurgeUserEventsResponse",
- "WriteUserEventRequest",
- "CollectUserEventRequest",
- "ListUserEventsRequest",
- "ListUserEventsResponse",
- "PredictionApiKeyRegistration",
+ "UserEventInlineSource",
"CreatePredictionApiKeyRegistrationRequest",
+ "DeletePredictionApiKeyRegistrationRequest",
"ListPredictionApiKeyRegistrationsRequest",
"ListPredictionApiKeyRegistrationsResponse",
- "DeletePredictionApiKeyRegistrationRequest",
- "CreateCatalogItemRequest",
- "GetCatalogItemRequest",
- "ListCatalogItemsRequest",
- "ListCatalogItemsResponse",
- "UpdateCatalogItemRequest",
- "DeleteCatalogItemRequest",
+ "PredictionApiKeyRegistration",
+ "PredictRequest",
+ "PredictResponse",
+ "EventDetail",
+ "ProductDetail",
+ "ProductEventDetail",
+ "PurchaseTransaction",
+ "UserEvent",
+ "UserInfo",
+ "CollectUserEventRequest",
+ "ListUserEventsRequest",
+ "ListUserEventsResponse",
+ "PurgeUserEventsMetadata",
+ "PurgeUserEventsRequest",
+ "PurgeUserEventsResponse",
+ "WriteUserEventRequest",
)
diff --git a/google/cloud/recommendationengine_v1beta1/types/catalog.py b/google/cloud/recommendationengine_v1beta1/types/catalog.py
index 667999ed..ab92b2af 100644
--- a/google/cloud/recommendationengine_v1beta1/types/catalog.py
+++ b/google/cloud/recommendationengine_v1beta1/types/catalog.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -23,7 +23,7 @@
__protobuf__ = proto.module(
package="google.cloud.recommendationengine.v1beta1",
- manifest={"CatalogItem", "ProductCatalogItem", "Image"},
+ manifest={"CatalogItem", "ProductCatalogItem", "Image",},
)
@@ -39,7 +39,7 @@ class CatalogItem(proto.Message):
within the same catalog. It should also be used
when logging user events in order for the user
events to be joined with the Catalog.
- category_hierarchies (Sequence[~.catalog.CatalogItem.CategoryHierarchy]):
+ category_hierarchies (Sequence[google.cloud.recommendationengine_v1beta1.types.CatalogItem.CategoryHierarchy]):
Required. Catalog item categories. This field is repeated
for supporting one catalog item belonging to several
parallel category hierarchies.
@@ -60,7 +60,7 @@ class CatalogItem(proto.Message):
description (str):
Optional. Catalog item description. UTF-8
encoded string with a length limit of 5 KiB.
- item_attributes (~.common.FeatureMap):
+ item_attributes (google.cloud.recommendationengine_v1beta1.types.FeatureMap):
Optional. Highly encouraged. Extra catalog
item attributes to be included in the
recommendation model. For example, for retail
@@ -88,7 +88,7 @@ class CatalogItem(proto.Message):
This field must be enabled before it can be used. `Learn
more `__.
- product_metadata (~.catalog.ProductCatalogItem):
+ product_metadata (google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem):
Optional. Metadata specific to retail
products.
"""
@@ -108,17 +108,28 @@ class CategoryHierarchy(proto.Message):
categories = proto.RepeatedField(proto.STRING, number=1)
id = proto.Field(proto.STRING, number=1)
+
category_hierarchies = proto.RepeatedField(
- proto.MESSAGE, number=2, message=CategoryHierarchy
+ proto.MESSAGE, number=2, message=CategoryHierarchy,
)
+
title = proto.Field(proto.STRING, number=3)
+
description = proto.Field(proto.STRING, number=4)
- item_attributes = proto.Field(proto.MESSAGE, number=5, message=common.FeatureMap)
+
+ item_attributes = proto.Field(proto.MESSAGE, number=5, message=common.FeatureMap,)
+
language_code = proto.Field(proto.STRING, number=6)
+
tags = proto.RepeatedField(proto.STRING, number=8)
+
item_group_id = proto.Field(proto.STRING, number=9)
+
product_metadata = proto.Field(
- proto.MESSAGE, number=10, message="ProductCatalogItem"
+ proto.MESSAGE,
+ number=10,
+ oneof="recommendation_type",
+ message="ProductCatalogItem",
)
@@ -127,11 +138,11 @@ class ProductCatalogItem(proto.Message):
products.
Attributes:
- exact_price (~.catalog.ProductCatalogItem.ExactPrice):
+ exact_price (google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem.ExactPrice):
Optional. The exact product price.
- price_range (~.catalog.ProductCatalogItem.PriceRange):
+ price_range (google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem.PriceRange):
Optional. The product price range.
- costs (Sequence[~.catalog.ProductCatalogItem.CostsEntry]):
+ costs (Sequence[google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem.CostsEntry]):
Optional. A map to pass the costs associated with the
product.
@@ -146,7 +157,7 @@ class ProductCatalogItem(proto.Message):
Optional. Only required if the price is set.
Currency code for price/costs. Use three-
character ISO-4217 code.
- stock_state (~.catalog.ProductCatalogItem.StockState):
+ stock_state (google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem.StockState):
Optional. Online stock state of the catalog item. Default is
``IN_STOCK``.
available_quantity (int):
@@ -155,7 +166,7 @@ class ProductCatalogItem(proto.Message):
Optional. Canonical URL directly linking to
the item detail page with a length limit of 5
KiB.
- images (Sequence[~.catalog.Image]):
+ images (Sequence[google.cloud.recommendationengine_v1beta1.types.Image]):
Optional. Product images for the catalog
item.
"""
@@ -164,6 +175,7 @@ class StockState(proto.Enum):
r"""Item stock state. If this field is unspecified, the item is
assumed to be in stock.
"""
+ _pb_options = {"allow_alias": True}
STOCK_STATE_UNSPECIFIED = 0
IN_STOCK = 0
OUT_OF_STOCK = 1
@@ -183,6 +195,7 @@ class ExactPrice(proto.Message):
"""
display_price = proto.Field(proto.FLOAT, number=1)
+
original_price = proto.Field(proto.FLOAT, number=2)
class PriceRange(proto.Message):
@@ -190,23 +203,35 @@ class PriceRange(proto.Message):
different variations of the same product.
Attributes:
- min (float):
+ min_ (float):
Required. The minimum product price.
- max (float):
+ max_ (float):
Required. The maximum product price.
"""
- min = proto.Field(proto.FLOAT, number=1)
- max = proto.Field(proto.FLOAT, number=2)
+ min_ = proto.Field(proto.FLOAT, number=1)
+
+ max_ = proto.Field(proto.FLOAT, number=2)
+
+ exact_price = proto.Field(
+ proto.MESSAGE, number=1, oneof="price", message=ExactPrice,
+ )
+
+ price_range = proto.Field(
+ proto.MESSAGE, number=2, oneof="price", message=PriceRange,
+ )
- exact_price = proto.Field(proto.MESSAGE, number=1, message=ExactPrice)
- price_range = proto.Field(proto.MESSAGE, number=2, message=PriceRange)
costs = proto.MapField(proto.STRING, proto.FLOAT, number=3)
+
currency_code = proto.Field(proto.STRING, number=4)
- stock_state = proto.Field(proto.ENUM, number=5, enum=StockState)
+
+ stock_state = proto.Field(proto.ENUM, number=5, enum=StockState,)
+
available_quantity = proto.Field(proto.INT64, number=6)
+
canonical_product_uri = proto.Field(proto.STRING, number=7)
- images = proto.RepeatedField(proto.MESSAGE, number=8, message="Image")
+
+ images = proto.RepeatedField(proto.MESSAGE, number=8, message="Image",)
class Image(proto.Message):
@@ -225,7 +250,9 @@ class Image(proto.Message):
"""
uri = proto.Field(proto.STRING, number=1)
+
height = proto.Field(proto.INT32, number=2)
+
width = proto.Field(proto.INT32, number=3)
diff --git a/google/cloud/recommendationengine_v1beta1/types/catalog_service.py b/google/cloud/recommendationengine_v1beta1/types/catalog_service.py
index 99f83cd8..bb04239c 100644
--- a/google/cloud/recommendationengine_v1beta1/types/catalog_service.py
+++ b/google/cloud/recommendationengine_v1beta1/types/catalog_service.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -42,12 +42,13 @@ class CreateCatalogItemRequest(proto.Message):
parent (str):
Required. The parent catalog resource name, such as
``projects/*/locations/global/catalogs/default_catalog``
- catalog_item (~.catalog.CatalogItem):
+ catalog_item (google.cloud.recommendationengine_v1beta1.types.CatalogItem):
Required. The catalog item to create.
"""
parent = proto.Field(proto.STRING, number=1)
- catalog_item = proto.Field(proto.MESSAGE, number=2, message=catalog.CatalogItem)
+
+ catalog_item = proto.Field(proto.MESSAGE, number=2, message=catalog.CatalogItem,)
class GetCatalogItemRequest(proto.Message):
@@ -82,8 +83,11 @@ class ListCatalogItemsRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
page_size = proto.Field(proto.INT32, number=2)
+
page_token = proto.Field(proto.STRING, number=3)
+
filter = proto.Field(proto.STRING, number=4)
@@ -91,7 +95,7 @@ class ListCatalogItemsResponse(proto.Message):
r"""Response message for ListCatalogItems method.
Attributes:
- catalog_items (Sequence[~.catalog.CatalogItem]):
+ catalog_items (Sequence[google.cloud.recommendationengine_v1beta1.types.CatalogItem]):
The catalog items.
next_page_token (str):
If empty, the list is complete. If nonempty, the token to
@@ -104,8 +108,9 @@ def raw_page(self):
return self
catalog_items = proto.RepeatedField(
- proto.MESSAGE, number=1, message=catalog.CatalogItem
+ proto.MESSAGE, number=1, message=catalog.CatalogItem,
)
+
next_page_token = proto.Field(proto.STRING, number=2)
@@ -116,18 +121,20 @@ class UpdateCatalogItemRequest(proto.Message):
name (str):
Required. Full resource name of catalog item, such as
``projects/*/locations/global/catalogs/default_catalog/catalogItems/some_catalog_item_id``
- catalog_item (~.catalog.CatalogItem):
+ catalog_item (google.cloud.recommendationengine_v1beta1.types.CatalogItem):
Required. The catalog item to update/create. The
'catalog_item_id' field has to match that in the 'name'.
- update_mask (~.field_mask.FieldMask):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Optional. Indicates which fields in the
provided 'item' to update. If not set, will by
default update all fields.
"""
name = proto.Field(proto.STRING, number=1)
- catalog_item = proto.Field(proto.MESSAGE, number=2, message=catalog.CatalogItem)
- update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask)
+
+ catalog_item = proto.Field(proto.MESSAGE, number=2, message=catalog.CatalogItem,)
+
+ update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,)
class DeleteCatalogItemRequest(proto.Message):
diff --git a/google/cloud/recommendationengine_v1beta1/types/common.py b/google/cloud/recommendationengine_v1beta1/types/common.py
index b03a4d58..8d79cc1a 100644
--- a/google/cloud/recommendationengine_v1beta1/types/common.py
+++ b/google/cloud/recommendationengine_v1beta1/types/common.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,7 +19,7 @@
__protobuf__ = proto.module(
- package="google.cloud.recommendationengine.v1beta1", manifest={"FeatureMap"}
+ package="google.cloud.recommendationengine.v1beta1", manifest={"FeatureMap",},
)
@@ -29,7 +29,7 @@ class FeatureMap(proto.Message):
categorical/numerical features.
Attributes:
- categorical_features (Sequence[~.common.FeatureMap.CategoricalFeaturesEntry]):
+ categorical_features (Sequence[google.cloud.recommendationengine_v1beta1.types.FeatureMap.CategoricalFeaturesEntry]):
Categorical features that can take on one of a limited
number of possible values. Some examples would be the
brand/maker of a product, or country of a customer.
@@ -38,7 +38,7 @@ class FeatureMap(proto.Message):
For example:
``{ "colors": {"value": ["yellow", "green"]}, "sizes": {"value":["S", "M"]}``
- numerical_features (Sequence[~.common.FeatureMap.NumericalFeaturesEntry]):
+ numerical_features (Sequence[google.cloud.recommendationengine_v1beta1.types.FeatureMap.NumericalFeaturesEntry]):
Numerical features. Some examples would be the height/weight
of a product, or age of a customer.
@@ -70,10 +70,11 @@ class FloatList(proto.Message):
value = proto.RepeatedField(proto.FLOAT, number=1)
categorical_features = proto.MapField(
- proto.STRING, proto.MESSAGE, number=1, message=StringList
+ proto.STRING, proto.MESSAGE, number=1, message=StringList,
)
+
numerical_features = proto.MapField(
- proto.STRING, proto.MESSAGE, number=2, message=FloatList
+ proto.STRING, proto.MESSAGE, number=2, message=FloatList,
)
diff --git a/google/cloud/recommendationengine_v1beta1/types/import_.py b/google/cloud/recommendationengine_v1beta1/types/import_.py
index 0f72fbfc..10b70713 100644
--- a/google/cloud/recommendationengine_v1beta1/types/import_.py
+++ b/google/cloud/recommendationengine_v1beta1/types/import_.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -67,13 +67,13 @@ class CatalogInlineSource(proto.Message):
method.
Attributes:
- catalog_items (Sequence[~.catalog.CatalogItem]):
+ catalog_items (Sequence[google.cloud.recommendationengine_v1beta1.types.CatalogItem]):
Optional. A list of catalog items to
update/create. Recommended max of 10k items.
"""
catalog_items = proto.RepeatedField(
- proto.MESSAGE, number=1, message=catalog.CatalogItem
+ proto.MESSAGE, number=1, message=catalog.CatalogItem,
)
@@ -82,13 +82,13 @@ class UserEventInlineSource(proto.Message):
method.
Attributes:
- user_events (Sequence[~.user_event.UserEvent]):
+ user_events (Sequence[google.cloud.recommendationengine_v1beta1.types.UserEvent]):
Optional. A list of user events to import.
Recommended max of 10k items.
"""
user_events = proto.RepeatedField(
- proto.MESSAGE, number=1, message=user_event.UserEvent
+ proto.MESSAGE, number=1, message=user_event.UserEvent,
)
@@ -103,7 +103,7 @@ class ImportErrorsConfig(proto.Message):
JSON-encoded ``google.rpc.Status`` message.
"""
- gcs_prefix = proto.Field(proto.STRING, number=1)
+ gcs_prefix = proto.Field(proto.STRING, number=1, oneof="destination")
class ImportCatalogItemsRequest(proto.Message):
@@ -121,18 +121,21 @@ class ImportCatalogItemsRequest(proto.Message):
Up to 128 characters long. This is returned as
google.longrunning.Operation.name in the
response.
- input_config (~.import_.InputConfig):
+ input_config (google.cloud.recommendationengine_v1beta1.types.InputConfig):
Required. The desired input location of the
data.
- errors_config (~.import_.ImportErrorsConfig):
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
Optional. The desired location of errors
incurred during the Import.
"""
parent = proto.Field(proto.STRING, number=1)
+
request_id = proto.Field(proto.STRING, number=2)
- input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig")
- errors_config = proto.Field(proto.MESSAGE, number=4, message=ImportErrorsConfig)
+
+ input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig",)
+
+ errors_config = proto.Field(proto.MESSAGE, number=4, message="ImportErrorsConfig",)
class ImportUserEventsRequest(proto.Message):
@@ -150,41 +153,48 @@ class ImportUserEventsRequest(proto.Message):
google.longrunning.Operation.name in the response. Note that
this field must not be set if the desired input config is
catalog_inline_source.
- input_config (~.import_.InputConfig):
+ input_config (google.cloud.recommendationengine_v1beta1.types.InputConfig):
Required. The desired input location of the
data.
- errors_config (~.import_.ImportErrorsConfig):
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
Optional. The desired location of errors
incurred during the Import.
"""
parent = proto.Field(proto.STRING, number=1)
+
request_id = proto.Field(proto.STRING, number=2)
- input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig")
- errors_config = proto.Field(proto.MESSAGE, number=4, message=ImportErrorsConfig)
+
+ input_config = proto.Field(proto.MESSAGE, number=3, message="InputConfig",)
+
+ errors_config = proto.Field(proto.MESSAGE, number=4, message="ImportErrorsConfig",)
class InputConfig(proto.Message):
r"""The input config source.
Attributes:
- catalog_inline_source (~.import_.CatalogInlineSource):
+ catalog_inline_source (google.cloud.recommendationengine_v1beta1.types.CatalogInlineSource):
The Inline source for the input content for
Catalog items.
- gcs_source (~.import_.GcsSource):
+ gcs_source (google.cloud.recommendationengine_v1beta1.types.GcsSource):
Google Cloud Storage location for the input
content.
- user_event_inline_source (~.import_.UserEventInlineSource):
+ user_event_inline_source (google.cloud.recommendationengine_v1beta1.types.UserEventInlineSource):
The Inline source for the input content for
UserEvents.
"""
catalog_inline_source = proto.Field(
- proto.MESSAGE, number=1, message=CatalogInlineSource
+ proto.MESSAGE, number=1, oneof="source", message="CatalogInlineSource",
+ )
+
+ gcs_source = proto.Field(
+ proto.MESSAGE, number=2, oneof="source", message="GcsSource",
)
- gcs_source = proto.Field(proto.MESSAGE, number=2, message=GcsSource)
+
user_event_inline_source = proto.Field(
- proto.MESSAGE, number=3, message=UserEventInlineSource
+ proto.MESSAGE, number=3, oneof="source", message="UserEventInlineSource",
)
@@ -200,7 +210,7 @@ class ImportMetadata(proto.Message):
Id of the request / operation. This is
parroting back the requestId that was passed in
the request.
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Operation create time.
success_count (int):
Count of entries that were processed
@@ -208,17 +218,22 @@ class ImportMetadata(proto.Message):
failure_count (int):
Count of entries that encountered errors
while processing.
- update_time (~.timestamp.Timestamp):
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
Operation last update time. If the operation
is done, this is also the finish time.
"""
operation_name = proto.Field(proto.STRING, number=5)
+
request_id = proto.Field(proto.STRING, number=3)
- create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp)
+
+ create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)
+
success_count = proto.Field(proto.INT64, number=1)
+
failure_count = proto.Field(proto.INT64, number=2)
- update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp)
+
+ update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,)
class ImportCatalogItemsResponse(proto.Message):
@@ -228,16 +243,17 @@ class ImportCatalogItemsResponse(proto.Message):
was successful.
Attributes:
- error_samples (Sequence[~.status.Status]):
+ error_samples (Sequence[google.rpc.status_pb2.Status]):
A sample of errors encountered while
processing the request.
- errors_config (~.import_.ImportErrorsConfig):
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
Echoes the destination for the complete
errors in the request if set.
"""
- error_samples = proto.RepeatedField(proto.MESSAGE, number=1, message=status.Status)
- errors_config = proto.Field(proto.MESSAGE, number=2, message=ImportErrorsConfig)
+ error_samples = proto.RepeatedField(proto.MESSAGE, number=1, message=status.Status,)
+
+ errors_config = proto.Field(proto.MESSAGE, number=2, message="ImportErrorsConfig",)
class ImportUserEventsResponse(proto.Message):
@@ -247,21 +263,23 @@ class ImportUserEventsResponse(proto.Message):
was successful.
Attributes:
- error_samples (Sequence[~.status.Status]):
+ error_samples (Sequence[google.rpc.status_pb2.Status]):
A sample of errors encountered while
processing the request.
- errors_config (~.import_.ImportErrorsConfig):
+ errors_config (google.cloud.recommendationengine_v1beta1.types.ImportErrorsConfig):
Echoes the destination for the complete
errors if this field was set in the request.
- import_summary (~.import_.UserEventImportSummary):
+ import_summary (google.cloud.recommendationengine_v1beta1.types.UserEventImportSummary):
Aggregated statistics of user event import
status.
"""
- error_samples = proto.RepeatedField(proto.MESSAGE, number=1, message=status.Status)
- errors_config = proto.Field(proto.MESSAGE, number=2, message=ImportErrorsConfig)
+ error_samples = proto.RepeatedField(proto.MESSAGE, number=1, message=status.Status,)
+
+ errors_config = proto.Field(proto.MESSAGE, number=2, message="ImportErrorsConfig",)
+
import_summary = proto.Field(
- proto.MESSAGE, number=3, message="UserEventImportSummary"
+ proto.MESSAGE, number=3, message="UserEventImportSummary",
)
@@ -280,6 +298,7 @@ class UserEventImportSummary(proto.Message):
"""
joined_events_count = proto.Field(proto.INT64, number=1)
+
unjoined_events_count = proto.Field(proto.INT64, number=2)
diff --git a/google/cloud/recommendationengine_v1beta1/types/prediction_apikey_registry_service.py b/google/cloud/recommendationengine_v1beta1/types/prediction_apikey_registry_service.py
index 462f55cb..150f27e8 100644
--- a/google/cloud/recommendationengine_v1beta1/types/prediction_apikey_registry_service.py
+++ b/google/cloud/recommendationengine_v1beta1/types/prediction_apikey_registry_service.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -49,14 +49,15 @@ class CreatePredictionApiKeyRegistrationRequest(proto.Message):
parent (str):
Required. The parent resource path.
``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store``
- prediction_api_key_registration (~.prediction_apikey_registry_service.PredictionApiKeyRegistration):
+ prediction_api_key_registration (google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration):
Required. The prediction API key
registration.
"""
parent = proto.Field(proto.STRING, number=1)
+
prediction_api_key_registration = proto.Field(
- proto.MESSAGE, number=2, message=PredictionApiKeyRegistration
+ proto.MESSAGE, number=2, message="PredictionApiKeyRegistration",
)
@@ -77,7 +78,9 @@ class ListPredictionApiKeyRegistrationsRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
page_size = proto.Field(proto.INT32, number=2)
+
page_token = proto.Field(proto.STRING, number=3)
@@ -85,7 +88,7 @@ class ListPredictionApiKeyRegistrationsResponse(proto.Message):
r"""Response message for the ``ListPredictionApiKeyRegistrations``.
Attributes:
- prediction_api_key_registrations (Sequence[~.prediction_apikey_registry_service.PredictionApiKeyRegistration]):
+ prediction_api_key_registrations (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictionApiKeyRegistration]):
The list of registered API keys.
next_page_token (str):
If empty, the list is complete. If nonempty, pass the token
@@ -98,8 +101,9 @@ def raw_page(self):
return self
prediction_api_key_registrations = proto.RepeatedField(
- proto.MESSAGE, number=1, message=PredictionApiKeyRegistration
+ proto.MESSAGE, number=1, message="PredictionApiKeyRegistration",
)
+
next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/recommendationengine_v1beta1/types/prediction_service.py b/google/cloud/recommendationengine_v1beta1/types/prediction_service.py
index 79c7d019..eba37139 100644
--- a/google/cloud/recommendationengine_v1beta1/types/prediction_service.py
+++ b/google/cloud/recommendationengine_v1beta1/types/prediction_service.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
__protobuf__ = proto.module(
package="google.cloud.recommendationengine.v1beta1",
- manifest={"PredictRequest", "PredictResponse"},
+ manifest={"PredictRequest", "PredictResponse",},
)
@@ -68,7 +68,7 @@ class PredictRequest(proto.Message):
The full list of available placements can be seen at
https://console.cloud.google.com/recommendation/datafeeds/default_catalog/dashboard
- user_event (~.gcr_user_event.UserEvent):
+ user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
Required. Context about the user, what they
are looking at and what action they took to
trigger the predict request. Note that this user
@@ -109,7 +109,7 @@ class PredictRequest(proto.Message):
used that returns arbitrary catalog items. Note
that the dryRun mode should only be used for
testing the API, or if the model is not ready.
- params (Sequence[~.prediction_service.PredictRequest.ParamsEntry]):
+ params (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictRequest.ParamsEntry]):
Optional. Additional domain specific parameters for the
predictions.
@@ -125,7 +125,7 @@ class PredictRequest(proto.Message):
response. The given 'score' indicates the probability of
an item being clicked/purchased given the user's context
and history.
- labels (Sequence[~.prediction_service.PredictRequest.LabelsEntry]):
+ labels (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictRequest.LabelsEntry]):
Optional. The labels for the predict request.
- Label keys can contain lowercase letters, digits and
@@ -142,12 +142,21 @@ class PredictRequest(proto.Message):
"""
name = proto.Field(proto.STRING, number=1)
- user_event = proto.Field(proto.MESSAGE, number=2, message=gcr_user_event.UserEvent)
+
+ user_event = proto.Field(proto.MESSAGE, number=2, message=gcr_user_event.UserEvent,)
+
page_size = proto.Field(proto.INT32, number=7)
+
page_token = proto.Field(proto.STRING, number=8)
+
filter = proto.Field(proto.STRING, number=3)
+
dry_run = proto.Field(proto.BOOL, number=4)
- params = proto.MapField(proto.STRING, proto.MESSAGE, number=6, message=struct.Value)
+
+ params = proto.MapField(
+ proto.STRING, proto.MESSAGE, number=6, message=struct.Value,
+ )
+
labels = proto.MapField(proto.STRING, proto.STRING, number=9)
@@ -155,7 +164,7 @@ class PredictResponse(proto.Message):
r"""Response message for predict method.
Attributes:
- results (Sequence[~.prediction_service.PredictResponse.PredictionResult]):
+ results (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictResponse.PredictionResult]):
A list of recommended items. The order
represents the ranking (from the most relevant
item to the least).
@@ -170,7 +179,7 @@ class PredictResponse(proto.Message):
dry_run (bool):
True if the dryRun property was set in the
request.
- metadata (Sequence[~.prediction_service.PredictResponse.MetadataEntry]):
+ metadata (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictResponse.MetadataEntry]):
Additional domain specific prediction
response metadata.
next_page_token (str):
@@ -185,7 +194,7 @@ class PredictionResult(proto.Message):
Attributes:
id (str):
ID of the recommended catalog item
- item_metadata (Sequence[~.prediction_service.PredictResponse.PredictionResult.ItemMetadataEntry]):
+ item_metadata (Sequence[google.cloud.recommendationengine_v1beta1.types.PredictResponse.PredictionResult.ItemMetadataEntry]):
Additional item metadata / annotations.
Possible values:
@@ -199,21 +208,27 @@ class PredictionResult(proto.Message):
"""
id = proto.Field(proto.STRING, number=1)
+
item_metadata = proto.MapField(
- proto.STRING, proto.MESSAGE, number=2, message=struct.Value
+ proto.STRING, proto.MESSAGE, number=2, message=struct.Value,
)
@property
def raw_page(self):
return self
- results = proto.RepeatedField(proto.MESSAGE, number=1, message=PredictionResult)
+ results = proto.RepeatedField(proto.MESSAGE, number=1, message=PredictionResult,)
+
recommendation_token = proto.Field(proto.STRING, number=2)
+
items_missing_in_catalog = proto.RepeatedField(proto.STRING, number=3)
+
dry_run = proto.Field(proto.BOOL, number=4)
+
metadata = proto.MapField(
- proto.STRING, proto.MESSAGE, number=5, message=struct.Value
+ proto.STRING, proto.MESSAGE, number=5, message=struct.Value,
)
+
next_page_token = proto.Field(proto.STRING, number=6)
diff --git a/google/cloud/recommendationengine_v1beta1/types/recommendationengine_resources.py b/google/cloud/recommendationengine_v1beta1/types/recommendationengine_resources.py
new file mode 100644
index 00000000..b9777025
--- /dev/null
+++ b/google/cloud/recommendationengine_v1beta1/types/recommendationengine_resources.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+__protobuf__ = proto.module(
+ package="google.cloud.recommendationengine.v1beta1", manifest={},
+)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/recommendationengine_v1beta1/types/user_event.py b/google/cloud/recommendationengine_v1beta1/types/user_event.py
index f48b64de..8c1f0e3a 100644
--- a/google/cloud/recommendationengine_v1beta1/types/user_event.py
+++ b/google/cloud/recommendationengine_v1beta1/types/user_event.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -63,12 +63,12 @@ class UserEvent(proto.Message):
- ``shopping-cart-page-view`` User viewing a shopping cart.
- ``impression`` List of items displayed. Used by Google
Tag Manager.
- user_info (~.user_event.UserInfo):
+ user_info (google.cloud.recommendationengine_v1beta1.types.UserInfo):
Required. User information.
- event_detail (~.user_event.EventDetail):
+ event_detail (google.cloud.recommendationengine_v1beta1.types.EventDetail):
Optional. User event detailed information
common across different recommendation types.
- product_event_detail (~.user_event.ProductEventDetail):
+ product_event_detail (google.cloud.recommendationengine_v1beta1.types.ProductEventDetail):
Optional. Retail product specific user event metadata.
This field is required for the following event types:
@@ -94,10 +94,10 @@ class UserEvent(proto.Message):
This field is not allowed for the following event types:
- ``home-page-view``
- event_time (~.timestamp.Timestamp):
+ event_time (google.protobuf.timestamp_pb2.Timestamp):
Optional. Only required for ImportUserEvents
method. Timestamp of user event created.
- event_source (~.user_event.UserEvent.EventSource):
+ event_source (google.cloud.recommendationengine_v1beta1.types.UserEvent.EventSource):
Optional. This field should *not* be set when using
JavaScript pixel or the Recommendations AI Tag. Defaults to
``EVENT_SOURCE_UNSPECIFIED``.
@@ -111,13 +111,18 @@ class EventSource(proto.Enum):
BATCH_UPLOAD = 3
event_type = proto.Field(proto.STRING, number=1)
- user_info = proto.Field(proto.MESSAGE, number=2, message="UserInfo")
- event_detail = proto.Field(proto.MESSAGE, number=3, message="EventDetail")
+
+ user_info = proto.Field(proto.MESSAGE, number=2, message="UserInfo",)
+
+ event_detail = proto.Field(proto.MESSAGE, number=3, message="EventDetail",)
+
product_event_detail = proto.Field(
- proto.MESSAGE, number=4, message="ProductEventDetail"
+ proto.MESSAGE, number=4, message="ProductEventDetail",
)
- event_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp)
- event_source = proto.Field(proto.ENUM, number=6, enum=EventSource)
+
+ event_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,)
+
+ event_source = proto.Field(proto.ENUM, number=6, enum=EventSource,)
class UserInfo(proto.Message):
@@ -162,9 +167,13 @@ class UserInfo(proto.Message):
"""
visitor_id = proto.Field(proto.STRING, number=1)
+
user_id = proto.Field(proto.STRING, number=2)
+
ip_address = proto.Field(proto.STRING, number=3)
+
user_agent = proto.Field(proto.STRING, number=4)
+
direct_user_request = proto.Field(proto.BOOL, number=5)
@@ -215,7 +224,7 @@ class EventDetail(proto.Message):
Optional, but highly encouraged for user events that are the
result of a recommendation prediction query.
- event_attributes (~.common.FeatureMap):
+ event_attributes (google.cloud.recommendationengine_v1beta1.types.FeatureMap):
Optional. Extra user event features to include in the
recommendation model.
@@ -226,11 +235,16 @@ class EventDetail(proto.Message):
"""
uri = proto.Field(proto.STRING, number=1)
+
referrer_uri = proto.Field(proto.STRING, number=6)
+
page_view_id = proto.Field(proto.STRING, number=2)
+
experiment_ids = proto.RepeatedField(proto.STRING, number=3)
+
recommendation_token = proto.Field(proto.STRING, number=4)
- event_attributes = proto.Field(proto.MESSAGE, number=5, message=common.FeatureMap)
+
+ event_attributes = proto.Field(proto.MESSAGE, number=5, message=common.FeatureMap,)
class ProductEventDetail(proto.Message):
@@ -242,14 +256,14 @@ class ProductEventDetail(proto.Message):
Required for ``search`` events. Other event types should not
set this field. The user's search query as UTF-8 encoded
text with a length limit of 5 KiB.
- page_categories (Sequence[~.catalog.CatalogItem.CategoryHierarchy]):
+ page_categories (Sequence[google.cloud.recommendationengine_v1beta1.types.CatalogItem.CategoryHierarchy]):
Required for ``category-page-view`` events. Other event
types should not set this field. The categories associated
with a category page. Category pages include special pages
such as sales or promotions. For instance, a special sale
page may have the category hierarchy: categories : ["Sales",
"2017 Black Friday Deals"].
- product_details (Sequence[~.user_event.ProductDetail]):
+ product_details (Sequence[google.cloud.recommendationengine_v1beta1.types.ProductDetail]):
The main product details related to the event.
This field is required for the following event types:
@@ -288,7 +302,7 @@ class ProductEventDetail(proto.Message):
``remove-from-cart``, ``checkout-start``,
``purchase-complete``, or ``shopping-cart-page-view``
events.
- purchase_transaction (~.user_event.PurchaseTransaction):
+ purchase_transaction (google.cloud.recommendationengine_v1beta1.types.PurchaseTransaction):
Optional. A transaction represents the entire purchase
transaction. Required for ``purchase-complete`` events.
Optional for ``checkout-start`` events. Other event types
@@ -296,16 +310,21 @@ class ProductEventDetail(proto.Message):
"""
search_query = proto.Field(proto.STRING, number=1)
+
page_categories = proto.RepeatedField(
- proto.MESSAGE, number=2, message=catalog.CatalogItem.CategoryHierarchy
+ proto.MESSAGE, number=2, message=catalog.CatalogItem.CategoryHierarchy,
)
+
product_details = proto.RepeatedField(
- proto.MESSAGE, number=3, message="ProductDetail"
+ proto.MESSAGE, number=3, message="ProductDetail",
)
+
list_id = proto.Field(proto.STRING, number=4)
+
cart_id = proto.Field(proto.STRING, number=5)
+
purchase_transaction = proto.Field(
- proto.MESSAGE, number=6, message="PurchaseTransaction"
+ proto.MESSAGE, number=6, message="PurchaseTransaction",
)
@@ -322,10 +341,10 @@ class PurchaseTransaction(proto.Message):
adjustments to total revenue that you want to include as
part of your revenue calculations. This field is not
required if the event type is ``refund``.
- taxes (Sequence[~.user_event.PurchaseTransaction.TaxesEntry]):
+ taxes (Sequence[google.cloud.recommendationengine_v1beta1.types.PurchaseTransaction.TaxesEntry]):
Optional. All the taxes associated with the
transaction.
- costs (Sequence[~.user_event.PurchaseTransaction.CostsEntry]):
+ costs (Sequence[google.cloud.recommendationengine_v1beta1.types.PurchaseTransaction.CostsEntry]):
Optional. All the costs associated with the product. These
can be manufacturing costs, shipping expenses not borne by
the end user, or any other costs.
@@ -343,9 +362,13 @@ class PurchaseTransaction(proto.Message):
"""
id = proto.Field(proto.STRING, number=1)
+
revenue = proto.Field(proto.FLOAT, number=2)
+
taxes = proto.MapField(proto.STRING, proto.FLOAT, number=3)
+
costs = proto.MapField(proto.STRING, proto.FLOAT, number=4)
+
currency_code = proto.Field(proto.STRING, number=6)
@@ -369,7 +392,7 @@ class ProductDetail(proto.Message):
discounted price). If provided, this will
override the display price in Catalog for this
product.
- stock_state (~.catalog.ProductCatalogItem.StockState):
+ stock_state (google.cloud.recommendationengine_v1beta1.types.ProductCatalogItem.StockState):
Optional. Item stock state. If provided, this
overrides the stock state in Catalog for items
in this event.
@@ -390,21 +413,28 @@ class ProductDetail(proto.Message):
``stock_state`` field to be ``OUT_OF_STOCK``. Leaving this
field unspecified / as zero is not sufficient to mark the
item out of stock.
- item_attributes (~.common.FeatureMap):
+ item_attributes (google.cloud.recommendationengine_v1beta1.types.FeatureMap):
Optional. Extra features associated with a
product in the user event.
"""
id = proto.Field(proto.STRING, number=1)
+
currency_code = proto.Field(proto.STRING, number=2)
+
original_price = proto.Field(proto.FLOAT, number=3)
+
display_price = proto.Field(proto.FLOAT, number=4)
+
stock_state = proto.Field(
- proto.ENUM, number=5, enum=catalog.ProductCatalogItem.StockState
+ proto.ENUM, number=5, enum=catalog.ProductCatalogItem.StockState,
)
+
quantity = proto.Field(proto.INT32, number=6)
+
available_quantity = proto.Field(proto.INT32, number=7)
- item_attributes = proto.Field(proto.MESSAGE, number=8, message=common.FeatureMap)
+
+ item_attributes = proto.Field(proto.MESSAGE, number=8, message=common.FeatureMap,)
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/recommendationengine_v1beta1/types/user_event_service.py b/google/cloud/recommendationengine_v1beta1/types/user_event_service.py
index f63f15c5..fce9b154 100644
--- a/google/cloud/recommendationengine_v1beta1/types/user_event_service.py
+++ b/google/cloud/recommendationengine_v1beta1/types/user_event_service.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-# Copyright (C) 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -72,7 +72,9 @@ class PurgeUserEventsRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
filter = proto.Field(proto.STRING, number=2)
+
force = proto.Field(proto.BOOL, number=3)
@@ -84,12 +86,13 @@ class PurgeUserEventsMetadata(proto.Message):
Attributes:
operation_name (str):
The ID of the request / operation.
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Operation create time.
"""
operation_name = proto.Field(proto.STRING, number=1)
- create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp)
+
+ create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,)
class PurgeUserEventsResponse(proto.Message):
@@ -101,15 +104,16 @@ class PurgeUserEventsResponse(proto.Message):
purged_events_count (int):
The total count of events purged as a result
of the operation.
- user_events_sample (Sequence[~.gcr_user_event.UserEvent]):
+ user_events_sample (Sequence[google.cloud.recommendationengine_v1beta1.types.UserEvent]):
A sampling of events deleted (or will be deleted) depending
on the ``force`` property in the request. Max of 500 items
will be returned.
"""
purged_events_count = proto.Field(proto.INT64, number=1)
+
user_events_sample = proto.RepeatedField(
- proto.MESSAGE, number=2, message=gcr_user_event.UserEvent
+ proto.MESSAGE, number=2, message=gcr_user_event.UserEvent,
)
@@ -120,12 +124,13 @@ class WriteUserEventRequest(proto.Message):
parent (str):
Required. The parent eventStore resource name, such as
"projects/1234/locations/global/catalogs/default_catalog/eventStores/default_event_store".
- user_event (~.gcr_user_event.UserEvent):
+ user_event (google.cloud.recommendationengine_v1beta1.types.UserEvent):
Required. User event to write.
"""
parent = proto.Field(proto.STRING, number=1)
- user_event = proto.Field(proto.MESSAGE, number=2, message=gcr_user_event.UserEvent)
+
+ user_event = proto.Field(proto.MESSAGE, number=2, message=gcr_user_event.UserEvent,)
class CollectUserEventRequest(proto.Message):
@@ -152,8 +157,11 @@ class CollectUserEventRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
user_event = proto.Field(proto.STRING, number=2)
+
uri = proto.Field(proto.STRING, number=3)
+
ets = proto.Field(proto.INT64, number=4)
@@ -211,8 +219,11 @@ class ListUserEventsRequest(proto.Message):
"""
parent = proto.Field(proto.STRING, number=1)
+
page_size = proto.Field(proto.INT32, number=2)
+
page_token = proto.Field(proto.STRING, number=3)
+
filter = proto.Field(proto.STRING, number=4)
@@ -220,7 +231,7 @@ class ListUserEventsResponse(proto.Message):
r"""Response message for ListUserEvents method.
Attributes:
- user_events (Sequence[~.gcr_user_event.UserEvent]):
+ user_events (Sequence[google.cloud.recommendationengine_v1beta1.types.UserEvent]):
The user events.
next_page_token (str):
If empty, the list is complete. If nonempty, the token to
@@ -232,8 +243,9 @@ def raw_page(self):
return self
user_events = proto.RepeatedField(
- proto.MESSAGE, number=1, message=gcr_user_event.UserEvent
+ proto.MESSAGE, number=1, message=gcr_user_event.UserEvent,
)
+
next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/mypy.ini b/mypy.ini
index f23e6b53..4505b485 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,3 +1,3 @@
[mypy]
-python_version = 3.5
+python_version = 3.6
namespace_packages = True
diff --git a/noxfile.py b/noxfile.py
index 4109e2b3..43dd3024 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,19 +18,37 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
-BLACK_VERSION = "black==19.3b0"
+BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-if os.path.exists("samples"):
- BLACK_PATHS.append("samples")
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
-@nox.session(python="3.7")
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
"""Run linters.
@@ -38,7 +56,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -53,10 +73,12 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
@@ -65,16 +87,23 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("mock", "pytest", "pytest-cov")
- session.install("-e", ".")
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
- "--cov=google.cloud.recommendationengine",
- "--cov=google.cloud",
- "--cov=tests.unit",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google/cloud",
+ "--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
@@ -84,20 +113,30 @@ def default(session):
)
-@nox.session(python=["3.6", "3.7", "3.8"])
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["3.7"])
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -110,18 +149,29 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest")
-
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -129,12 +179,12 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=100")
+ session.run("coverage", "report", "--show-missing", "--fail-under=98")
session.run("coverage", "erase")
-@nox.session(python="3.7")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
"""Build the docs for this library."""
@@ -154,3 +204,38 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/renovate.json b/renovate.json
index 4fa94931..f08bc22c 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,6 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"]
}
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 00000000..21f6d2a2
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
+# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/fixup_recommendationengine_v1beta1_keywords.py b/scripts/fixup_recommendationengine_v1beta1_keywords.py
new file mode 100644
index 00000000..15ac8f30
--- /dev/null
+++ b/scripts/fixup_recommendationengine_v1beta1_keywords.py
@@ -0,0 +1,193 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+ predicate: Callable[[Any], bool],
+ iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+ """A stable, out-of-place partition."""
+ results = ([], [])
+
+ for i in iterator:
+ results[int(predicate(i))].append(i)
+
+ # Returns trueList, falseList
+ return results[1], results[0]
+
+
+class recommendationengineCallTransformer(cst.CSTTransformer):
+ CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
+ METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
+ 'collect_user_event': ('parent', 'user_event', 'uri', 'ets', ),
+ 'create_catalog_item': ('parent', 'catalog_item', ),
+ 'create_prediction_api_key_registration': ('parent', 'prediction_api_key_registration', ),
+ 'delete_catalog_item': ('name', ),
+ 'delete_prediction_api_key_registration': ('name', ),
+ 'get_catalog_item': ('name', ),
+ 'import_catalog_items': ('parent', 'input_config', 'request_id', 'errors_config', ),
+ 'import_user_events': ('parent', 'input_config', 'request_id', 'errors_config', ),
+ 'list_catalog_items': ('parent', 'page_size', 'page_token', 'filter', ),
+ 'list_prediction_api_key_registrations': ('parent', 'page_size', 'page_token', ),
+ 'list_user_events': ('parent', 'page_size', 'page_token', 'filter', ),
+ 'predict': ('name', 'user_event', 'page_size', 'page_token', 'filter', 'dry_run', 'params', 'labels', ),
+ 'purge_user_events': ('parent', 'filter', 'force', ),
+ 'update_catalog_item': ('name', 'catalog_item', 'update_mask', ),
+ 'write_user_event': ('parent', 'user_event', ),
+
+ }
+
+ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+ try:
+ key = original.func.attr.value
+ kword_params = self.METHOD_TO_PARAMS[key]
+ except (AttributeError, KeyError):
+ # Either not a method from the API or too convoluted to be sure.
+ return updated
+
+ # If the existing code is valid, keyword args come after positional args.
+ # Therefore, all positional args must map to the first parameters.
+ args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+ if any(k.keyword.value == "request" for k in kwargs):
+ # We've already fixed this file, don't fix it again.
+ return updated
+
+ kwargs, ctrl_kwargs = partition(
+ lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ kwargs
+ )
+
+ args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+ ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+ for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+ request_arg = cst.Arg(
+ value=cst.Dict([
+ cst.DictElement(
+ cst.SimpleString("'{}'".format(name)),
+ cst.Element(value=arg.value)
+ )
+ # Note: the args + kwargs looks silly, but keep in mind that
+ # the control parameters had to be stripped out, and that
+ # those could have been passed positionally or by keyword.
+ for name, arg in zip(kword_params, args + kwargs)]),
+ keyword=cst.Name("request")
+ )
+
+ return updated.with_changes(
+ args=[request_arg] + ctrl_kwargs
+ )
+
+
+def fix_files(
+ in_dir: pathlib.Path,
+ out_dir: pathlib.Path,
+ *,
+ transformer=recommendationengineCallTransformer(),
+):
+ """Duplicate the input dir to the output dir, fixing file method calls.
+
+ Preconditions:
+ * in_dir is a real directory
+ * out_dir is a real, empty directory
+ """
+ pyfile_gen = (
+ pathlib.Path(os.path.join(root, f))
+ for root, _, files in os.walk(in_dir)
+ for f in files if os.path.splitext(f)[1] == ".py"
+ )
+
+ for fpath in pyfile_gen:
+ with open(fpath, 'r') as f:
+ src = f.read()
+
+ # Parse the code and insert method call fixes.
+ tree = cst.parse_module(src)
+ updated = tree.visit(transformer)
+
+ # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
+ updated_path.parent.mkdir(parents=True, exist_ok=True)
+
+ # Generate the updated source file at the corresponding path.
+ with open(updated_path, 'w') as f:
+ f.write(updated.code)
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="""Fix up source that uses the recommendationengine client library.
+
+The existing sources are NOT overwritten but are copied to output_dir with changes made.
+
+Note: This tool operates at a best-effort level at converting positional
+ parameters in client method calls to keyword based parameters.
+ Cases where it WILL FAIL include
+ A) * or ** expansion in a method call.
+ B) Calls via function or method alias (includes free function calls)
+ C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
+
+ These all constitute false negatives. The tool will also detect false
+ positives when an API method shares a name with another method.
+""")
+ parser.add_argument(
+ '-d',
+ '--input-directory',
+ required=True,
+ dest='input_dir',
+ help='the input directory to walk for python files to fix up',
+ )
+ parser.add_argument(
+ '-o',
+ '--output-directory',
+ required=True,
+ dest='output_dir',
+ help='the directory to output files fixed via un-flattening',
+ )
+ args = parser.parse_args()
+ input_dir = pathlib.Path(args.input_dir)
+ output_dir = pathlib.Path(args.output_dir)
+ if not input_dir.is_dir():
+ print(
+ f"input directory '{input_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if not output_dir.is_dir():
+ print(
+ f"output directory '{output_dir}' does not exist or is not a directory",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ if os.listdir(output_dir):
+ print(
+ f"output directory '{output_dir}' is not empty",
+ file=sys.stderr,
+ )
+ sys.exit(-1)
+
+ fix_files(input_dir, output_dir)
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 00000000..d309d6e9
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+ config = yaml.load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 00000000..4fd23976
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 00000000..1446b94a
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication setup. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 00000000..11957ce2
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 00000000..a0406dba
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 00000000..5ea33d18
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.cfg b/setup.cfg
index 3bd55550..c3a2b39f 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,3 +1,19 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by synthtool. DO NOT EDIT!
[bdist_wheel]
universal = 1
diff --git a/setup.py b/setup.py
index 46d5a606..ffa79be0 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "0.1.0"
+version = "0.2.0"
package_root = os.path.abspath(os.path.dirname(__file__))
@@ -40,14 +40,10 @@
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
- "google-api-core >= 1.8.0, < 2.0.0dev",
- "googleapis-common-protos >= 1.5.8",
- "grpcio >= 1.10.0",
- "proto-plus >= 0.4.0",
+ "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
+ "proto-plus >= 1.15.0",
),
python_requires=">=3.6",
- setup_requires=["libcst >= 0.2.5"],
- scripts=["scripts/fixup_keywords.py"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
@@ -55,6 +51,7 @@
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
"Topic :: Internet",
"Topic :: Software Development :: Libraries :: Python Modules",
],
diff --git a/synth.metadata b/synth.metadata
index 10ec37b0..83c6880b 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,25 +1,32 @@
{
- "updateTime": "2020-03-09T20:18:03.054595Z",
"sources": [
{
"git": {
"name": ".",
- "remote": "sso://devrel/cloud/libraries/python/python-recommendations-ai"
+ "remote": "git@github.com:googleapis/python-recommendations-ai",
+ "sha": "6c6581870702de9834b39ef63b66ee3b1acf13c7"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "8f631c4c70a60a9c7da3749511ee4ad432b62898",
- "internalRef": "299885195"
+ "sha": "c0507a2bf28ea9f9af82559f2c876d2af598d6d9",
+ "internalRef": "365593953"
}
},
{
- "template": {
- "name": "python_library",
- "origin": "synthtool.gcp",
- "version": "2020.2.4"
+ "git": {
+ "name": "synthtool",
+ "remote": "https://github.com/googleapis/synthtool.git",
+ "sha": "60fc32cc950c20cfdd774b846a4a7dc5d1e4d0ef"
+ }
+ },
+ {
+ "git": {
+ "name": "synthtool",
+ "remote": "https://github.com/googleapis/synthtool.git",
+ "sha": "60fc32cc950c20cfdd774b846a4a7dc5d1e4d0ef"
}
}
],
@@ -30,7 +37,7 @@
"apiName": "recommendationengine",
"apiVersion": "v1beta1",
"language": "python",
- "generator": "gapic-generator-python"
+ "generator": "bazel"
}
}
]
diff --git a/synth.py b/synth.py
index 9abe02b2..d1a21752 100644
--- a/synth.py
+++ b/synth.py
@@ -14,49 +14,94 @@
"""This script is used to synthesize generated parts of this library."""
import os
+import re
import synthtool as s
import synthtool.gcp as gcp
from synthtool.languages import python
-gapic = gcp.GAPICMicrogenerator()
+gapic = gcp.GAPICBazel()
common = gcp.CommonTemplates()
# ----------------------------------------------------------------------------
# Generate Recommendations AI GAPIC layer
# ----------------------------------------------------------------------------
library = gapic.py_library(
- "recommendationengine", "v1beta1"
+ service="recommendationengine",
+ version="v1beta1",
+ bazel_target="//google/cloud/recommendationengine/v1beta1:recommendationengine-v1beta1-py",
)
-s.move(library, excludes=["setup.py", "docs/index.rst"])
+s.move(library, excludes=["setup.py", "docs/index.rst", "README.rst"])
-# correct license headers
-python.fix_pb2_headers()
-python.fix_pb2_grpc_headers()
# rename library to recommendations ai, to be consistent with product branding
-s.replace(["google/**/*.py", "tests/**/*.py"], "google-cloud-recommendationengine", "google-cloud-recommendations-ai")
+s.replace(
+ ["google/**/*.py", "tests/**/*.py"],
+ "google-cloud-recommendationengine",
+ "google-cloud-recommendations-ai",
+)
# surround path with * with ``
-s.replace("google/**/*.py", '''"(projects/\*/.*)"\.''', "``\g<1>``" )
-s.replace("google/**/import_.py", "gs://bucket/directory/\*\.json", "``gs://bucket/directory/*.json``")
+s.replace("google/**/*.py", """"(projects/\*/.*)"\.""", "``\g<1>``")
+s.replace(
+ "google/**/*client.py",
+ '''"projects/\*/locations/global/catalogs/default_catalog/eventStores/default_event_store/predictionApiKeyRegistrations/\"''',
+ """``projects/*/locations/global/catalogs/default_catalog/eventStores/default_event_store/predictionApiKeyRegistrations/``"""
+)
+s.replace(
+ "google/**/import_.py",
+ "gs://bucket/directory/\*\.json",
+ "``gs://bucket/directory/*.json``",
+)
+
+
+# Delete broken path helper 'catalog_item_path_path'
+# https://github.com/googleapis/gapic-generator-python/issues/701
+s.replace(
+ "google/**/client.py",
+ """\s+@staticmethod
+\s+def catalog_item_path_path.*?
+\s+return m\.groupdict\(\) if m else \{\}
+""",
+ "",
+ flags=re.MULTILINE | re.DOTALL,
+)
+
+s.replace(
+ "google/**/async_client.py",
+ """parse_catalog_item_path_path =.*?\)""",
+ "",
+ flags=re.MULTILINE | re.DOTALL,
+)
+s.replace(
+ "google/**/async_client.py",
+ """catalog_item_path_path =.*?\)""",
+ "",
+ flags=re.MULTILINE | re.DOTALL,
+)
+
+# Delete unit tests for 'catalog_item_path_path'
+s.replace(
+ "tests/**/test_catalog_service.py",
+ """def test_catalog_item_path_path.*?assert expected == actual""",
+ "",
+ flags=re.MULTILINE | re.DOTALL,
+)
+
+s.replace(
+ "tests/**/test_catalog_service.py",
+ """def test_parse_catalog_item_path_path.*?assert expected == actual""",
+ "",
+ flags=re.MULTILINE | re.DOTALL,
+)
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=100)
-s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file
-s.replace(".gitignore", "bigquery/docs/generated", "htmlcov") # temporary hack to ignore htmlcov
-
-# Remove 2.7 and 3.5 tests from noxfile.py
-s.replace("noxfile.py", '''\["2\.7", ''', '[')
-s.replace("noxfile.py", '''"3.5", ''', '')
-
-# Expand flake errors permitted to accomodate the Microgenerator
-# TODO: remove extra error codes once issues below are resolved
-# F401: https://github.com/googleapis/gapic-generator-python/issues/324
-# F841: local variable 'client'/'response' is assigned to but never use
-s.replace(".flake8", "ignore = .*", "ignore = E203, E266, E501, W503, F401, F841")
+templated_files = common.py_library(cov_level=98, microgenerator=True)
+s.move(
+ templated_files, excludes=[".coveragerc"]
+) # the microgenerator has a good coveragerc file
-s.shell.run(["nox", "-s", "blacken"], hide_output=False)
\ No newline at end of file
+s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 00000000..b05fbd63
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
new file mode 100644
index 00000000..a9297012
--- /dev/null
+++ b/testing/constraints-3.6.txt
@@ -0,0 +1,9 @@
+# This constraints file is used to check that lower bounds
+# are correct in setup.py
+# List all library dependencies and extras in this file.
+# Pin the version to the lower bound.
+
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have google-cloud-foo==1.14.0
+google-api-core==1.22.2
+proto-plus==1.15.0
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
new file mode 100644
index 00000000..da93009b
--- /dev/null
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
new file mode 100644
index 00000000..da93009b
--- /dev/null
+++ b/testing/constraints-3.8.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
new file mode 100644
index 00000000..da93009b
--- /dev/null
+++ b/testing/constraints-3.9.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/tests/unit/gapic/recommendationengine_v1beta1/__init__.py b/tests/unit/gapic/recommendationengine_v1beta1/__init__.py
new file mode 100644
index 00000000..42ffdf2b
--- /dev/null
+++ b/tests/unit/gapic/recommendationengine_v1beta1/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py b/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py
new file mode 100644
index 00000000..cfa8391b
--- /dev/null
+++ b/tests/unit/gapic/recommendationengine_v1beta1/test_catalog_service.py
@@ -0,0 +1,2650 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation_async # type: ignore
+from google.api_core import operations_v1
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.recommendationengine_v1beta1.services.catalog_service import (
+ CatalogServiceAsyncClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.catalog_service import (
+ CatalogServiceClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.catalog_service import pagers
+from google.cloud.recommendationengine_v1beta1.services.catalog_service import (
+ transports,
+)
+from google.cloud.recommendationengine_v1beta1.types import catalog
+from google.cloud.recommendationengine_v1beta1.types import catalog_service
+from google.cloud.recommendationengine_v1beta1.types import common
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import field_mask_pb2 as field_mask # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert CatalogServiceClient._get_default_mtls_endpoint(None) is None
+ assert (
+ CatalogServiceClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ CatalogServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ CatalogServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ CatalogServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ CatalogServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [CatalogServiceClient, CatalogServiceAsyncClient,]
+)
+def test_catalog_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [CatalogServiceClient, CatalogServiceAsyncClient,]
+)
+def test_catalog_service_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_catalog_service_client_get_transport_class():
+ transport = CatalogServiceClient.get_transport_class()
+ available_transports = [
+ transports.CatalogServiceGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = CatalogServiceClient.get_transport_class("grpc")
+ assert transport == transports.CatalogServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"),
+ (
+ CatalogServiceAsyncClient,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ CatalogServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CatalogServiceClient),
+)
+@mock.patch.object(
+ CatalogServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CatalogServiceAsyncClient),
+)
+def test_catalog_service_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(CatalogServiceClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(CatalogServiceClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "true"),
+ (
+ CatalogServiceAsyncClient,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc", "false"),
+ (
+ CatalogServiceAsyncClient,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ CatalogServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CatalogServiceClient),
+)
+@mock.patch.object(
+ CatalogServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(CatalogServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_catalog_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"),
+ (
+ CatalogServiceAsyncClient,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_catalog_service_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (CatalogServiceClient, transports.CatalogServiceGrpcTransport, "grpc"),
+ (
+ CatalogServiceAsyncClient,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_catalog_service_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_catalog_service_client_client_options_from_dict():
+ with mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.catalog_service.transports.CatalogServiceGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = CatalogServiceClient(
+ client_options={"api_endpoint": "squid.clam.whelk"}
+ )
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_create_catalog_item(
+ transport: str = "grpc", request_type=catalog_service.CreateCatalogItemRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ product_metadata=catalog.ProductCatalogItem(
+ exact_price=catalog.ProductCatalogItem.ExactPrice(display_price=0.1384)
+ ),
+ )
+
+ response = client.create_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.CreateCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+def test_create_catalog_item_from_dict():
+ test_create_catalog_item(request_type=dict)
+
+
+def test_create_catalog_item_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ client.create_catalog_item()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.CreateCatalogItemRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_catalog_item_async(
+ transport: str = "grpc_asyncio",
+ request_type=catalog_service.CreateCatalogItemRequest,
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ )
+ )
+
+ response = await client.create_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.CreateCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+@pytest.mark.asyncio
+async def test_create_catalog_item_async_from_dict():
+ await test_create_catalog_item_async(request_type=dict)
+
+
+def test_create_catalog_item_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.CreateCatalogItemRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ call.return_value = catalog.CatalogItem()
+
+ client.create_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_catalog_item_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.CreateCatalogItemRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+
+ await client.create_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_catalog_item_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_catalog_item(
+ parent="parent_value", catalog_item=catalog.CatalogItem(id="id_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].catalog_item == catalog.CatalogItem(id="id_value")
+
+
+def test_create_catalog_item_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_catalog_item(
+ catalog_service.CreateCatalogItemRequest(),
+ parent="parent_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_catalog_item_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_catalog_item(
+ parent="parent_value", catalog_item=catalog.CatalogItem(id="id_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].catalog_item == catalog.CatalogItem(id="id_value")
+
+
+@pytest.mark.asyncio
+async def test_create_catalog_item_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_catalog_item(
+ catalog_service.CreateCatalogItemRequest(),
+ parent="parent_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ )
+
+
+def test_get_catalog_item(
+ transport: str = "grpc", request_type=catalog_service.GetCatalogItemRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ product_metadata=catalog.ProductCatalogItem(
+ exact_price=catalog.ProductCatalogItem.ExactPrice(display_price=0.1384)
+ ),
+ )
+
+ response = client.get_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.GetCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+def test_get_catalog_item_from_dict():
+ test_get_catalog_item(request_type=dict)
+
+
+def test_get_catalog_item_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ client.get_catalog_item()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.GetCatalogItemRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_catalog_item_async(
+ transport: str = "grpc_asyncio", request_type=catalog_service.GetCatalogItemRequest
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ )
+ )
+
+ response = await client.get_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.GetCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+@pytest.mark.asyncio
+async def test_get_catalog_item_async_from_dict():
+ await test_get_catalog_item_async(request_type=dict)
+
+
+def test_get_catalog_item_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.GetCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ call.return_value = catalog.CatalogItem()
+
+ client.get_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_catalog_item_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.GetCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+
+ await client.get_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_catalog_item_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_catalog_item(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_get_catalog_item_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_catalog_item(
+ catalog_service.GetCatalogItemRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_catalog_item_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_catalog_item), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_catalog_item(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_catalog_item_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_catalog_item(
+ catalog_service.GetCatalogItemRequest(), name="name_value",
+ )
+
+
+def test_list_catalog_items(
+ transport: str = "grpc", request_type=catalog_service.ListCatalogItemsRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog_service.ListCatalogItemsResponse(
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.list_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.ListCatalogItemsRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, pagers.ListCatalogItemsPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_catalog_items_from_dict():
+ test_list_catalog_items(request_type=dict)
+
+
+def test_list_catalog_items_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ client.list_catalog_items()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.ListCatalogItemsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_async(
+ transport: str = "grpc_asyncio",
+ request_type=catalog_service.ListCatalogItemsRequest,
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog_service.ListCatalogItemsResponse(
+ next_page_token="next_page_token_value",
+ )
+ )
+
+ response = await client.list_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.ListCatalogItemsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListCatalogItemsAsyncPager)
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_async_from_dict():
+ await test_list_catalog_items_async(request_type=dict)
+
+
+def test_list_catalog_items_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.ListCatalogItemsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ call.return_value = catalog_service.ListCatalogItemsResponse()
+
+ client.list_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.ListCatalogItemsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog_service.ListCatalogItemsResponse()
+ )
+
+ await client.list_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_catalog_items_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog_service.ListCatalogItemsResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_catalog_items(
+ parent="parent_value", filter="filter_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+
+def test_list_catalog_items_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_catalog_items(
+ catalog_service.ListCatalogItemsRequest(),
+ parent="parent_value",
+ filter="filter_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog_service.ListCatalogItemsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog_service.ListCatalogItemsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_catalog_items(
+ parent="parent_value", filter="filter_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_catalog_items(
+ catalog_service.ListCatalogItemsRequest(),
+ parent="parent_value",
+ filter="filter_value",
+ )
+
+
+def test_list_catalog_items_pager():
+    client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ ],
+ next_page_token="abc",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[], next_page_token="def",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(),], next_page_token="ghi",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(), catalog.CatalogItem(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_catalog_items(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, catalog.CatalogItem) for i in results)
+
+
+def test_list_catalog_items_pages():
+    client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items), "__call__"
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ ],
+ next_page_token="abc",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[], next_page_token="def",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(),], next_page_token="ghi",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(), catalog.CatalogItem(),],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_catalog_items(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_async_pager():
+    client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ ],
+ next_page_token="abc",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[], next_page_token="def",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(),], next_page_token="ghi",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(), catalog.CatalogItem(),],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_catalog_items(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, catalog.CatalogItem) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_catalog_items_async_pages():
+    client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_catalog_items),
+ "__call__",
+ new_callable=mock.AsyncMock,
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ catalog.CatalogItem(),
+ ],
+ next_page_token="abc",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[], next_page_token="def",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(),], next_page_token="ghi",
+ ),
+ catalog_service.ListCatalogItemsResponse(
+ catalog_items=[catalog.CatalogItem(), catalog.CatalogItem(),],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_catalog_items(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_update_catalog_item(
+ transport: str = "grpc", request_type=catalog_service.UpdateCatalogItemRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ product_metadata=catalog.ProductCatalogItem(
+ exact_price=catalog.ProductCatalogItem.ExactPrice(display_price=0.1384)
+ ),
+ )
+
+ response = client.update_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.UpdateCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+def test_update_catalog_item_from_dict():
+ test_update_catalog_item(request_type=dict)
+
+
+def test_update_catalog_item_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ client.update_catalog_item()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.UpdateCatalogItemRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_catalog_item_async(
+ transport: str = "grpc_asyncio",
+ request_type=catalog_service.UpdateCatalogItemRequest,
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ catalog.CatalogItem(
+ id="id_value",
+ title="title_value",
+ description="description_value",
+ language_code="language_code_value",
+ tags=["tags_value"],
+ item_group_id="item_group_id_value",
+ )
+ )
+
+ response = await client.update_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.UpdateCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, catalog.CatalogItem)
+
+ assert response.id == "id_value"
+
+ assert response.title == "title_value"
+
+ assert response.description == "description_value"
+
+ assert response.language_code == "language_code_value"
+
+ assert response.tags == ["tags_value"]
+
+ assert response.item_group_id == "item_group_id_value"
+
+
+@pytest.mark.asyncio
+async def test_update_catalog_item_async_from_dict():
+ await test_update_catalog_item_async(request_type=dict)
+
+
+def test_update_catalog_item_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.UpdateCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ call.return_value = catalog.CatalogItem()
+
+ client.update_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_update_catalog_item_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.UpdateCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+
+ await client.update_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_update_catalog_item_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_catalog_item(
+ name="name_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert args[0].catalog_item == catalog.CatalogItem(id="id_value")
+
+ assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
+def test_update_catalog_item_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_catalog_item(
+ catalog_service.UpdateCatalogItemRequest(),
+ name="name_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+
+@pytest.mark.asyncio
+async def test_update_catalog_item_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = catalog.CatalogItem()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.CatalogItem())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_catalog_item(
+ name="name_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert args[0].catalog_item == catalog.CatalogItem(id="id_value")
+
+ assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
+
+
+@pytest.mark.asyncio
+async def test_update_catalog_item_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_catalog_item(
+ catalog_service.UpdateCatalogItemRequest(),
+ name="name_value",
+ catalog_item=catalog.CatalogItem(id="id_value"),
+ update_mask=field_mask.FieldMask(paths=["paths_value"]),
+ )
+
+
+def test_delete_catalog_item(
+ transport: str = "grpc", request_type=catalog_service.DeleteCatalogItemRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ response = client.delete_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.DeleteCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_catalog_item_from_dict():
+ test_delete_catalog_item(request_type=dict)
+
+
+def test_delete_catalog_item_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ client.delete_catalog_item()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.DeleteCatalogItemRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_catalog_item_async(
+ transport: str = "grpc_asyncio",
+ request_type=catalog_service.DeleteCatalogItemRequest,
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ response = await client.delete_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == catalog_service.DeleteCatalogItemRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_catalog_item_async_from_dict():
+ await test_delete_catalog_item_async(request_type=dict)
+
+
+def test_delete_catalog_item_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.DeleteCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ call.return_value = None
+
+ client.delete_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_catalog_item_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = catalog_service.DeleteCatalogItemRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+ await client.delete_catalog_item(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_catalog_item_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_catalog_item(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+def test_delete_catalog_item_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_catalog_item(
+ catalog_service.DeleteCatalogItemRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_catalog_item_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_catalog_item), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_catalog_item(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_catalog_item_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_catalog_item(
+ catalog_service.DeleteCatalogItemRequest(), name="name_value",
+ )
+
+
+def test_import_catalog_items(
+ transport: str = "grpc", request_type=import_.ImportCatalogItemsRequest
+):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/spam")
+
+ response = client.import_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == import_.ImportCatalogItemsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+def test_import_catalog_items_from_dict():
+ test_import_catalog_items(request_type=dict)
+
+
+def test_import_catalog_items_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ client.import_catalog_items()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == import_.ImportCatalogItemsRequest()
+
+
+@pytest.mark.asyncio
+async def test_import_catalog_items_async(
+ transport: str = "grpc_asyncio", request_type=import_.ImportCatalogItemsRequest
+):
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+
+ response = await client.import_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == import_.ImportCatalogItemsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_import_catalog_items_async_from_dict():
+ await test_import_catalog_items_async(request_type=dict)
+
+
+def test_import_catalog_items_field_headers():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = import_.ImportCatalogItemsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ client.import_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_import_catalog_items_field_headers_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = import_.ImportCatalogItemsRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/op")
+ )
+
+ await client.import_catalog_items(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_import_catalog_items_flattened():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.import_catalog_items(
+ parent="parent_value",
+ request_id="request_id_value",
+ input_config=import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ ),
+ errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].request_id == "request_id_value"
+
+ assert args[0].input_config == import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ )
+
+ assert args[0].errors_config == import_.ImportErrorsConfig(
+ gcs_prefix="gcs_prefix_value"
+ )
+
+
+def test_import_catalog_items_flattened_error():
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.import_catalog_items(
+ import_.ImportCatalogItemsRequest(),
+ parent="parent_value",
+ request_id="request_id_value",
+ input_config=import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ ),
+ errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_import_catalog_items_flattened_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.import_catalog_items), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ operations_pb2.Operation(name="operations/spam")
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.import_catalog_items(
+ parent="parent_value",
+ request_id="request_id_value",
+ input_config=import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ ),
+ errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].request_id == "request_id_value"
+
+ assert args[0].input_config == import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ )
+
+ assert args[0].errors_config == import_.ImportErrorsConfig(
+ gcs_prefix="gcs_prefix_value"
+ )
+
+
+@pytest.mark.asyncio
+async def test_import_catalog_items_flattened_error_async():
+ client = CatalogServiceAsyncClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.import_catalog_items(
+ import_.ImportCatalogItemsRequest(),
+ parent="parent_value",
+ request_id="request_id_value",
+ input_config=import_.InputConfig(
+ catalog_inline_source=import_.CatalogInlineSource(
+ catalog_items=[catalog.CatalogItem(id="id_value")]
+ )
+ ),
+ errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = CatalogServiceClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = CatalogServiceClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.CatalogServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.CatalogServiceGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.CatalogServiceGrpcTransport,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = CatalogServiceClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client.transport, transports.CatalogServiceGrpcTransport,)
+
+
+def test_catalog_service_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.CatalogServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_catalog_service_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.catalog_service.transports.CatalogServiceTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.CatalogServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = (
+ "create_catalog_item",
+ "get_catalog_item",
+ "list_catalog_items",
+ "update_catalog_item",
+ "delete_catalog_item",
+ "import_catalog_items",
+ )
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+ # Additionally, the LRO client (a property) should
+ # also raise NotImplementedError
+ with pytest.raises(NotImplementedError):
+ transport.operations_client
+
+
+def test_catalog_service_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CatalogServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+def test_catalog_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.catalog_service.transports.CatalogServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.CatalogServiceTransport()
+ adc.assert_called_once()
+
+
+def test_catalog_service_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ CatalogServiceClient()
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
+ )
+
+
+def test_catalog_service_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.CatalogServiceGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.CatalogServiceGrpcTransport,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_catalog_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_catalog_service_host_no_port():
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="recommendationengine.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_catalog_service_host_with_port():
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="recommendationengine.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "recommendationengine.googleapis.com:8000"
+
+
+def test_catalog_service_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.CatalogServiceGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+def test_catalog_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.CatalogServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.CatalogServiceGrpcTransport,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_catalog_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.CatalogServiceGrpcTransport,
+ transports.CatalogServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_catalog_service_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_catalog_service_grpc_lro_client():
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+ transport = client.transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_catalog_service_grpc_lro_async_client():
+ client = CatalogServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ transport = client.transport
+
+ # Ensure that we have a api-core operations client.
+ assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
+
+ # Ensure that subsequent calls to the property send the exact same object.
+ assert transport.operations_client is transport.operations_client
+
+
+def test_catalog_path():
+ project = "squid"
+ location = "clam"
+ catalog = "whelk"
+
+ expected = "projects/{project}/locations/{location}/catalogs/{catalog}".format(
+ project=project, location=location, catalog=catalog,
+ )
+ actual = CatalogServiceClient.catalog_path(project, location, catalog)
+ assert expected == actual
+
+
+def test_parse_catalog_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "catalog": "nudibranch",
+ }
+ path = CatalogServiceClient.catalog_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_catalog_path(path)
+ assert expected == actual
+
+
+def test_common_billing_account_path():
+ billing_account = "squid"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = CatalogServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = CatalogServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "whelk"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = CatalogServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = CatalogServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = CatalogServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = CatalogServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = CatalogServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = CatalogServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = CatalogServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = CatalogServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = CatalogServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.CatalogServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = CatalogServiceClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.CatalogServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = CatalogServiceClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py b/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py
new file mode 100644
index 00000000..0ba68a97
--- /dev/null
+++ b/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_api_key_registry.py
@@ -0,0 +1,2020 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
+ PredictionApiKeyRegistryAsyncClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
+ PredictionApiKeyRegistryClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
+ pagers,
+)
+from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
+ transports,
+)
+from google.cloud.recommendationengine_v1beta1.types import (
+ prediction_apikey_registry_service,
+)
+from google.oauth2 import service_account
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert PredictionApiKeyRegistryClient._get_default_mtls_endpoint(None) is None
+ assert (
+ PredictionApiKeyRegistryClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ PredictionApiKeyRegistryClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ PredictionApiKeyRegistryClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ PredictionApiKeyRegistryClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ PredictionApiKeyRegistryClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ [PredictionApiKeyRegistryClient, PredictionApiKeyRegistryAsyncClient,],
+)
+def test_prediction_api_key_registry_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class",
+ [PredictionApiKeyRegistryClient, PredictionApiKeyRegistryAsyncClient,],
+)
+def test_prediction_api_key_registry_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_prediction_api_key_registry_client_get_transport_class():
+ transport = PredictionApiKeyRegistryClient.get_transport_class()
+ available_transports = [
+ transports.PredictionApiKeyRegistryGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = PredictionApiKeyRegistryClient.get_transport_class("grpc")
+ assert transport == transports.PredictionApiKeyRegistryGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (
+ PredictionApiKeyRegistryClient,
+ transports.PredictionApiKeyRegistryGrpcTransport,
+ "grpc",
+ ),
+ (
+ PredictionApiKeyRegistryAsyncClient,
+ transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ PredictionApiKeyRegistryClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionApiKeyRegistryClient),
+)
+@mock.patch.object(
+ PredictionApiKeyRegistryAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionApiKeyRegistryAsyncClient),
+)
+def test_prediction_api_key_registry_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(
+ PredictionApiKeyRegistryClient, "get_transport_class"
+ ) as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(
+ PredictionApiKeyRegistryClient, "get_transport_class"
+ ) as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            PredictionApiKeyRegistryClient,
            transports.PredictionApiKeyRegistryGrpcTransport,
            "grpc",
            "true",
        ),
        (
            PredictionApiKeyRegistryAsyncClient,
            transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            PredictionApiKeyRegistryClient,
            transports.PredictionApiKeyRegistryGrpcTransport,
            "grpc",
            "false",
        ),
        (
            PredictionApiKeyRegistryAsyncClient,
            transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    PredictionApiKeyRegistryClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(PredictionApiKeyRegistryClient),
)
@mock.patch.object(
    PredictionApiKeyRegistryAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(PredictionApiKeyRegistryAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_prediction_api_key_registry_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify mTLS endpoint/cert auto-switching under GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Three scenarios are exercised: an explicit client_cert_source in client
    options, an ADC-provided default client cert, and no cert at all. In each,
    the chosen host and cert source depend on GOOGLE_API_USE_CLIENT_CERTIFICATE.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.

    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(client_options=options)

            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT

            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
            )

    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    # NOTE(review): `client` here still refers to the instance
                    # created in the previous section; it is only used to read
                    # the DEFAULT_*ENDPOINT class attributes, so this works,
                    # but it relies on the earlier block having run.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback

                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                    )

    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class()
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            PredictionApiKeyRegistryClient,
            transports.PredictionApiKeyRegistryGrpcTransport,
            "grpc",
        ),
        (
            PredictionApiKeyRegistryAsyncClient,
            transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_prediction_api_key_registry_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Verify scopes given via client_options are forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            PredictionApiKeyRegistryClient,
            transports.PredictionApiKeyRegistryGrpcTransport,
            "grpc",
        ),
        (
            PredictionApiKeyRegistryAsyncClient,
            transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_prediction_api_key_registry_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """Verify a credentials_file in client_options is forwarded to the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_prediction_api_key_registry_client_client_options_from_dict():
    """Verify client_options may be passed as a plain dict, not just ClientOptions."""
    with mock.patch(
        "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.transports.PredictionApiKeyRegistryGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = PredictionApiKeyRegistryClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
        )
+
+
def test_create_prediction_api_key_registration(
    transport: str = "grpc",
    request_type=prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest,
):
    """Verify the sync create RPC forwards the request and returns the stub's response."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = prediction_apikey_registry_service.PredictionApiKeyRegistration(
            api_key="api_key_value",
        )

        response = client.create_prediction_api_key_registration(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
        )

    # Establish that the response is the type that we expect.

    assert isinstance(
        response, prediction_apikey_registry_service.PredictionApiKeyRegistration
    )

    assert response.api_key == "api_key_value"
+
+
def test_create_prediction_api_key_registration_from_dict():
    """Re-run the sync create test with the request supplied as a dict."""
    test_create_prediction_api_key_registration(request_type=dict)
+
+
def test_create_prediction_api_key_registration_empty_call():
    """Verify calling with no arguments still sends an empty request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        client.create_prediction_api_key_registration()
        call.assert_called()
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
        )
+
+
@pytest.mark.asyncio
async def test_create_prediction_api_key_registration_async(
    transport: str = "grpc_asyncio",
    request_type=prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest,
):
    """Verify the async create RPC forwards the request and awaits the response."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.PredictionApiKeyRegistration(
                api_key="api_key_value",
            )
        )

        response = await client.create_prediction_api_key_registration(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(
        response, prediction_apikey_registry_service.PredictionApiKeyRegistration
    )

    assert response.api_key == "api_key_value"
+
+
@pytest.mark.asyncio
async def test_create_prediction_api_key_registration_async_from_dict():
    """Re-run the async create test with the request supplied as a dict."""
    await test_create_prediction_api_key_registration_async(request_type=dict)
+
+
def test_create_prediction_api_key_registration_field_headers():
    """Verify routing metadata (x-goog-request-params) carries the parent field."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = (
        prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
    )
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        call.return_value = (
            prediction_apikey_registry_service.PredictionApiKeyRegistration()
        )

        client.create_prediction_api_key_registration(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_prediction_api_key_registration_field_headers_async():
    """Async variant: verify routing metadata carries the parent field."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = (
        prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
    )
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.PredictionApiKeyRegistration()
        )

        await client.create_prediction_api_key_registration(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_create_prediction_api_key_registration_flattened():
    """Verify flattened keyword arguments are packed into the request message."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = (
            prediction_apikey_registry_service.PredictionApiKeyRegistration()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.create_prediction_api_key_registration(
            parent="parent_value",
            prediction_api_key_registration=prediction_apikey_registry_service.PredictionApiKeyRegistration(
                api_key="api_key_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[
            0
        ].prediction_api_key_registration == prediction_apikey_registry_service.PredictionApiKeyRegistration(
            api_key="api_key_value"
        )
+
+
def test_create_prediction_api_key_registration_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.create_prediction_api_key_registration(
            prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest(),
            parent="parent_value",
            prediction_api_key_registration=prediction_apikey_registry_service.PredictionApiKeyRegistration(
                api_key="api_key_value"
            ),
        )
+
+
@pytest.mark.asyncio
async def test_create_prediction_api_key_registration_flattened_async():
    """Async variant: verify flattened kwargs are packed into the request message."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_prediction_api_key_registration), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): this first assignment is immediately overwritten by the
        # FakeUnaryUnaryCall below; it is dead but harmless generator output.
        call.return_value = (
            prediction_apikey_registry_service.PredictionApiKeyRegistration()
        )

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.PredictionApiKeyRegistration()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_prediction_api_key_registration(
            parent="parent_value",
            prediction_api_key_registration=prediction_apikey_registry_service.PredictionApiKeyRegistration(
                api_key="api_key_value"
            ),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[
            0
        ].prediction_api_key_registration == prediction_apikey_registry_service.PredictionApiKeyRegistration(
            api_key="api_key_value"
        )
+
+
@pytest.mark.asyncio
async def test_create_prediction_api_key_registration_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.create_prediction_api_key_registration(
            prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest(),
            parent="parent_value",
            prediction_api_key_registration=prediction_apikey_registry_service.PredictionApiKeyRegistration(
                api_key="api_key_value"
            ),
        )
+
+
def test_list_prediction_api_key_registrations(
    transport: str = "grpc",
    request_type=prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
):
    """Verify the sync list RPC forwards the request and wraps the response in a pager."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
            next_page_token="next_page_token_value",
        )

        response = client.list_prediction_api_key_registrations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
        )

    # Establish that the response is the type that we expect.

    assert isinstance(response, pagers.ListPredictionApiKeyRegistrationsPager)

    assert response.next_page_token == "next_page_token_value"
+
+
def test_list_prediction_api_key_registrations_from_dict():
    """Re-run the sync list test with the request supplied as a dict."""
    test_list_prediction_api_key_registrations(request_type=dict)
+
+
def test_list_prediction_api_key_registrations_empty_call():
    """Verify calling with no arguments still sends an empty list request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        client.list_prediction_api_key_registrations()
        call.assert_called()
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
        )
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_async(
    transport: str = "grpc_asyncio",
    request_type=prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest,
):
    """Verify the async list RPC returns an async pager wrapping the response."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                next_page_token="next_page_token_value",
            )
        )

        response = await client.list_prediction_api_key_registrations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert (
            args[0]
            == prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
        )

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListPredictionApiKeyRegistrationsAsyncPager)

    assert response.next_page_token == "next_page_token_value"
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_async_from_dict():
    """Re-run the async list test with the request supplied as a dict."""
    await test_list_prediction_api_key_registrations_async(request_type=dict)
+
+
def test_list_prediction_api_key_registrations_field_headers():
    """Verify routing metadata (x-goog-request-params) carries the parent field."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = (
        prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
    )
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        call.return_value = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
        )

        client.list_prediction_api_key_registrations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_field_headers_async():
    """Async variant: verify routing metadata carries the parent field."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = (
        prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
    )
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
        )

        await client.list_prediction_api_key_registrations(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_list_prediction_api_key_registrations_flattened():
    """Verify the flattened parent kwarg is packed into the list request."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
        )

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.list_prediction_api_key_registrations(parent="parent_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
def test_list_prediction_api_key_registrations_flattened_error():
    """Verify mixing a request object with flattened fields raises ValueError."""
    client = PredictionApiKeyRegistryClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.list_prediction_api_key_registrations(
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(),
            parent="parent_value",
        )
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_flattened_async():
    """Async variant: verify the flattened parent kwarg is packed into the request."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # NOTE(review): this first assignment is immediately overwritten by the
        # FakeUnaryUnaryCall below; it is dead but harmless generator output.
        call.return_value = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
        )

        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_prediction_api_key_registrations(
            parent="parent_value",
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_flattened_error_async():
    """Async variant: request object plus flattened fields raises ValueError."""
    client = PredictionApiKeyRegistryAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.list_prediction_api_key_registrations(
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(),
            parent="parent_value",
        )
+
+
def test_list_prediction_api_key_registrations_pager():
    """Verify the sync pager walks every page and yields each registration.

    The transport call is mocked with a sequence of page responses; iterating
    the pager must consume all pages (3 + 0 + 1 + 2 = 6 items) and attach the
    parent routing header to each page fetch.
    """
    client = PredictionApiKeyRegistryClient(
        # Fix: instantiate the credentials; the generated code passed the
        # AnonymousCredentials *class* object instead of an instance, unlike
        # every other test in this file.
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="abc",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[], next_page_token="def",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="ghi",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_prediction_api_key_registrations(request={})

        assert pager._metadata == metadata

        # Idiom: list(pager) instead of a pass-through comprehension.
        results = list(pager)
        assert len(results) == 6
        assert all(
            isinstance(
                i, prediction_apikey_registry_service.PredictionApiKeyRegistration
            )
            for i in results
        )
+
+
def test_list_prediction_api_key_registrations_pages():
    """Verify the sync pager exposes raw pages with the expected page tokens."""
    client = PredictionApiKeyRegistryClient(
        # Fix: instantiate the credentials; the generated code passed the
        # AnonymousCredentials *class* object instead of an instance.
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="abc",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[], next_page_token="def",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="ghi",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_prediction_api_key_registrations(request={}).pages)
        # The final page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_async_pager():
    """Verify the async pager walks every page and yields each registration."""
    client = PredictionApiKeyRegistryAsyncClient(
        # Fix: instantiate the credentials; the generated code passed the
        # AnonymousCredentials *class* object instead of an instance.
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="abc",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[], next_page_token="def",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="ghi",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_prediction_api_key_registrations(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        # 3 + 0 + 1 + 2 items across the four pages.
        assert len(responses) == 6
        assert all(
            isinstance(
                i, prediction_apikey_registry_service.PredictionApiKeyRegistration
            )
            for i in responses
        )
+
+
@pytest.mark.asyncio
async def test_list_prediction_api_key_registrations_async_pages():
    """Verify the async pager exposes raw pages with the expected page tokens."""
    client = PredictionApiKeyRegistryAsyncClient(
        # Fix: instantiate the credentials; the generated code passed the
        # AnonymousCredentials *class* object instead of an instance.
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_prediction_api_key_registrations),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="abc",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[], next_page_token="def",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
                next_page_token="ghi",
            ),
            prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
                prediction_api_key_registrations=[
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                    prediction_apikey_registry_service.PredictionApiKeyRegistration(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (
            await client.list_prediction_api_key_registrations(request={})
        ).pages:
            pages.append(page_)
        # The final page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
+def test_delete_prediction_api_key_registration(
+    transport: str = "grpc",
+    request_type=prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest,
+):
+    """Round-trip a delete RPC through a mocked gRPC stub: the stub is
+    called once with the expected request type and the client returns None.
+    """
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        response = client.delete_prediction_api_key_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert (
+            args[0]
+            == prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
+        )
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_prediction_api_key_registration_from_dict():
+    """Re-run the delete test with a dict request to exercise coercion."""
+    test_delete_prediction_api_key_registration(request_type=dict)
+
+
+def test_delete_prediction_api_key_registration_empty_call():
+    """Calling with no request and no flattened fields still sends a
+    default request proto.
+    """
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        client.delete_prediction_api_key_registration()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+
+        assert (
+            args[0]
+            == prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_prediction_api_key_registration_async(
+    transport: str = "grpc_asyncio",
+    request_type=prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest,
+):
+    """Async variant of the delete round-trip against a mocked stub."""
+    client = PredictionApiKeyRegistryAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+        response = await client.delete_prediction_api_key_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert (
+            args[0]
+            == prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
+        )
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_prediction_api_key_registration_async_from_dict():
+    """Re-run the async delete test with a dict request."""
+    await test_delete_prediction_api_key_registration_async(request_type=dict)
+
+
+def test_delete_prediction_api_key_registration_field_headers():
+    """URI-bound fields must be forwarded as x-goog-request-params metadata."""
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = (
+        prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
+    )
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        call.return_value = None
+
+        client.delete_prediction_api_key_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_prediction_api_key_registration_field_headers_async():
+    """Async variant: URI-bound fields become request-params metadata."""
+    client = PredictionApiKeyRegistryAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = (
+        prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
+    )
+    request.name = "name/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+
+        await client.delete_prediction_api_key_registration(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_prediction_api_key_registration_flattened():
+    """Flattened keyword args must be packed into the request proto."""
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.delete_prediction_api_key_registration), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_prediction_api_key_registration(name="name_value",)
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].name == "name_value"
+
+
+def test_delete_prediction_api_key_registration_flattened_error():
+    """Passing both a request object and flattened fields raises ValueError."""
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_prediction_api_key_registration(
+            prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest(),
+            name="name_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_prediction_api_key_registration_flattened_async():
+ client = PredictionApiKeyRegistryAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_prediction_api_key_registration), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_prediction_api_key_registration(
+ name="name_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_prediction_api_key_registration_flattened_error_async():
+    """Async variant: request object plus flattened fields raises ValueError."""
+    client = PredictionApiKeyRegistryAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_prediction_api_key_registration(
+            prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest(),
+            name="name_value",
+        )
+
+
+def test_credentials_transport_error():
+    """A transport instance is mutually exclusive with credentials,
+    credentials_file, and scopes client options.
+    """
+    # It is an error to provide credentials and a transport instance.
+    transport = transports.PredictionApiKeyRegistryGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = PredictionApiKeyRegistryClient(
+            credentials=credentials.AnonymousCredentials(), transport=transport,
+        )
+
+    # It is an error to provide a credentials file and a transport instance.
+    transport = transports.PredictionApiKeyRegistryGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = PredictionApiKeyRegistryClient(
+            client_options={"credentials_file": "credentials.json"},
+            transport=transport,
+        )
+
+    # It is an error to provide scopes and a transport instance.
+    transport = transports.PredictionApiKeyRegistryGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    with pytest.raises(ValueError):
+        client = PredictionApiKeyRegistryClient(
+            client_options={"scopes": ["1", "2"]}, transport=transport,
+        )
+
+
+def test_transport_instance():
+    """A supplied transport instance is used verbatim by the client."""
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.PredictionApiKeyRegistryGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    client = PredictionApiKeyRegistryClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    """Both sync and async transports expose a usable gRPC channel."""
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.PredictionApiKeyRegistryGrpcTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.PredictionApiKeyRegistryGrpcAsyncIOTransport(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.PredictionApiKeyRegistryGrpcTransport,
+        transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    """Transports fall back to Application Default Credentials."""
+    # Test default credentials are used if not provided.
+    with mock.patch.object(auth, "default") as adc:
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+    """The sync client defaults to the gRPC transport."""
+    # A client should use the gRPC transport by default.
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+    assert isinstance(
+        client.transport, transports.PredictionApiKeyRegistryGrpcTransport,
+    )
+
+
+def test_prediction_api_key_registry_base_transport_error():
+    """Supplying both credentials and credentials_file is rejected."""
+    # Passing both a credentials object and credentials_file should raise an error
+    with pytest.raises(exceptions.DuplicateCredentialArgs):
+        transport = transports.PredictionApiKeyRegistryTransport(
+            credentials=credentials.AnonymousCredentials(),
+            credentials_file="credentials.json",
+        )
+
+
+def test_prediction_api_key_registry_base_transport():
+    """Every RPC on the abstract base transport raises NotImplementedError."""
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.transports.PredictionApiKeyRegistryTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.PredictionApiKeyRegistryTransport(
+            credentials=credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        "create_prediction_api_key_registration",
+        "list_prediction_api_key_registrations",
+        "delete_prediction_api_key_registration",
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+
+def test_prediction_api_key_registry_base_transport_with_credentials_file():
+    """A credentials_file is loaded with the cloud-platform scope and the
+    given quota project.
+    """
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        auth, "load_credentials_from_file"
+    ) as load_creds, mock.patch(
+        "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.transports.PredictionApiKeyRegistryTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (credentials.AnonymousCredentials(), None)
+        transport = transports.PredictionApiKeyRegistryTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_prediction_api_key_registry_base_transport_with_adc():
+    """With neither credentials nor credentials_file, ADC is consulted."""
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(auth, "default") as adc, mock.patch(
+        "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.transports.PredictionApiKeyRegistryTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transport = transports.PredictionApiKeyRegistryTransport()
+        adc.assert_called_once()
+
+
+def test_prediction_api_key_registry_auth_adc():
+    """The client requests ADC with the cloud-platform scope by default."""
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(auth, "default") as adc:
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        PredictionApiKeyRegistryClient()
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+def test_prediction_api_key_registry_transport_auth_adc():
+    """The transport itself also falls back to ADC, forwarding the
+    quota project.
+    """
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(auth, "default") as adc:
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transports.PredictionApiKeyRegistryGrpcTransport(
+            host="squid.clam.whelk", quota_project_id="octopus"
+        )
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.PredictionApiKeyRegistryGrpcTransport,
+        transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
+    ],
+)
+def test_prediction_api_key_registry_grpc_transport_client_cert_source_for_mtls(
+    transport_class,
+):
+    """ssl_channel_credentials wins when given; otherwise
+    client_cert_source_for_mtls supplies the client certificate.
+    """
+    cred = credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_prediction_api_key_registry_host_no_port():
+    """An endpoint without a port defaults to :443."""
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="recommendationengine.googleapis.com"
+        ),
+    )
+    assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_prediction_api_key_registry_host_with_port():
+    """An explicit port in the endpoint is preserved."""
+    client = PredictionApiKeyRegistryClient(
+        credentials=credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(
+            api_endpoint="recommendationengine.googleapis.com:8000"
+        ),
+    )
+    assert client.transport._host == "recommendationengine.googleapis.com:8000"
+
+
+def test_prediction_api_key_registry_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.PredictionApiKeyRegistryGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+def test_prediction_api_key_registry_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.PredictionApiKeyRegistryGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.PredictionApiKeyRegistryGrpcTransport,
+        transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
+    ],
+)
+def test_prediction_api_key_registry_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    """Deprecated api_mtls_endpoint + client_cert_source still build an
+    mTLS channel (with a DeprecationWarning).
+    """
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.PredictionApiKeyRegistryGrpcTransport,
+        transports.PredictionApiKeyRegistryGrpcAsyncIOTransport,
+    ],
+)
+def test_prediction_api_key_registry_transport_channel_mtls_with_adc(transport_class):
+    """Deprecated api_mtls_endpoint with no cert source falls back to
+    ADC-derived SSL credentials.
+    """
+    mock_ssl_cred = mock.Mock()
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_event_store_path():
+    """event_store_path formats the canonical event-store resource name."""
+    project = "squid"
+    location = "clam"
+    catalog = "whelk"
+    event_store = "octopus"
+
+    expected = "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}".format(
+        project=project, location=location, catalog=catalog, event_store=event_store,
+    )
+    actual = PredictionApiKeyRegistryClient.event_store_path(
+        project, location, catalog, event_store
+    )
+    assert expected == actual
+
+
+def test_parse_event_store_path():
+    """parse_event_store_path inverts event_store_path."""
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+        "catalog": "cuttlefish",
+        "event_store": "mussel",
+    }
+    path = PredictionApiKeyRegistryClient.event_store_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_event_store_path(path)
+    assert expected == actual
+
+
+def test_prediction_api_key_registration_path():
+    """prediction_api_key_registration_path formats the full resource name."""
+    project = "winkle"
+    location = "nautilus"
+    catalog = "scallop"
+    event_store = "abalone"
+    prediction_api_key_registration = "squid"
+
+    expected = "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}/predictionApiKeyRegistrations/{prediction_api_key_registration}".format(
+        project=project,
+        location=location,
+        catalog=catalog,
+        event_store=event_store,
+        prediction_api_key_registration=prediction_api_key_registration,
+    )
+    actual = PredictionApiKeyRegistryClient.prediction_api_key_registration_path(
+        project, location, catalog, event_store, prediction_api_key_registration
+    )
+    assert expected == actual
+
+
+def test_parse_prediction_api_key_registration_path():
+    """parse_... inverts prediction_api_key_registration_path."""
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "catalog": "octopus",
+        "event_store": "oyster",
+        "prediction_api_key_registration": "nudibranch",
+    }
+    path = PredictionApiKeyRegistryClient.prediction_api_key_registration_path(
+        **expected
+    )
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_prediction_api_key_registration_path(
+        path
+    )
+    assert expected == actual
+
+
+def test_common_billing_account_path():
+    """common_billing_account_path formats billingAccounts/{id}."""
+    billing_account = "cuttlefish"
+
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = PredictionApiKeyRegistryClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    """parse_common_billing_account_path inverts the formatter."""
+    expected = {
+        "billing_account": "mussel",
+    }
+    path = PredictionApiKeyRegistryClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    """common_folder_path formats folders/{folder}."""
+    folder = "winkle"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = PredictionApiKeyRegistryClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    """parse_common_folder_path inverts the formatter."""
+    expected = {
+        "folder": "nautilus",
+    }
+    path = PredictionApiKeyRegistryClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    """common_organization_path formats organizations/{org}."""
+    organization = "scallop"
+
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = PredictionApiKeyRegistryClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    """parse_common_organization_path inverts the formatter."""
+    expected = {
+        "organization": "abalone",
+    }
+    path = PredictionApiKeyRegistryClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    """common_project_path formats projects/{project}."""
+    project = "squid"
+
+    expected = "projects/{project}".format(project=project,)
+    actual = PredictionApiKeyRegistryClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    """parse_common_project_path inverts the formatter."""
+    expected = {
+        "project": "clam",
+    }
+    path = PredictionApiKeyRegistryClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    """common_location_path formats projects/{project}/locations/{location}."""
+    project = "whelk"
+    location = "octopus"
+
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
+    )
+    actual = PredictionApiKeyRegistryClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    """parse_common_location_path inverts the formatter."""
+    expected = {
+        "project": "oyster",
+        "location": "nudibranch",
+    }
+    path = PredictionApiKeyRegistryClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = PredictionApiKeyRegistryClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+    """client_info must be forwarded to _prep_wrapped_messages for both the
+    client and a directly-constructed transport.
+    (Name's odd casing is generator-produced; kept for generator parity.)
+    """
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.PredictionApiKeyRegistryTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = PredictionApiKeyRegistryClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.PredictionApiKeyRegistryTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = PredictionApiKeyRegistryClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py b/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py
new file mode 100644
index 00000000..2b191a45
--- /dev/null
+++ b/tests/unit/gapic/recommendationengine_v1beta1/test_prediction_service.py
@@ -0,0 +1,1380 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.recommendationengine_v1beta1.services.prediction_service import (
+ PredictionServiceAsyncClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.prediction_service import (
+ PredictionServiceClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.prediction_service import pagers
+from google.cloud.recommendationengine_v1beta1.services.prediction_service import (
+ transports,
+)
+from google.cloud.recommendationengine_v1beta1.types import catalog
+from google.cloud.recommendationengine_v1beta1.types import common
+from google.cloud.recommendationengine_v1beta1.types import prediction_service
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
+from google.oauth2 import service_account
+from google.protobuf import struct_pb2 as struct # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert PredictionServiceClient._get_default_mtls_endpoint(None) is None
+ assert (
+ PredictionServiceClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ PredictionServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ PredictionServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ PredictionServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ PredictionServiceClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [PredictionServiceClient, PredictionServiceAsyncClient,]
+)
+def test_prediction_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [PredictionServiceClient, PredictionServiceAsyncClient,]
+)
+def test_prediction_service_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_prediction_service_client_get_transport_class():
+ transport = PredictionServiceClient.get_transport_class()
+ available_transports = [
+ transports.PredictionServiceGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = PredictionServiceClient.get_transport_class("grpc")
+ assert transport == transports.PredictionServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"),
+ (
+ PredictionServiceAsyncClient,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ PredictionServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionServiceClient),
+)
+@mock.patch.object(
+ PredictionServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionServiceAsyncClient),
+)
+def test_prediction_service_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(PredictionServiceClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ PredictionServiceClient,
+ transports.PredictionServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ PredictionServiceAsyncClient,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ PredictionServiceClient,
+ transports.PredictionServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ PredictionServiceAsyncClient,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ PredictionServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionServiceClient),
+)
+@mock.patch.object(
+ PredictionServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PredictionServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_prediction_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"),
+ (
+ PredictionServiceAsyncClient,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_prediction_service_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (PredictionServiceClient, transports.PredictionServiceGrpcTransport, "grpc"),
+ (
+ PredictionServiceAsyncClient,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_prediction_service_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_prediction_service_client_client_options_from_dict():
+ with mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.prediction_service.transports.PredictionServiceGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = PredictionServiceClient(
+ client_options={"api_endpoint": "squid.clam.whelk"}
+ )
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_predict(
+ transport: str = "grpc", request_type=prediction_service.PredictRequest
+):
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = prediction_service.PredictResponse(
+ recommendation_token="recommendation_token_value",
+ items_missing_in_catalog=["items_missing_in_catalog_value"],
+ dry_run=True,
+ next_page_token="next_page_token_value",
+ )
+
+ response = client.predict(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == prediction_service.PredictRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, pagers.PredictPager)
+
+ assert response.recommendation_token == "recommendation_token_value"
+
+ assert response.items_missing_in_catalog == ["items_missing_in_catalog_value"]
+
+ assert response.dry_run is True
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+def test_predict_from_dict():
+ test_predict(request_type=dict)
+
+
+def test_predict_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ client.predict()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == prediction_service.PredictRequest()
+
+
+@pytest.mark.asyncio
+async def test_predict_async(
+ transport: str = "grpc_asyncio", request_type=prediction_service.PredictRequest
+):
+ client = PredictionServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ prediction_service.PredictResponse(
+ recommendation_token="recommendation_token_value",
+ items_missing_in_catalog=["items_missing_in_catalog_value"],
+ dry_run=True,
+ next_page_token="next_page_token_value",
+ )
+ )
+
+ response = await client.predict(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == prediction_service.PredictRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.PredictAsyncPager)
+
+ assert response.recommendation_token == "recommendation_token_value"
+
+ assert response.items_missing_in_catalog == ["items_missing_in_catalog_value"]
+
+ assert response.dry_run is True
+
+ assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_predict_async_from_dict():
+ await test_predict_async(request_type=dict)
+
+
+def test_predict_field_headers():
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = prediction_service.PredictRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ call.return_value = prediction_service.PredictResponse()
+
+ client.predict(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_predict_field_headers_async():
+ client = PredictionServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = prediction_service.PredictRequest()
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ prediction_service.PredictResponse()
+ )
+
+ await client.predict(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_predict_flattened():
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = prediction_service.PredictResponse()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.predict(
+ name="name_value",
+ user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert args[0].user_event == gcr_user_event.UserEvent(
+ event_type="event_type_value"
+ )
+
+
+def test_predict_flattened_error():
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.predict(
+ prediction_service.PredictRequest(),
+ name="name_value",
+ user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_predict_flattened_async():
+ client = PredictionServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = prediction_service.PredictResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ prediction_service.PredictResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.predict(
+ name="name_value",
+ user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].name == "name_value"
+
+ assert args[0].user_event == gcr_user_event.UserEvent(
+ event_type="event_type_value"
+ )
+
+
+@pytest.mark.asyncio
+async def test_predict_flattened_error_async():
+ client = PredictionServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.predict(
+ prediction_service.PredictRequest(),
+ name="name_value",
+ user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+ )
+
+
+def test_predict_pager():
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ next_page_token="abc",
+ ),
+ prediction_service.PredictResponse(results=[], next_page_token="def",),
+ prediction_service.PredictResponse(
+ results=[prediction_service.PredictResponse.PredictionResult(),],
+ next_page_token="ghi",
+ ),
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", ""),)),
+ )
+ pager = client.predict(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(
+ isinstance(i, prediction_service.PredictResponse.PredictionResult)
+ for i in results
+ )
+
+
+def test_predict_pages():
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.predict), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ next_page_token="abc",
+ ),
+ prediction_service.PredictResponse(results=[], next_page_token="def",),
+ prediction_service.PredictResponse(
+ results=[prediction_service.PredictResponse.PredictionResult(),],
+ next_page_token="ghi",
+ ),
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.predict(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_predict_async_pager():
+ client = PredictionServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.predict), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ next_page_token="abc",
+ ),
+ prediction_service.PredictResponse(results=[], next_page_token="def",),
+ prediction_service.PredictResponse(
+ results=[prediction_service.PredictResponse.PredictionResult(),],
+ next_page_token="ghi",
+ ),
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.predict(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(
+ isinstance(i, prediction_service.PredictResponse.PredictionResult)
+ for i in responses
+ )
+
+
+@pytest.mark.asyncio
+async def test_predict_async_pages():
+ client = PredictionServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.predict), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ next_page_token="abc",
+ ),
+ prediction_service.PredictResponse(results=[], next_page_token="def",),
+ prediction_service.PredictResponse(
+ results=[prediction_service.PredictResponse.PredictionResult(),],
+ next_page_token="ghi",
+ ),
+ prediction_service.PredictResponse(
+ results=[
+ prediction_service.PredictResponse.PredictionResult(),
+ prediction_service.PredictResponse.PredictionResult(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.predict(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.PredictionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.PredictionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = PredictionServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.PredictionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = PredictionServiceClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.PredictionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ client = PredictionServiceClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.PredictionServiceGrpcTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.PredictionServiceGrpcAsyncIOTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PredictionServiceGrpcTransport,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = PredictionServiceClient(credentials=credentials.AnonymousCredentials(),)
+ assert isinstance(client.transport, transports.PredictionServiceGrpcTransport,)
+
+
+def test_prediction_service_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(exceptions.DuplicateCredentialArgs):
+ transport = transports.PredictionServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_prediction_service_base_transport():
+ # Instantiate the base transport.
+ with mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.prediction_service.transports.PredictionServiceTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.PredictionServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Every method on the transport should just blindly
+ # raise NotImplementedError.
+ methods = ("predict",)
+ for method in methods:
+ with pytest.raises(NotImplementedError):
+ getattr(transport, method)(request=object())
+
+
+def test_prediction_service_base_transport_with_credentials_file():
+ # Instantiate the base transport with a credentials file
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ load_creds.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.PredictionServiceTransport(
+ credentials_file="credentials.json", quota_project_id="octopus",
+ )
+ load_creds.assert_called_once_with(
+ "credentials.json",
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+def test_prediction_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.prediction_service.transports.PredictionServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.PredictionServiceTransport()
+ adc.assert_called_once()
+
+
+def test_prediction_service_auth_adc():
+ # If no credentials are provided, we should use ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ PredictionServiceClient()
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id=None,
+ )
+
+
+def test_prediction_service_transport_auth_adc():
+ # If credentials and host are not provided, the transport class should use
+ # ADC credentials.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transports.PredictionServiceGrpcTransport(
+ host="squid.clam.whelk", quota_project_id="octopus"
+ )
+ adc.assert_called_once_with(
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ quota_project_id="octopus",
+ )
+
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PredictionServiceGrpcTransport,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_prediction_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
+def test_prediction_service_host_no_port():
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="recommendationengine.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_prediction_service_host_with_port():
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="recommendationengine.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "recommendationengine.googleapis.com:8000"
+
+
+def test_prediction_service_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.PredictionServiceGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+def test_prediction_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.PredictionServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PredictionServiceGrpcTransport,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_prediction_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
+):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PredictionServiceGrpcTransport,
+ transports.PredictionServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_prediction_service_transport_channel_mtls_with_adc(transport_class):
+ mock_ssl_cred = mock.Mock()
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel"
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_placement_path():
+ project = "squid"
+ location = "clam"
+ catalog = "whelk"
+ event_store = "octopus"
+ placement = "oyster"
+
+ expected = "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}/placements/{placement}".format(
+ project=project,
+ location=location,
+ catalog=catalog,
+ event_store=event_store,
+ placement=placement,
+ )
+ actual = PredictionServiceClient.placement_path(
+ project, location, catalog, event_store, placement
+ )
+ assert expected == actual
+
+
+def test_parse_placement_path():
+ expected = {
+ "project": "nudibranch",
+ "location": "cuttlefish",
+ "catalog": "mussel",
+ "event_store": "winkle",
+ "placement": "nautilus",
+ }
+ path = PredictionServiceClient.placement_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_placement_path(path)
+ assert expected == actual
+
+
+def test_common_billing_account_path():
+ billing_account = "scallop"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = PredictionServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "abalone",
+ }
+ path = PredictionServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "squid"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = PredictionServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "clam",
+ }
+ path = PredictionServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "whelk"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = PredictionServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "octopus",
+ }
+ path = PredictionServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "oyster"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = PredictionServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nudibranch",
+ }
+ path = PredictionServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "cuttlefish"
+ location = "mussel"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = PredictionServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "winkle",
+ "location": "nautilus",
+ }
+ path = PredictionServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PredictionServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.PredictionServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = PredictionServiceClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.PredictionServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = PredictionServiceClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py b/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py
new file mode 100644
index 00000000..b4753dde
--- /dev/null
+++ b/tests/unit/gapic/recommendationengine_v1beta1/test_user_event_service.py
@@ -0,0 +1,2404 @@
+# -*- coding: utf-8 -*-
+
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import os
+import mock
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+from google import auth
+from google.api import httpbody_pb2 as httpbody # type: ignore
+from google.api_core import client_options
+from google.api_core import exceptions
+from google.api_core import future
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.api_core import operation_async # type: ignore
+from google.api_core import operations_v1
+from google.auth import credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.recommendationengine_v1beta1.services.user_event_service import (
+ UserEventServiceAsyncClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.user_event_service import (
+ UserEventServiceClient,
+)
+from google.cloud.recommendationengine_v1beta1.services.user_event_service import pagers
+from google.cloud.recommendationengine_v1beta1.services.user_event_service import (
+ transports,
+)
+from google.cloud.recommendationengine_v1beta1.types import catalog
+from google.cloud.recommendationengine_v1beta1.types import common
+from google.cloud.recommendationengine_v1beta1.types import import_
+from google.cloud.recommendationengine_v1beta1.types import user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event as gcr_user_event
+from google.cloud.recommendationengine_v1beta1.types import user_event_service
+from google.longrunning import operations_pb2
+from google.oauth2 import service_account
+from google.protobuf import any_pb2 as gp_any # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert UserEventServiceClient._get_default_mtls_endpoint(None) is None
+ assert (
+ UserEventServiceClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ UserEventServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ UserEventServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ UserEventServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ UserEventServiceClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [UserEventServiceClient, UserEventServiceAsyncClient,]
+)
+def test_user_event_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [UserEventServiceClient, UserEventServiceAsyncClient,]
+)
+def test_user_event_service_client_from_service_account_file(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
+def test_user_event_service_client_get_transport_class():
+ transport = UserEventServiceClient.get_transport_class()
+ available_transports = [
+ transports.UserEventServiceGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = UserEventServiceClient.get_transport_class("grpc")
+ assert transport == transports.UserEventServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (UserEventServiceClient, transports.UserEventServiceGrpcTransport, "grpc"),
+ (
+ UserEventServiceAsyncClient,
+ transports.UserEventServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ UserEventServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(UserEventServiceClient),
+)
+@mock.patch.object(
+ UserEventServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(UserEventServiceAsyncClient),
+)
+def test_user_event_service_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(UserEventServiceClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(UserEventServiceClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ UserEventServiceClient,
+ transports.UserEventServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ UserEventServiceAsyncClient,
+ transports.UserEventServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ UserEventServiceClient,
+ transports.UserEventServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ UserEventServiceAsyncClient,
+ transports.UserEventServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ UserEventServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(UserEventServiceClient),
+)
+@mock.patch.object(
+ UserEventServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(UserEventServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_user_event_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (UserEventServiceClient, transports.UserEventServiceGrpcTransport, "grpc"),
+ (
+ UserEventServiceAsyncClient,
+ transports.UserEventServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_user_event_service_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (UserEventServiceClient, transports.UserEventServiceGrpcTransport, "grpc"),
+ (
+ UserEventServiceAsyncClient,
+ transports.UserEventServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_user_event_service_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_user_event_service_client_client_options_from_dict():
+ with mock.patch(
+ "google.cloud.recommendationengine_v1beta1.services.user_event_service.transports.UserEventServiceGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = UserEventServiceClient(
+ client_options={"api_endpoint": "squid.clam.whelk"}
+ )
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_write_user_event(
+ transport: str = "grpc", request_type=user_event_service.WriteUserEventRequest
+):
+ client = UserEventServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gcr_user_event.UserEvent(
+ event_type="event_type_value",
+ event_source=gcr_user_event.UserEvent.EventSource.AUTOML,
+ )
+
+ response = client.write_user_event(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == user_event_service.WriteUserEventRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, gcr_user_event.UserEvent)
+
+ assert response.event_type == "event_type_value"
+
+ assert response.event_source == gcr_user_event.UserEvent.EventSource.AUTOML
+
+
+def test_write_user_event_from_dict():
+ test_write_user_event(request_type=dict)
+
+
+def test_write_user_event_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = UserEventServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+ client.write_user_event()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == user_event_service.WriteUserEventRequest()
+
+
+@pytest.mark.asyncio
+async def test_write_user_event_async(
+    transport: str = "grpc_asyncio",
+    request_type=user_event_service.WriteUserEventRequest,
+):
+    """Async variant: write_user_event returns the UserEvent from the stub."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gcr_user_event.UserEvent(
+                event_type="event_type_value",
+                event_source=gcr_user_event.UserEvent.EventSource.AUTOML,
+            )
+        )
+
+        response = await client.write_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.WriteUserEventRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, gcr_user_event.UserEvent)
+
+    assert response.event_type == "event_type_value"
+
+    assert response.event_source == gcr_user_event.UserEvent.EventSource.AUTOML
+
+
+@pytest.mark.asyncio
+async def test_write_user_event_async_from_dict():
+    """Re-run the async happy-path test with a dict request body."""
+    await test_write_user_event_async(request_type=dict)
+
+
+def test_write_user_event_field_headers():
+    """request.parent must be echoed in x-goog-request-params metadata."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.WriteUserEventRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+        call.return_value = gcr_user_event.UserEvent()
+
+        client.write_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_write_user_event_field_headers_async():
+    """Async variant: request.parent must be sent as a routing header."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.WriteUserEventRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            gcr_user_event.UserEvent()
+        )
+
+        await client.write_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_write_user_event_flattened():
+    """Flattened kwargs must be copied into the request proto's fields."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = gcr_user_event.UserEvent()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.write_user_event(
+            parent="parent_value",
+            user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].user_event == gcr_user_event.UserEvent(
+            event_type="event_type_value"
+        )
+
+
+def test_write_user_event_flattened_error():
+    """Mixing a request object with flattened fields must raise ValueError."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.write_user_event(
+            user_event_service.WriteUserEventRequest(),
+            parent="parent_value",
+            user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+        )
+
+
+@pytest.mark.asyncio
+async def test_write_user_event_flattened_async():
+ client = UserEventServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.write_user_event), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = gcr_user_event.UserEvent()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ gcr_user_event.UserEvent()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.write_user_event(
+ parent="parent_value",
+ user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].user_event == gcr_user_event.UserEvent(
+ event_type="event_type_value"
+ )
+
+
+@pytest.mark.asyncio
+async def test_write_user_event_flattened_error_async():
+    """Async variant: request object plus flattened fields must raise."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.write_user_event(
+            user_event_service.WriteUserEventRequest(),
+            parent="parent_value",
+            user_event=gcr_user_event.UserEvent(event_type="event_type_value"),
+        )
+
+
+def test_collect_user_event(
+    transport: str = "grpc", request_type=user_event_service.CollectUserEventRequest
+):
+    """collect_user_event must forward the request and return the HttpBody."""
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = httpbody.HttpBody(
+            content_type="content_type_value", data=b"data_blob",
+        )
+
+        response = client.collect_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.CollectUserEventRequest()
+
+    # Establish that the response is the type that we expect.
+
+    assert isinstance(response, httpbody.HttpBody)
+
+    assert response.content_type == "content_type_value"
+
+    assert response.data == b"data_blob"
+
+
+def test_collect_user_event_from_dict():
+    """Re-run the CollectUserEvent happy-path test with a dict request body."""
+    test_collect_user_event(request_type=dict)
+
+
+def test_collect_user_event_empty_call():
+    """Calling collect_user_event with no arguments must still hit the stub."""
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        client.collect_user_event()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+
+        # With no arguments, the client must send a default (empty) request.
+        assert args[0] == user_event_service.CollectUserEventRequest()
+
+
+@pytest.mark.asyncio
+async def test_collect_user_event_async(
+    transport: str = "grpc_asyncio",
+    request_type=user_event_service.CollectUserEventRequest,
+):
+    """Async variant: collect_user_event returns the HttpBody from the stub."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            httpbody.HttpBody(content_type="content_type_value", data=b"data_blob",)
+        )
+
+        response = await client.collect_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.CollectUserEventRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, httpbody.HttpBody)
+
+    assert response.content_type == "content_type_value"
+
+    assert response.data == b"data_blob"
+
+
+@pytest.mark.asyncio
+async def test_collect_user_event_async_from_dict():
+    """Re-run the async happy-path test with a dict request body."""
+    await test_collect_user_event_async(request_type=dict)
+
+
+def test_collect_user_event_field_headers():
+    """request.parent must be echoed in x-goog-request-params metadata."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.CollectUserEventRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        call.return_value = httpbody.HttpBody()
+
+        client.collect_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_collect_user_event_field_headers_async():
+    """Async variant: request.parent must be sent as a routing header."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.CollectUserEventRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(httpbody.HttpBody())
+
+        await client.collect_user_event(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_collect_user_event_flattened():
+    """Flattened kwargs must be copied into the request proto's fields."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.collect_user_event), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = httpbody.HttpBody()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.collect_user_event(
+            parent="parent_value",
+            user_event="user_event_value",
+            uri="uri_value",
+            ets=332,
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].user_event == "user_event_value"
+
+        assert args[0].uri == "uri_value"
+
+        assert args[0].ets == 332
+
+
+def test_collect_user_event_flattened_error():
+    """Mixing a request object with flattened fields must raise ValueError."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.collect_user_event(
+            user_event_service.CollectUserEventRequest(),
+            parent="parent_value",
+            user_event="user_event_value",
+            uri="uri_value",
+            ets=332,
+        )
+
+
+@pytest.mark.asyncio
+async def test_collect_user_event_flattened_async():
+ client = UserEventServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.collect_user_event), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = httpbody.HttpBody()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(httpbody.HttpBody())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.collect_user_event(
+ parent="parent_value",
+ user_event="user_event_value",
+ uri="uri_value",
+ ets=332,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].user_event == "user_event_value"
+
+ assert args[0].uri == "uri_value"
+
+ assert args[0].ets == 332
+
+
+@pytest.mark.asyncio
+async def test_collect_user_event_flattened_error_async():
+    """Async variant: request object plus flattened fields must raise."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.collect_user_event(
+            user_event_service.CollectUserEventRequest(),
+            parent="parent_value",
+            user_event="user_event_value",
+            uri="uri_value",
+            ets=332,
+        )
+
+
+def test_list_user_events(
+    transport: str = "grpc", request_type=user_event_service.ListUserEventsRequest
+):
+    """list_user_events must forward the request and wrap the response in a pager."""
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = user_event_service.ListUserEventsResponse(
+            next_page_token="next_page_token_value",
+        )
+
+        response = client.list_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.ListUserEventsRequest()
+
+    # Establish that the response is the type that we expect.
+
+    assert isinstance(response, pagers.ListUserEventsPager)
+
+    assert response.next_page_token == "next_page_token_value"
+
+
+def test_list_user_events_from_dict():
+    """Re-run the ListUserEvents happy-path test with a dict request body."""
+    test_list_user_events(request_type=dict)
+
+
+def test_list_user_events_empty_call():
+    """Calling list_user_events with no arguments must still hit the stub."""
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        client.list_user_events()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+
+        # With no arguments, the client must send a default (empty) request.
+        assert args[0] == user_event_service.ListUserEventsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_async(
+    transport: str = "grpc_asyncio",
+    request_type=user_event_service.ListUserEventsRequest,
+):
+    """Async variant: list_user_events returns an async pager over the stub data."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            user_event_service.ListUserEventsResponse(
+                next_page_token="next_page_token_value",
+            )
+        )
+
+        response = await client.list_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.ListUserEventsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListUserEventsAsyncPager)
+
+    assert response.next_page_token == "next_page_token_value"
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_async_from_dict():
+    """Re-run the async happy-path test with a dict request body."""
+    await test_list_user_events_async(request_type=dict)
+
+
+def test_list_user_events_field_headers():
+    """request.parent must be echoed in x-goog-request-params metadata."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.ListUserEventsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        call.return_value = user_event_service.ListUserEventsResponse()
+
+        client.list_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_field_headers_async():
+    """Async variant: request.parent must be sent as a routing header."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.ListUserEventsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            user_event_service.ListUserEventsResponse()
+        )
+
+        await client.list_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_user_events_flattened():
+    """Flattened kwargs must be copied into the request proto's fields."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = user_event_service.ListUserEventsResponse()
+
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_user_events(
+            parent="parent_value", filter="filter_value",
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0].parent == "parent_value"
+
+        assert args[0].filter == "filter_value"
+
+
+def test_list_user_events_flattened_error():
+    """Mixing a request object with flattened fields must raise ValueError."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_user_events(
+            user_event_service.ListUserEventsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_flattened_async():
+ client = UserEventServiceAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = user_event_service.ListUserEventsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ user_event_service.ListUserEventsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_user_events(
+ parent="parent_value", filter="filter_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_flattened_error_async():
+    """Async variant: request object plus flattened fields must raise."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_user_events(
+            user_event_service.ListUserEventsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+        )
+
+
+def test_list_user_events_pager():
+ client = UserEventServiceClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ user_event_service.ListUserEventsResponse(
+ user_events=[
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ ],
+ next_page_token="abc",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[], next_page_token="def",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(),], next_page_token="ghi",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(), user_event.UserEvent(),],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_user_events(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, user_event.UserEvent) for i in results)
+
+
+def test_list_user_events_pages():
+ client = UserEventServiceClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_user_events), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ user_event_service.ListUserEventsResponse(
+ user_events=[
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ ],
+ next_page_token="abc",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[], next_page_token="def",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(),], next_page_token="ghi",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(), user_event.UserEvent(),],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_user_events(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_async_pager():
+ client = UserEventServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_user_events), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ user_event_service.ListUserEventsResponse(
+ user_events=[
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ ],
+ next_page_token="abc",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[], next_page_token="def",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(),], next_page_token="ghi",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(), user_event.UserEvent(),],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_user_events(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, user_event.UserEvent) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_user_events_async_pages():
+ client = UserEventServiceAsyncClient(credentials=credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_user_events), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ user_event_service.ListUserEventsResponse(
+ user_events=[
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ user_event.UserEvent(),
+ ],
+ next_page_token="abc",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[], next_page_token="def",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(),], next_page_token="ghi",
+ ),
+ user_event_service.ListUserEventsResponse(
+ user_events=[user_event.UserEvent(), user_event.UserEvent(),],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_user_events(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_purge_user_events(
+    transport: str = "grpc", request_type=user_event_service.PurgeUserEventsRequest
+):
+    """purge_user_events must forward the request and return an LRO future."""
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.purge_user_events), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = operations_pb2.Operation(name="operations/spam")
+
+        response = client.purge_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.PurgeUserEventsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+def test_purge_user_events_from_dict():
+    """Re-run the PurgeUserEvents happy-path test with a dict request body."""
+    test_purge_user_events(request_type=dict)
+
+
+def test_purge_user_events_empty_call():
+    """Calling purge_user_events with no arguments must still hit the stub."""
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = UserEventServiceClient(
+        credentials=credentials.AnonymousCredentials(), transport="grpc",
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.purge_user_events), "__call__"
+    ) as call:
+        client.purge_user_events()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+
+        # With no arguments, the client must send a default (empty) request.
+        assert args[0] == user_event_service.PurgeUserEventsRequest()
+
+
+@pytest.mark.asyncio
+async def test_purge_user_events_async(
+    transport: str = "grpc_asyncio",
+    request_type=user_event_service.PurgeUserEventsRequest,
+):
+    """Async variant: purge_user_events returns an LRO future."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.purge_user_events), "__call__"
+    ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/spam")
+        )
+
+        response = await client.purge_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == user_event_service.PurgeUserEventsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, future.Future)
+
+
+@pytest.mark.asyncio
+async def test_purge_user_events_async_from_dict():
+    """Re-run the async happy-path test with a dict request body."""
+    await test_purge_user_events_async(request_type=dict)
+
+
+def test_purge_user_events_field_headers():
+    """request.parent must be echoed in x-goog-request-params metadata."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.PurgeUserEventsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.purge_user_events), "__call__"
+    ) as call:
+        call.return_value = operations_pb2.Operation(name="operations/op")
+
+        client.purge_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_purge_user_events_field_headers_async():
+    """Async variant: request.parent must be sent as a routing header."""
+    client = UserEventServiceAsyncClient(
+        credentials=credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = user_event_service.PurgeUserEventsRequest()
+    request.parent = "parent/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+        type(client.transport.purge_user_events), "__call__"
+    ) as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            operations_pb2.Operation(name="operations/op")
+        )
+
+        await client.purge_user_events(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_purge_user_events_flattened():
+ client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.purge_user_events), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = operations_pb2.Operation(name="operations/op")
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.purge_user_events(
+ parent="parent_value", filter="filter_value", force=True,
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+ assert args[0].force == True
+
+
+def test_purge_user_events_flattened_error():
+    """Mixing a request object with flattened fields must raise ValueError."""
+    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.purge_user_events(
+            user_event_service.PurgeUserEventsRequest(),
+            parent="parent_value",
+            filter="filter_value",
+            force=True,
+        )
+
+
@pytest.mark.asyncio
async def test_purge_user_events_flattened_async():
    """Async variant: the flattened form of purge_user_events must build a
    request carrying each flattened field."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.purge_user_events), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        # NOTE: a previous plain-Operation assignment here was dead code —
        # it was immediately overwritten by this fake — and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.purge_user_events(
            parent="parent_value", filter="filter_value", force=True,
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].filter == "filter_value"

        # `is True`, not `== True` (flake8 E712).
        assert args[0].force is True
+
+
@pytest.mark.asyncio
async def test_purge_user_events_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.purge_user_events(
            user_event_service.PurgeUserEventsRequest(),
            parent="parent_value",
            filter="filter_value",
            force=True,
        )
+
+
def test_import_user_events(
    transport: str = "grpc", request_type=import_.ImportUserEventsRequest
):
    """import_user_events sends the expected request and returns an LRO future."""
    client = UserEventServiceClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")

        response = client.import_user_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == import_.ImportUserEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
def test_import_user_events_from_dict():
    """Re-run the gRPC import test with a dict-typed request."""
    test_import_user_events(request_type=dict)
+
+
def test_import_user_events_empty_call():
    """Calling with no request and no flattened fields sends a default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = UserEventServiceClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        client.import_user_events()
        call.assert_called()
        _, args, _ = call.mock_calls[0]

        assert args[0] == import_.ImportUserEventsRequest()
+
+
@pytest.mark.asyncio
async def test_import_user_events_async(
    transport: str = "grpc_asyncio", request_type=import_.ImportUserEventsRequest
):
    """Async variant: import_user_events sends the expected request and returns an LRO future."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )

        response = await client.import_user_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == import_.ImportUserEventsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
+
+
@pytest.mark.asyncio
async def test_import_user_events_async_from_dict():
    """Re-run the async import test with a dict-typed request."""
    await test_import_user_events_async(request_type=dict)
+
+
def test_import_user_events_field_headers():
    """The request's resource name must be echoed into x-goog-request-params metadata."""
    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = import_.ImportUserEventsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        call.return_value = operations_pb2.Operation(name="operations/op")

        client.import_user_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
@pytest.mark.asyncio
async def test_import_user_events_field_headers_async():
    """Async variant: the resource name must be echoed into x-goog-request-params metadata."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = import_.ImportUserEventsRequest()
    request.parent = "parent/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/op")
        )

        await client.import_user_events(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
def test_import_user_events_flattened():
    """The flattened form of import_user_events must populate every flattened field."""
    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.import_user_events(
            parent="parent_value",
            request_id="request_id_value",
            input_config=import_.InputConfig(
                catalog_inline_source=import_.CatalogInlineSource(
                    catalog_items=[catalog.CatalogItem(id="id_value")]
                )
            ),
            errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].request_id == "request_id_value"

        assert args[0].input_config == import_.InputConfig(
            catalog_inline_source=import_.CatalogInlineSource(
                catalog_items=[catalog.CatalogItem(id="id_value")]
            )
        )

        assert args[0].errors_config == import_.ImportErrorsConfig(
            gcs_prefix="gcs_prefix_value"
        )
+
+
def test_import_user_events_flattened_error():
    """Passing both a request object and flattened fields raises ValueError."""
    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.import_user_events(
            import_.ImportUserEventsRequest(),
            parent="parent_value",
            request_id="request_id_value",
            input_config=import_.InputConfig(
                catalog_inline_source=import_.CatalogInlineSource(
                    catalog_items=[catalog.CatalogItem(id="id_value")]
                )
            ),
            errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
        )
+
+
@pytest.mark.asyncio
async def test_import_user_events_flattened_async():
    """Async variant: the flattened form of import_user_events must populate
    every flattened field."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.import_user_events), "__call__"
    ) as call:
        # Designate an appropriate (awaitable) return value for the call.
        # NOTE: a previous plain-Operation assignment here was dead code —
        # it was immediately overwritten by this fake — and was removed.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.import_user_events(
            parent="parent_value",
            request_id="request_id_value",
            input_config=import_.InputConfig(
                catalog_inline_source=import_.CatalogInlineSource(
                    catalog_items=[catalog.CatalogItem(id="id_value")]
                )
            ),
            errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0].parent == "parent_value"

        assert args[0].request_id == "request_id_value"

        assert args[0].input_config == import_.InputConfig(
            catalog_inline_source=import_.CatalogInlineSource(
                catalog_items=[catalog.CatalogItem(id="id_value")]
            )
        )

        assert args[0].errors_config == import_.ImportErrorsConfig(
            gcs_prefix="gcs_prefix_value"
        )
+
+
@pytest.mark.asyncio
async def test_import_user_events_flattened_error_async():
    """Async variant: mixing a request object with flattened fields raises ValueError."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(),
    )

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.import_user_events(
            import_.ImportUserEventsRequest(),
            parent="parent_value",
            request_id="request_id_value",
            input_config=import_.InputConfig(
                catalog_inline_source=import_.CatalogInlineSource(
                    catalog_items=[catalog.CatalogItem(id="id_value")]
                )
            ),
            errors_config=import_.ImportErrorsConfig(gcs_prefix="gcs_prefix_value"),
        )
+
+
def test_credentials_transport_error():
    """A transport instance may not be combined with credentials, a credentials
    file, or scopes — each combination must raise ValueError."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.UserEventServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = UserEventServiceClient(
            credentials=credentials.AnonymousCredentials(), transport=transport,
        )

    # It is an error to provide a credentials file and a transport instance.
    transport = transports.UserEventServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = UserEventServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )

    # It is an error to provide scopes and a transport instance.
    transport = transports.UserEventServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = UserEventServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
+
+
def test_transport_instance():
    """A caller-supplied transport instance is used as-is by the client."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.UserEventServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    client = UserEventServiceClient(transport=transport)
    assert client.transport is transport
+
+
def test_transport_get_channel():
    """Both sync and asyncio transports expose a usable grpc_channel."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.UserEventServiceGrpcTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel

    transport = transports.UserEventServiceGrpcAsyncIOTransport(
        credentials=credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.UserEventServiceGrpcTransport,
        transports.UserEventServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials when none are given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(auth, "default") as adc:
        adc.return_value = (credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
+
+
def test_transport_grpc_default():
    """gRPC is the default transport when none is specified."""
    # A client should use the gRPC transport by default.
    client = UserEventServiceClient(credentials=credentials.AnonymousCredentials(),)
    assert isinstance(client.transport, transports.UserEventServiceGrpcTransport,)
+
+
def test_user_event_service_base_transport_error():
    """Supplying both credentials and a credentials file raises DuplicateCredentialArgs."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(exceptions.DuplicateCredentialArgs):
        transport = transports.UserEventServiceTransport(
            credentials=credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
+
+
def test_user_event_service_base_transport():
    """Every RPC method (and the LRO client property) on the abstract base
    transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.recommendationengine_v1beta1.services.user_event_service.transports.UserEventServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.UserEventServiceTransport(
            credentials=credentials.AnonymousCredentials(),
        )

    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        "write_user_event",
        "collect_user_event",
        "list_user_events",
        "purge_user_events",
        "import_user_events",
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
+
+
def test_user_event_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the cloud-platform scope and quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        auth, "load_credentials_from_file"
    ) as load_creds, mock.patch(
        "google.cloud.recommendationengine_v1beta1.services.user_event_service.transports.UserEventServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (credentials.AnonymousCredentials(), None)
        transport = transports.UserEventServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
+
+
def test_user_event_service_base_transport_with_adc():
    """The base transport falls back to ADC when no credentials are given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(auth, "default") as adc, mock.patch(
        "google.cloud.recommendationengine_v1beta1.services.user_event_service.transports.UserEventServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (credentials.AnonymousCredentials(), None)
        transport = transports.UserEventServiceTransport()
        adc.assert_called_once()
+
+
def test_user_event_service_auth_adc():
    """The client requests ADC with the cloud-platform scope when no credentials are given."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(auth, "default") as adc:
        adc.return_value = (credentials.AnonymousCredentials(), None)
        UserEventServiceClient()
        adc.assert_called_once_with(
            scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
+
+
def test_user_event_service_transport_auth_adc():
    """The gRPC transport requests ADC, forwarding the quota project id."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(auth, "default") as adc:
        adc.return_value = (credentials.AnonymousCredentials(), None)
        transports.UserEventServiceGrpcTransport(
            host="squid.clam.whelk", quota_project_id="octopus"
        )
        adc.assert_called_once_with(
            scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
+
+
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.UserEventServiceGrpcTransport,
        transports.UserEventServiceGrpcAsyncIOTransport,
    ],
)
def test_user_event_service_grpc_transport_client_cert_source_for_mtls(transport_class):
    """mTLS channel setup: explicit ssl_channel_credentials wins; otherwise the
    client_cert_source_for_mtls callback supplies the cert/key pair."""
    cred = credentials.AnonymousCredentials()

    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=("https://www.googleapis.com/auth/cloud-platform",),
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )

    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
+
+
def test_user_event_service_host_no_port():
    """An endpoint without a port gets the default :443 appended."""
    client = UserEventServiceClient(
        credentials=credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="recommendationengine.googleapis.com"
        ),
    )
    assert client.transport._host == "recommendationengine.googleapis.com:443"
+
+
def test_user_event_service_host_with_port():
    """An endpoint with an explicit port is preserved verbatim."""
    client = UserEventServiceClient(
        credentials=credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(
            api_endpoint="recommendationengine.googleapis.com:8000"
        ),
    )
    assert client.transport._host == "recommendationengine.googleapis.com:8000"
+
+
def test_user_event_service_grpc_transport_channel():
    """A caller-supplied gRPC channel is used verbatim by the sync transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.UserEventServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None`, not `== None`: identity comparison with the None singleton
    # is the idiomatic check (flake8 E711).
    assert transport._ssl_channel_credentials is None
+
+
def test_user_event_service_grpc_asyncio_transport_channel():
    """A caller-supplied gRPC channel is used verbatim by the asyncio transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.UserEventServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # `is None`, not `== None` (flake8 E711).
    assert transport._ssl_channel_credentials is None
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.UserEventServiceGrpcTransport,
        transports.UserEventServiceGrpcAsyncIOTransport,
    ],
)
def test_user_event_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint/client_cert_source args: the callback's
    cert/key feed grpc.ssl_channel_credentials, a DeprecationWarning is
    emitted, and the mTLS endpoint/channel are wired through."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred

            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel

            cred = credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()

            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=("https://www.googleapis.com/auth/cloud-platform",),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.UserEventServiceGrpcTransport,
        transports.UserEventServiceGrpcAsyncIOTransport,
    ],
)
def test_user_event_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint with client_cert_source=None: SSL
    credentials come from google.auth's SslCredentials (ADC mTLS path)."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()

            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )

            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=("https://www.googleapis.com/auth/cloud-platform",),
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
+
+
def test_user_event_service_grpc_lro_client():
    """The sync transport exposes a cached operations_v1.OperationsClient."""
    client = UserEventServiceClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
+
+
def test_user_event_service_grpc_lro_async_client():
    """The asyncio transport exposes a cached operations_v1.OperationsAsyncClient."""
    client = UserEventServiceAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport

    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
+
+
def test_event_store_path():
    """event_store_path formats the full event-store resource name."""
    project = "squid"
    location = "clam"
    catalog = "whelk"
    event_store = "octopus"

    expected = "projects/{project}/locations/{location}/catalogs/{catalog}/eventStores/{event_store}".format(
        project=project, location=location, catalog=catalog, event_store=event_store,
    )
    actual = UserEventServiceClient.event_store_path(
        project, location, catalog, event_store
    )
    assert expected == actual
+
+
def test_parse_event_store_path():
    """parse_event_store_path is the inverse of event_store_path."""
    expected = {
        "project": "oyster",
        "location": "nudibranch",
        "catalog": "cuttlefish",
        "event_store": "mussel",
    }
    path = UserEventServiceClient.event_store_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_event_store_path(path)
    assert expected == actual
+
+
def test_common_billing_account_path():
    """common_billing_account_path formats a billingAccounts/... resource name."""
    billing_account = "winkle"

    expected = "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )
    actual = UserEventServiceClient.common_billing_account_path(billing_account)
    assert expected == actual
+
+
def test_parse_common_billing_account_path():
    """parse_common_billing_account_path is the inverse of the formatter."""
    expected = {
        "billing_account": "nautilus",
    }
    path = UserEventServiceClient.common_billing_account_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_common_billing_account_path(path)
    assert expected == actual
+
+
def test_common_folder_path():
    """common_folder_path formats a folders/... resource name."""
    folder = "scallop"

    expected = "folders/{folder}".format(folder=folder,)
    actual = UserEventServiceClient.common_folder_path(folder)
    assert expected == actual
+
+
def test_parse_common_folder_path():
    """parse_common_folder_path is the inverse of the formatter."""
    expected = {
        "folder": "abalone",
    }
    path = UserEventServiceClient.common_folder_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_common_folder_path(path)
    assert expected == actual
+
+
def test_common_organization_path():
    """common_organization_path formats an organizations/... resource name."""
    organization = "squid"

    expected = "organizations/{organization}".format(organization=organization,)
    actual = UserEventServiceClient.common_organization_path(organization)
    assert expected == actual
+
+
def test_parse_common_organization_path():
    """parse_common_organization_path is the inverse of the formatter."""
    expected = {
        "organization": "clam",
    }
    path = UserEventServiceClient.common_organization_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_common_organization_path(path)
    assert expected == actual
+
+
def test_common_project_path():
    """common_project_path formats a projects/... resource name."""
    project = "whelk"

    expected = "projects/{project}".format(project=project,)
    actual = UserEventServiceClient.common_project_path(project)
    assert expected == actual
+
+
def test_parse_common_project_path():
    """parse_common_project_path is the inverse of the formatter."""
    expected = {
        "project": "octopus",
    }
    path = UserEventServiceClient.common_project_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_common_project_path(path)
    assert expected == actual
+
+
def test_common_location_path():
    """common_location_path formats a projects/.../locations/... resource name."""
    project = "oyster"
    location = "nudibranch"

    expected = "projects/{project}/locations/{location}".format(
        project=project, location=location,
    )
    actual = UserEventServiceClient.common_location_path(project, location)
    assert expected == actual
+
+
def test_parse_common_location_path():
    """parse_common_location_path is the inverse of the formatter."""
    expected = {
        "project": "cuttlefish",
        "location": "mussel",
    }
    path = UserEventServiceClient.common_location_path(**expected)

    # Check that the path construction is reversible.
    actual = UserEventServiceClient.parse_common_location_path(path)
    assert expected == actual
+
+
def test_client_with_default_client_info():
    """A custom ClientInfo is forwarded to _prep_wrapped_messages when
    constructing either the client or the transport directly.

    Renamed from ``test_client_withDEFAULT_CLIENT_INFO`` to snake_case
    (PEP 8); pytest discovers the test by its ``test_`` prefix either way.
    """
    client_info = gapic_v1.client_info.ClientInfo()

    with mock.patch.object(
        transports.UserEventServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = UserEventServiceClient(
            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    with mock.patch.object(
        transports.UserEventServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = UserEventServiceClient.get_transport_class()
        transport = transport_class(
            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
diff --git a/tests/unit/recommendationengine_v1beta1/test_catalog_service.py b/tests/unit/recommendationengine_v1beta1/test_catalog_service.py
deleted file mode 100644
index 612b18cc..00000000
--- a/tests/unit/recommendationengine_v1beta1/test_catalog_service.py
+++ /dev/null
@@ -1,614 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from unittest import mock
-
-import grpc
-import math
-import pytest
-
-from google import auth
-from google.api_core import client_options
-from google.api_core import future
-from google.api_core import operations_v1
-from google.auth import credentials
-from google.cloud.recommendationengine_v1beta1.services.catalog_service import (
- CatalogServiceClient,
-)
-from google.cloud.recommendationengine_v1beta1.services.catalog_service import pagers
-from google.cloud.recommendationengine_v1beta1.services.catalog_service import (
- transports,
-)
-from google.cloud.recommendationengine_v1beta1.types import catalog
-from google.cloud.recommendationengine_v1beta1.types import catalog_service
-from google.cloud.recommendationengine_v1beta1.types import common
-from google.cloud.recommendationengine_v1beta1.types import import_
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-
-
-def test_catalog_service_client_from_service_account_file():
- creds = credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = CatalogServiceClient.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
-
- client = CatalogServiceClient.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
-
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_catalog_service_client_client_options():
- # Check the default options have their expected values.
- assert (
- CatalogServiceClient.DEFAULT_OPTIONS.api_endpoint
- == "recommendationengine.googleapis.com"
- )
-
- # Check that options can be customized.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.catalog_service.CatalogServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = CatalogServiceClient(client_options=options)
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_catalog_service_client_client_options_from_dict():
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.catalog_service.CatalogServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = CatalogServiceClient(
- client_options={"api_endpoint": "squid.clam.whelk"}
- )
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_create_catalog_item(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = catalog_service.CreateCatalogItemRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.create_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.CatalogItem(
- id="id_value",
- title="title_value",
- description="description_value",
- language_code="language_code_value",
- tags=["tags_value"],
- item_group_id="item_group_id_value",
- )
-
- response = client.create_catalog_item(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.CatalogItem)
- assert response.id == "id_value"
- assert response.title == "title_value"
- assert response.description == "description_value"
- assert response.language_code == "language_code_value"
- assert response.tags == ["tags_value"]
- assert response.item_group_id == "item_group_id_value"
-
-
-def test_get_catalog_item(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = catalog_service.GetCatalogItemRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.CatalogItem(
- id="id_value",
- title="title_value",
- description="description_value",
- language_code="language_code_value",
- tags=["tags_value"],
- item_group_id="item_group_id_value",
- )
-
- response = client.get_catalog_item(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.CatalogItem)
- assert response.id == "id_value"
- assert response.title == "title_value"
- assert response.description == "description_value"
- assert response.language_code == "language_code_value"
- assert response.tags == ["tags_value"]
- assert response.item_group_id == "item_group_id_value"
-
-
-def test_get_catalog_item_field_headers():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog_service.GetCatalogItemRequest(name="name/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_catalog_item), "__call__"
- ) as call:
- call.return_value = catalog.CatalogItem()
- client.get_catalog_item(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "name=name/value") in kw["metadata"]
-
-
-def test_get_catalog_item_flattened():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.CatalogItem()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.get_catalog_item(name="name_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].name == "name_value"
-
-
-def test_get_catalog_item_flattened_error():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.get_catalog_item(
- catalog_service.GetCatalogItemRequest(), name="name_value"
- )
-
-
-def test_list_catalog_items(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = catalog_service.ListCatalogItemsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_catalog_items), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog_service.ListCatalogItemsResponse(
- next_page_token="next_page_token_value"
- )
-
- response = client.list_catalog_items(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListCatalogItemsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_list_catalog_items_field_headers():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = catalog_service.ListCatalogItemsRequest(parent="parent/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_catalog_items), "__call__"
- ) as call:
- call.return_value = catalog_service.ListCatalogItemsResponse()
- client.list_catalog_items(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
-
-
-def test_list_catalog_items_pager():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_catalog_items), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[
- catalog.CatalogItem(),
- catalog.CatalogItem(),
- catalog.CatalogItem(),
- ],
- next_page_token="abc",
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[], next_page_token="def"
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[catalog.CatalogItem()], next_page_token="ghi"
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[catalog.CatalogItem(), catalog.CatalogItem()]
- ),
- RuntimeError,
- )
- results = [i for i in client.list_catalog_items(request={})]
- assert len(results) == 6
- assert all(isinstance(i, catalog.CatalogItem) for i in results)
-
-
-def test_list_catalog_items_pages():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_catalog_items), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[
- catalog.CatalogItem(),
- catalog.CatalogItem(),
- catalog.CatalogItem(),
- ],
- next_page_token="abc",
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[], next_page_token="def"
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[catalog.CatalogItem()], next_page_token="ghi"
- ),
- catalog_service.ListCatalogItemsResponse(
- catalog_items=[catalog.CatalogItem(), catalog.CatalogItem()]
- ),
- RuntimeError,
- )
- pages = list(client.list_catalog_items(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
-
-
-def test_update_catalog_item(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = catalog_service.UpdateCatalogItemRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.update_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.CatalogItem(
- id="id_value",
- title="title_value",
- description="description_value",
- language_code="language_code_value",
- tags=["tags_value"],
- item_group_id="item_group_id_value",
- )
-
- response = client.update_catalog_item(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, catalog.CatalogItem)
- assert response.id == "id_value"
- assert response.title == "title_value"
- assert response.description == "description_value"
- assert response.language_code == "language_code_value"
- assert response.tags == ["tags_value"]
- assert response.item_group_id == "item_group_id_value"
-
-
-def test_update_catalog_item_flattened():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.update_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = catalog.CatalogItem()
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.update_catalog_item(
- catalog_item=catalog.CatalogItem(id="id_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].catalog_item == catalog.CatalogItem(id="id_value")
- assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"])
-
-
-def test_update_catalog_item_flattened_error():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.update_catalog_item(
- catalog_service.UpdateCatalogItemRequest(),
- catalog_item=catalog.CatalogItem(id="id_value"),
- update_mask=field_mask.FieldMask(paths=["paths_value"]),
- )
-
-
-def test_delete_catalog_item(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = catalog_service.DeleteCatalogItemRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.delete_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- response = client.delete_catalog_item(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-def test_delete_catalog_item_flattened():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.delete_catalog_item), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = client.delete_catalog_item(name="name_value")
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0].name == "name_value"
-
-
-def test_delete_catalog_item_flattened_error():
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- client.delete_catalog_item(
- catalog_service.DeleteCatalogItemRequest(), name="name_value"
- )
-
-
-def test_import_catalog_items(transport: str = "grpc"):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = import_.ImportCatalogItemsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.import_catalog_items), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name="operations/spam")
-
- response = client.import_catalog_items(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.CatalogServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- with pytest.raises(ValueError):
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.CatalogServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = CatalogServiceClient(transport=transport)
- assert client._transport is transport
-
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = CatalogServiceClient(credentials=credentials.AnonymousCredentials())
- assert isinstance(client._transport, transports.CatalogServiceGrpcTransport)
-
-
-def test_catalog_service_base_transport():
- # Instantiate the base transport.
- transport = transports.CatalogServiceTransport(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- "create_catalog_item",
- "get_catalog_item",
- "list_catalog_items",
- "update_catalog_item",
- "delete_catalog_item",
- "import_catalog_items",
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
- # Additionally, the LRO client (a property) should
- # also raise NotImplementedError
- with pytest.raises(NotImplementedError):
- transport.operations_client
-
-
-def test_catalog_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- CatalogServiceClient()
- adc.assert_called_once_with(
- scopes=("https://www.googleapis.com/auth/cloud-platform",)
- )
-
-
-def test_catalog_service_host_no_port():
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_catalog_service_host_with_port():
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com:8000"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:8000"
-
-
-def test_catalog_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
- transport = transports.CatalogServiceGrpcTransport(channel=channel)
- assert transport.grpc_channel is channel
-
-
-def test_catalog_service_grpc_lro_client():
- client = CatalogServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc"
- )
- transport = client._transport
-
- # Ensure that we have a api-core operations client.
- assert isinstance(transport.operations_client, operations_v1.OperationsClient)
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client
diff --git a/tests/unit/recommendationengine_v1beta1/test_prediction_api_key_registry.py b/tests/unit/recommendationengine_v1beta1/test_prediction_api_key_registry.py
deleted file mode 100644
index 2066682e..00000000
--- a/tests/unit/recommendationengine_v1beta1/test_prediction_api_key_registry.py
+++ /dev/null
@@ -1,387 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from unittest import mock
-
-import grpc
-import math
-import pytest
-
-from google import auth
-from google.api_core import client_options
-from google.auth import credentials
-from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
- PredictionApiKeyRegistryClient,
-)
-from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
- pagers,
-)
-from google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry import (
- transports,
-)
-from google.cloud.recommendationengine_v1beta1.types import (
- prediction_apikey_registry_service,
-)
-from google.oauth2 import service_account
-
-
-def test_prediction_api_key_registry_client_from_service_account_file():
- creds = credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = PredictionApiKeyRegistryClient.from_service_account_file(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- client = PredictionApiKeyRegistryClient.from_service_account_json(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_prediction_api_key_registry_client_client_options():
- # Check the default options have their expected values.
- assert (
- PredictionApiKeyRegistryClient.DEFAULT_OPTIONS.api_endpoint
- == "recommendationengine.googleapis.com"
- )
-
- # Check that options can be customized.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.PredictionApiKeyRegistryClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = PredictionApiKeyRegistryClient(client_options=options)
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_prediction_api_key_registry_client_client_options_from_dict():
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.prediction_api_key_registry.PredictionApiKeyRegistryClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = PredictionApiKeyRegistryClient(
- client_options={"api_endpoint": "squid.clam.whelk"}
- )
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_create_prediction_api_key_registration(transport: str = "grpc"):
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = (
- prediction_apikey_registry_service.CreatePredictionApiKeyRegistrationRequest()
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.create_prediction_api_key_registration), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = prediction_apikey_registry_service.PredictionApiKeyRegistration(
- api_key="api_key_value"
- )
-
- response = client.create_prediction_api_key_registration(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(
- response, prediction_apikey_registry_service.PredictionApiKeyRegistration
- )
- assert response.api_key == "api_key_value"
-
-
-def test_list_prediction_api_key_registrations(transport: str = "grpc"):
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = (
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest()
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_prediction_api_key_registrations), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- next_page_token="next_page_token_value"
- )
-
- response = client.list_prediction_api_key_registrations(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListPredictionApiKeyRegistrationsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_list_prediction_api_key_registrations_field_headers():
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsRequest(
- parent="parent/value"
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_prediction_api_key_registrations), "__call__"
- ) as call:
- call.return_value = (
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse()
- )
- client.list_prediction_api_key_registrations(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
-
-
-def test_list_prediction_api_key_registrations_pager():
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_prediction_api_key_registrations), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- ],
- next_page_token="abc",
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[], next_page_token="def"
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration()
- ],
- next_page_token="ghi",
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- ]
- ),
- RuntimeError,
- )
- results = [i for i in client.list_prediction_api_key_registrations(request={})]
- assert len(results) == 6
- assert all(
- isinstance(
- i, prediction_apikey_registry_service.PredictionApiKeyRegistration
- )
- for i in results
- )
-
-
-def test_list_prediction_api_key_registrations_pages():
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_prediction_api_key_registrations), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- ],
- next_page_token="abc",
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[], next_page_token="def"
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration()
- ],
- next_page_token="ghi",
- ),
- prediction_apikey_registry_service.ListPredictionApiKeyRegistrationsResponse(
- prediction_api_key_registrations=[
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- prediction_apikey_registry_service.PredictionApiKeyRegistration(),
- ]
- ),
- RuntimeError,
- )
- pages = list(client.list_prediction_api_key_registrations(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
-
-
-def test_delete_prediction_api_key_registration(transport: str = "grpc"):
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = (
- prediction_apikey_registry_service.DeletePredictionApiKeyRegistrationRequest()
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.delete_prediction_api_key_registration), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = None
-
- response = client.delete_prediction_api_key_registration(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert response is None
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.PredictionApiKeyRegistryGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- with pytest.raises(ValueError):
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.PredictionApiKeyRegistryGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = PredictionApiKeyRegistryClient(transport=transport)
- assert client._transport is transport
-
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials()
- )
- assert isinstance(
- client._transport, transports.PredictionApiKeyRegistryGrpcTransport
- )
-
-
-def test_prediction_api_key_registry_base_transport():
- # Instantiate the base transport.
- transport = transports.PredictionApiKeyRegistryTransport(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- "create_prediction_api_key_registration",
- "list_prediction_api_key_registrations",
- "delete_prediction_api_key_registration",
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
-
-def test_prediction_api_key_registry_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- PredictionApiKeyRegistryClient()
- adc.assert_called_once_with(
- scopes=("https://www.googleapis.com/auth/cloud-platform",)
- )
-
-
-def test_prediction_api_key_registry_host_no_port():
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_prediction_api_key_registry_host_with_port():
- client = PredictionApiKeyRegistryClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com:8000"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:8000"
-
-
-def test_prediction_api_key_registry_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
- transport = transports.PredictionApiKeyRegistryGrpcTransport(channel=channel)
- assert transport.grpc_channel is channel
diff --git a/tests/unit/recommendationengine_v1beta1/test_prediction_service.py b/tests/unit/recommendationengine_v1beta1/test_prediction_service.py
deleted file mode 100644
index 70871c56..00000000
--- a/tests/unit/recommendationengine_v1beta1/test_prediction_service.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from unittest import mock
-
-import grpc
-import math
-import pytest
-
-from google import auth
-from google.api_core import client_options
-from google.auth import credentials
-from google.cloud.recommendationengine_v1beta1.services.prediction_service import (
- PredictionServiceClient,
-)
-from google.cloud.recommendationengine_v1beta1.services.prediction_service import pagers
-from google.cloud.recommendationengine_v1beta1.services.prediction_service import (
- transports,
-)
-from google.cloud.recommendationengine_v1beta1.types import prediction_service
-from google.oauth2 import service_account
-
-
-def test_prediction_service_client_from_service_account_file():
- creds = credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = PredictionServiceClient.from_service_account_file(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- client = PredictionServiceClient.from_service_account_json(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_prediction_service_client_client_options():
- # Check the default options have their expected values.
- assert (
- PredictionServiceClient.DEFAULT_OPTIONS.api_endpoint
- == "recommendationengine.googleapis.com"
- )
-
- # Check that options can be customized.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.prediction_service.PredictionServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = PredictionServiceClient(client_options=options)
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_prediction_service_client_client_options_from_dict():
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.prediction_service.PredictionServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = PredictionServiceClient(
- client_options={"api_endpoint": "squid.clam.whelk"}
- )
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_predict(transport: str = "grpc"):
- client = PredictionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = prediction_service.PredictRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.predict), "__call__") as call:
- # Designate an appropriate return value for the call.
- call.return_value = prediction_service.PredictResponse(
- recommendation_token="recommendation_token_value",
- items_missing_in_catalog=["items_missing_in_catalog_value"],
- dry_run=True,
- next_page_token="next_page_token_value",
- )
-
- response = client.predict(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.PredictPager)
- assert response.recommendation_token == "recommendation_token_value"
- assert response.items_missing_in_catalog == ["items_missing_in_catalog_value"]
-
- assert response.dry_run is True
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_predict_pager():
- client = PredictionServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.predict), "__call__") as call:
- # Set the response to a series of pages.
- call.side_effect = (
- prediction_service.PredictResponse(
- results=[
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- ],
- next_page_token="abc",
- ),
- prediction_service.PredictResponse(results=[], next_page_token="def"),
- prediction_service.PredictResponse(
- results=[prediction_service.PredictResponse.PredictionResult()],
- next_page_token="ghi",
- ),
- prediction_service.PredictResponse(
- results=[
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- ]
- ),
- RuntimeError,
- )
- results = [i for i in client.predict(request={})]
- assert len(results) == 6
- assert all(
- isinstance(i, prediction_service.PredictResponse.PredictionResult)
- for i in results
- )
-
-
-def test_predict_pages():
- client = PredictionServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.predict), "__call__") as call:
- # Set the response to a series of pages.
- call.side_effect = (
- prediction_service.PredictResponse(
- results=[
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- ],
- next_page_token="abc",
- ),
- prediction_service.PredictResponse(results=[], next_page_token="def"),
- prediction_service.PredictResponse(
- results=[prediction_service.PredictResponse.PredictionResult()],
- next_page_token="ghi",
- ),
- prediction_service.PredictResponse(
- results=[
- prediction_service.PredictResponse.PredictionResult(),
- prediction_service.PredictResponse.PredictionResult(),
- ]
- ),
- RuntimeError,
- )
- pages = list(client.predict(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.PredictionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- with pytest.raises(ValueError):
- client = PredictionServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.PredictionServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = PredictionServiceClient(transport=transport)
- assert client._transport is transport
-
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = PredictionServiceClient(credentials=credentials.AnonymousCredentials())
- assert isinstance(client._transport, transports.PredictionServiceGrpcTransport)
-
-
-def test_prediction_service_base_transport():
- # Instantiate the base transport.
- transport = transports.PredictionServiceTransport(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = ("predict",)
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
-
-def test_prediction_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- PredictionServiceClient()
- adc.assert_called_once_with(
- scopes=("https://www.googleapis.com/auth/cloud-platform",)
- )
-
-
-def test_prediction_service_host_no_port():
- client = PredictionServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_prediction_service_host_with_port():
- client = PredictionServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com:8000"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:8000"
-
-
-def test_prediction_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
- transport = transports.PredictionServiceGrpcTransport(channel=channel)
- assert transport.grpc_channel is channel
diff --git a/tests/unit/recommendationengine_v1beta1/test_user_event_service.py b/tests/unit/recommendationengine_v1beta1/test_user_event_service.py
deleted file mode 100644
index be154c85..00000000
--- a/tests/unit/recommendationengine_v1beta1/test_user_event_service.py
+++ /dev/null
@@ -1,458 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Copyright (C) 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from unittest import mock
-
-import grpc
-import math
-import pytest
-
-from google import auth
-from google.api import httpbody_pb2 as httpbody # type: ignore
-from google.api_core import client_options
-from google.api_core import future
-from google.api_core import operations_v1
-from google.auth import credentials
-from google.cloud.recommendationengine_v1beta1.services.user_event_service import (
- UserEventServiceClient,
-)
-from google.cloud.recommendationengine_v1beta1.services.user_event_service import pagers
-from google.cloud.recommendationengine_v1beta1.services.user_event_service import (
- transports,
-)
-from google.cloud.recommendationengine_v1beta1.types import import_
-from google.cloud.recommendationengine_v1beta1.types import user_event
-from google.cloud.recommendationengine_v1beta1.types import user_event_service
-from google.longrunning import operations_pb2
-from google.oauth2 import service_account
-from google.protobuf import any_pb2 as any # type: ignore
-from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-
-
-def test_user_event_service_client_from_service_account_file():
- creds = credentials.AnonymousCredentials()
- with mock.patch.object(
- service_account.Credentials, "from_service_account_file"
- ) as factory:
- factory.return_value = creds
- client = UserEventServiceClient.from_service_account_file(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- client = UserEventServiceClient.from_service_account_json(
- "dummy/file/path.json"
- )
- assert client._transport._credentials == creds
-
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_user_event_service_client_client_options():
- # Check the default options have their expected values.
- assert (
- UserEventServiceClient.DEFAULT_OPTIONS.api_endpoint
- == "recommendationengine.googleapis.com"
- )
-
- # Check that options can be customized.
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.user_event_service.UserEventServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = UserEventServiceClient(client_options=options)
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_user_event_service_client_client_options_from_dict():
- with mock.patch(
- "google.cloud.recommendationengine_v1beta1.services.user_event_service.UserEventServiceClient.get_transport_class"
- ) as gtc:
- transport = gtc.return_value = mock.MagicMock()
- client = UserEventServiceClient(
- client_options={"api_endpoint": "squid.clam.whelk"}
- )
- transport.assert_called_once_with(credentials=None, host="squid.clam.whelk")
-
-
-def test_write_user_event(transport: str = "grpc"):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = user_event_service.WriteUserEventRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.write_user_event), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = user_event.UserEvent(
- event_type="event_type_value",
- event_source=user_event.UserEvent.EventSource.AUTOML,
- )
-
- response = client.write_user_event(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, user_event.UserEvent)
- assert response.event_type == "event_type_value"
- assert response.event_source == user_event.UserEvent.EventSource.AUTOML
-
-
-def test_collect_user_event(transport: str = "grpc"):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = user_event_service.CollectUserEventRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.collect_user_event), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = httpbody.HttpBody(
- content_type="content_type_value", data=b"data_blob"
- )
-
- response = client.collect_user_event(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, httpbody.HttpBody)
- assert response.content_type == "content_type_value"
- assert response.data == b"data_blob"
-
-
-def test_collect_user_event_field_headers():
- client = UserEventServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = user_event_service.CollectUserEventRequest(parent="parent/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.collect_user_event), "__call__"
- ) as call:
- call.return_value = httpbody.HttpBody()
- client.collect_user_event(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
-
-
-def test_list_user_events(transport: str = "grpc"):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = user_event_service.ListUserEventsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_user_events), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = user_event_service.ListUserEventsResponse(
- next_page_token="next_page_token_value"
- )
-
- response = client.list_user_events(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, pagers.ListUserEventsPager)
- assert response.next_page_token == "next_page_token_value"
-
-
-def test_list_user_events_field_headers():
- client = UserEventServiceClient(credentials=credentials.AnonymousCredentials())
-
- # Any value that is part of the HTTP/1.1 URI should be sent as
- # a field header. Set these to a non-empty value.
- request = user_event_service.ListUserEventsRequest(parent="parent/value")
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_user_events), "__call__"
- ) as call:
- call.return_value = user_event_service.ListUserEventsResponse()
- client.list_user_events(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
- assert args[0] == request
-
- # Establish that the field header was sent.
- _, _, kw = call.mock_calls[0]
- assert ("x-goog-request-params", "parent=parent/value") in kw["metadata"]
-
-
-def test_list_user_events_pager():
- client = UserEventServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_user_events), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- user_event_service.ListUserEventsResponse(
- user_events=[
- user_event.UserEvent(),
- user_event.UserEvent(),
- user_event.UserEvent(),
- ],
- next_page_token="abc",
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[], next_page_token="def"
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[user_event.UserEvent()], next_page_token="ghi"
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[user_event.UserEvent(), user_event.UserEvent()]
- ),
- RuntimeError,
- )
- results = [i for i in client.list_user_events(request={})]
- assert len(results) == 6
- assert all(isinstance(i, user_event.UserEvent) for i in results)
-
-
-def test_list_user_events_pages():
- client = UserEventServiceClient(credentials=credentials.AnonymousCredentials)
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_user_events), "__call__"
- ) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- user_event_service.ListUserEventsResponse(
- user_events=[
- user_event.UserEvent(),
- user_event.UserEvent(),
- user_event.UserEvent(),
- ],
- next_page_token="abc",
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[], next_page_token="def"
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[user_event.UserEvent()], next_page_token="ghi"
- ),
- user_event_service.ListUserEventsResponse(
- user_events=[user_event.UserEvent(), user_event.UserEvent()]
- ),
- RuntimeError,
- )
- pages = list(client.list_user_events(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
-
-
-def test_purge_user_events(transport: str = "grpc"):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = user_event_service.PurgeUserEventsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.purge_user_events), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name="operations/spam")
-
- response = client.purge_user_events(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_import_user_events(transport: str = "grpc"):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
- # Everything is optional in proto3 as far as the runtime is concerned,
- # and we are mocking out the actual API, so just send an empty request.
- request = import_.ImportUserEventsRequest()
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.import_user_events), "__call__"
- ) as call:
- # Designate an appropriate return value for the call.
- call.return_value = operations_pb2.Operation(name="operations/spam")
-
- response = client.import_user_events(request)
-
- # Establish that the underlying gRPC stub method was called.
- assert len(call.mock_calls) == 1
- _, args, _ = call.mock_calls[0]
-
- assert args[0] == request
-
- # Establish that the response is the type that we expect.
- assert isinstance(response, future.Future)
-
-
-def test_credentials_transport_error():
- # It is an error to provide credentials and a transport instance.
- transport = transports.UserEventServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- with pytest.raises(ValueError):
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport=transport
- )
-
-
-def test_transport_instance():
- # A client may be instantiated with a custom transport instance.
- transport = transports.UserEventServiceGrpcTransport(
- credentials=credentials.AnonymousCredentials()
- )
- client = UserEventServiceClient(transport=transport)
- assert client._transport is transport
-
-
-def test_transport_grpc_default():
- # A client should use the gRPC transport by default.
- client = UserEventServiceClient(credentials=credentials.AnonymousCredentials())
- assert isinstance(client._transport, transports.UserEventServiceGrpcTransport)
-
-
-def test_user_event_service_base_transport():
- # Instantiate the base transport.
- transport = transports.UserEventServiceTransport(
- credentials=credentials.AnonymousCredentials()
- )
-
- # Every method on the transport should just blindly
- # raise NotImplementedError.
- methods = (
- "write_user_event",
- "collect_user_event",
- "list_user_events",
- "purge_user_events",
- "import_user_events",
- )
- for method in methods:
- with pytest.raises(NotImplementedError):
- getattr(transport, method)(request=object())
-
- # Additionally, the LRO client (a property) should
- # also raise NotImplementedError
- with pytest.raises(NotImplementedError):
- transport.operations_client
-
-
-def test_user_event_service_auth_adc():
- # If no credentials are provided, we should use ADC credentials.
- with mock.patch.object(auth, "default") as adc:
- adc.return_value = (credentials.AnonymousCredentials(), None)
- UserEventServiceClient()
- adc.assert_called_once_with(
- scopes=("https://www.googleapis.com/auth/cloud-platform",)
- )
-
-
-def test_user_event_service_host_no_port():
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:443"
-
-
-def test_user_event_service_host_with_port():
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(),
- client_options=client_options.ClientOptions(
- api_endpoint="recommendationengine.googleapis.com:8000"
- ),
- transport="grpc",
- )
- assert client._transport._host == "recommendationengine.googleapis.com:8000"
-
-
-def test_user_event_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
- transport = transports.UserEventServiceGrpcTransport(channel=channel)
- assert transport.grpc_channel is channel
-
-
-def test_user_event_service_grpc_lro_client():
- client = UserEventServiceClient(
- credentials=credentials.AnonymousCredentials(), transport="grpc"
- )
- transport = client._transport
-
- # Ensure that we have a api-core operations client.
- assert isinstance(transport.operations_client, operations_v1.OperationsClient)
-
- # Ensure that subsequent calls to the property send the exact same object.
- assert transport.operations_client is transport.operations_client