diff --git a/CHANGELOG.md b/CHANGELOG.md
index e61ced26..59b105e4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@
 [1]: https://pypi.org/project/google-cloud-automl/#history
 
+## 0.7.0
+
+10-04-2019 15:37 PDT
+
+### Implementation Changes
+- Return operation future from `AutoMlClient.create_dataset` (via synth). ([#9423](https://github.com/googleapis/google-cloud-python/pull/9423))
+
+
+### New Features
+- Add support for V1 API (via synth). ([#9388](https://github.com/googleapis/google-cloud-python/pull/9388))
+- Add support for passing project to 'GcsClient'. ([#9299](https://github.com/googleapis/google-cloud-python/pull/9299))
+
 ## 0.6.0
 
 09-30-2019 10:40 PDT
diff --git a/docs/conf.py b/docs/conf.py
index d2091505..9ac18387 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -318,7 +318,7 @@
         u"google-cloud-automl Documentation",
         author,
         "google-cloud-automl",
-        "GAPIC library for the {metadata.shortName} v1beta1 service",
+        "GAPIC library for the {metadata.shortName} v1 service",
         "APIs",
     )
 ]
@@ -344,7 +344,7 @@
     "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
     "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None),
     "grpc": ("https://grpc.io/grpc/python/", None),
-    "requests": ("https://requests.kennethreitz.org/en/stable/", None),
+    "requests": ("https://requests.kennethreitz.org/en/master/", None),
     "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None),
     "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
 }
diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst
new file mode 100644
index 00000000..757fc1a0
--- /dev/null
+++ b/docs/gapic/v1/api.rst
@@ -0,0 +1,6 @@
+Client for Cloud AutoML API
+===========================
+
+.. automodule:: google.cloud.automl_v1
+    :members:
+    :inherited-members:
\ No newline at end of file
diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst
new file mode 100644
index 00000000..5fd25134
--- /dev/null
+++ b/docs/gapic/v1/types.rst
@@ -0,0 +1,5 @@
+Types for Cloud AutoML API Client
+=================================
+
+.. automodule:: google.cloud.automl_v1.types
+    :members:
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index 01f57764..90c2bfd5 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,8 +1,22 @@
 .. include:: README.rst
 
+This package includes clients for multiple versions of the Cloud AutoML API.
+By default, you will get ``v1``, the latest stable version.
 
-Api Reference
-------------
+v1 API Reference
+----------------
+.. toctree::
+    :maxdepth: 2
+
+    gapic/v1/api
+    gapic/v1/types
+
+The previous beta release, ``v1beta1``, is provided as well.
+
+An API and type reference is provided for ``v1beta1``:
+
+v1beta1 API Reference
+----------------------
 .. toctree::
     :maxdepth: 2
 
@@ -11,6 +25,7 @@ Api Reference
     gapic/v1beta1/tables
 
+
 Changelog
 ---------
diff --git a/google/cloud/automl.py b/google/cloud/automl.py
index 77528b3d..9f96f4f4 100644
--- a/google/cloud/automl.py
+++ b/google/cloud/automl.py
@@ -17,10 +17,10 @@
 
 from __future__ import absolute_import
 
-from google.cloud.automl_v1beta1 import AutoMlClient
-from google.cloud.automl_v1beta1 import PredictionServiceClient
-from google.cloud.automl_v1beta1 import enums
-from google.cloud.automl_v1beta1 import types
+from google.cloud.automl_v1 import AutoMlClient
+from google.cloud.automl_v1 import PredictionServiceClient
+from google.cloud.automl_v1 import enums
+from google.cloud.automl_v1 import types
 
 __all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient")
diff --git a/google/cloud/automl_v1/__init__.py b/google/cloud/automl_v1/__init__.py
new file mode 100644
index 00000000..f68180a5
--- /dev/null
+++ b/google/cloud/automl_v1/__init__.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+
+from google.cloud.automl_v1 import types
+from google.cloud.automl_v1.gapic import auto_ml_client
+from google.cloud.automl_v1.gapic import enums
+from google.cloud.automl_v1.gapic import prediction_service_client
+
+
+class AutoMlClient(auto_ml_client.AutoMlClient):
+    __doc__ = auto_ml_client.AutoMlClient.__doc__
+    enums = enums
+
+
+class PredictionServiceClient(prediction_service_client.PredictionServiceClient):
+    __doc__ = prediction_service_client.PredictionServiceClient.__doc__
+    enums = enums
+
+
+__all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient")
diff --git a/google/cloud/automl_v1/gapic/__init__.py b/google/cloud/automl_v1/gapic/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/google/cloud/automl_v1/gapic/auto_ml_client.py b/google/cloud/automl_v1/gapic/auto_ml_client.py
new file mode 100644
index 00000000..eebed1ee
--- /dev/null
+++ b/google/cloud/automl_v1/gapic/auto_ml_client.py
@@ -0,0 +1,1514 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
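+# A minimal usage sketch of the client defined in this module (the project
+# ID and location below are placeholder values):
+#
+#     from google.cloud import automl_v1
+#     client = automl_v1.AutoMlClient()
+#     parent = client.location_path("my-project", "us-central1")
+#     for dataset in client.list_datasets(parent):
+#         print(dataset.display_name)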
+ +"""Accesses the google.cloud.automl.v1 AutoMl API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.automl_v1.gapic import auto_ml_client_config +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic.transports import auto_ml_grpc_transport +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version + + +class AutoMlClient(object): + """ + AutoML Server API. + + The resource names are assigned by the server. The server never reuses + names that it has created after the resources with those names are + deleted. + + An ID of a resource is the last element of the item's resource name. For + ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, + then the id for the item is ``{dataset_id}``. + + Currently the only supported ``location_id`` is "us-central1". + + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.automl.v1.AutoMl" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoMlClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def dataset_path(cls, project, location, dataset): + """Return a fully-qualified dataset string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/datasets/{dataset}", + project=project, + location=location, + dataset=dataset, + ) + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}", + project=project, + location=location, + ) + + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + + @classmethod + def model_evaluation_path(cls, project, location, model, model_evaluation): + """Return a fully-qualified model_evaluation string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}", + project=project, + location=location, + model=model, + model_evaluation=model_evaluation, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.AutoMlGrpcTransport, + Callable[[~.Credentials, type], ~.AutoMlGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. 
+ if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = auto_ml_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=auto_ml_grpc_transport.AutoMlGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = auto_ml_grpc_transport.AutoMlGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_dataset( + self, + parent, + dataset, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> response = client.create_dataset(parent, dataset) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): The resource name of the project to create the dataset for. + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "create_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_dataset, + default_retry=self._method_configs["CreateDataset"].retry, + default_timeout=self._method_configs["CreateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + dataset_pb2.Dataset, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def update_dataset( + self, + dataset, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_dataset(dataset, update_mask) + + Args: + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
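+        # (This caching pattern is shared by every RPC in this client: the raw
+        # transport method is wrapped once with the retry/timeout defaults
+        # parsed from the client config, then stored in ``_inner_api_calls``
+        # and reused on later calls.)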
+ if "update_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "update_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_dataset, + default_retry=self._method_configs["UpdateDataset"].retry, + default_timeout=self._method_configs["UpdateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("dataset.name", dataset.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.get_dataset(name) + + Args: + name (str): The resource name of the dataset to retrieve. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "get_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_dataset, + default_retry=self._method_configs["GetDataset"].retry, + default_timeout=self._method_configs["GetDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_datasets( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists datasets in a project. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_datasets(parent): + ... # process element + ... 
pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_datasets(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The resource name of the project from which to list datasets. + filter_ (str): An expression for filtering the results of the request. + + - ``dataset_metadata`` - for existence of the case (e.g. + image\_classification\_dataset\_metadata:\*). Some examples of using + the filter are: + + - ``translation_dataset_metadata:*`` --> The dataset has + translation\_dataset\_metadata. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Dataset` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_datasets" not in self._inner_api_calls: + self._inner_api_calls[ + "list_datasets" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_datasets, + default_retry=self._method_configs["ListDatasets"].retry, + default_timeout=self._method_configs["ListDatasets"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListDatasetsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_datasets"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="datasets", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. 
+ + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.delete_dataset(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): The resource name of the dataset to delete. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_dataset, + default_retry=self._method_configs["DeleteDataset"].retry, + default_timeout=self._method_configs["DeleteDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def import_data( + self, + name, + input_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Imports data into a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `input_config`: + >>> input_config = {} + >>> + >>> response = client.import_data(name, input_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. Dataset name. Dataset must already exist. All imported + annotations and examples will be added. + input_config (Union[dict, ~google.cloud.automl_v1.types.InputConfig]): Required. The desired input location and its domain specific semantics, + if any. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.InputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "import_data" not in self._inner_api_calls: + self._inner_api_calls[ + "import_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.import_data, + default_retry=self._method_configs["ImportData"].retry, + default_timeout=self._method_configs["ImportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ImportDataRequest(name=name, input_config=input_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["import_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def export_data( + self, + name, + output_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `output_config`: + >>> output_config = {} + >>> + >>> response = client.export_data(name, output_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. The resource name of the dataset. + output_config (Union[dict, ~google.cloud.automl_v1.types.OutputConfig]): Required. The desired output location. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.OutputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "export_data" not in self._inner_api_calls: + self._inner_api_calls[ + "export_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.export_data, + default_retry=self._method_configs["ExportData"].retry, + default_timeout=self._method_configs["ExportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ExportDataRequest(name=name, output_config=output_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["export_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def create_model( + self, + parent, + model, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> response = client.create_model(parent, model) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Resource name of the parent project where the model is being created. + model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + # Wrap the transport method to add retry and timeout logic. + if "create_model" not in self._inner_api_calls: + self._inner_api_calls[ + "create_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_model, + default_retry=self._method_configs["CreateModel"].retry, + default_timeout=self._method_configs["CreateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateModelRequest(parent=parent, model=model) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + model_pb2.Model, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.get_model(name) + + Args: + name (str): Resource name of the model. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_model" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model, + default_retry=self._method_configs["GetModel"].retry, + default_timeout=self._method_configs["GetModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_model( + self, + model, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a model. 
+ + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_model(model, update_mask) + + Args: + model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_model" not in self._inner_api_calls: + self._inner_api_calls[ + "update_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_model, + default_retry=self._method_configs["UpdateModel"].retry, + default_timeout=self._method_configs["UpdateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateModelRequest(model=model, update_mask=update_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("model.name", model.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_models( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists models. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_models(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_models(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the project, from which to list the models. + filter_ (str): An expression for filtering the results of the request. + + - ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). + + - ``dataset_id`` - for = or !=. 
Some examples of using the filter are: + + - ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. + + - ``dataset_id=5`` --> The model was created from a dataset with ID 5. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Model` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_models" not in self._inner_api_calls: + self._inner_api_calls[ + "list_models" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_models, + default_retry=self._method_configs["ListModels"].retry, + default_timeout=self._method_configs["ListModels"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_models"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a model. Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.delete_model(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Resource name of the model being deleted. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_model" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_model, + default_retry=self._method_configs["DeleteModel"].retry, + default_timeout=self._method_configs["DeleteModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model_evaluation( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model evaluation. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_evaluation_path('[PROJECT]', '[LOCATION]', '[MODEL]', '[MODEL_EVALUATION]') + >>> + >>> response = client.get_model_evaluation(name) + + Args: + name (str): Resource name for the model evaluation. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.ModelEvaluation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
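+        # (As in the other RPCs, the block below derives a routing header from
+        # the resource name so the service can route the request; the
+        # try/except is generated boilerplate that tolerates requests lacking
+        # the field.)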
+ if "get_model_evaluation" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model_evaluation" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model_evaluation, + default_retry=self._method_configs["GetModelEvaluation"].retry, + default_timeout=self._method_configs["GetModelEvaluation"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelEvaluationRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model_evaluation"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_model_evaluations( + self, + parent, + filter_, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists model evaluations. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> # TODO: Initialize `filter_`: + >>> filter_ = '' + >>> + >>> # Iterate over all results + >>> for element in client.list_model_evaluations(parent, filter_): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_model_evaluations(parent, filter_).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations from across all + models of the parent location. + filter_ (str): An expression for filtering the results of the request. + + - ``annotation_spec_id`` - for =, != or existence. See example below + for the last. + + Some examples of using the filter are: + + - ``annotation_spec_id!=4`` --> The model evaluation was done for + annotation spec with ID different than 4. + - ``NOT annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.ModelEvaluation` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_model_evaluations" not in self._inner_api_calls: + self._inner_api_calls[ + "list_model_evaluations" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_model_evaluations, + default_retry=self._method_configs["ListModelEvaluations"].retry, + default_timeout=self._method_configs["ListModelEvaluations"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_model_evaluations"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model_evaluation", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/google/cloud/automl_v1/gapic/auto_ml_client_config.py b/google/cloud/automl_v1/gapic/auto_ml_client_config.py new file mode 100644 index 00000000..6822a905 --- /dev/null +++ b/google/cloud/automl_v1/gapic/auto_ml_client_config.py @@ -0,0 +1,93 @@ +config = { + "interfaces": { + "google.cloud.automl.v1.AutoMl": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListDatasets": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ImportData": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ExportData": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateModel": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "UpdateModel": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListModels": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetModelEvaluation": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + 
"retry_params_name": "default", + }, + "ListModelEvaluations": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/google/cloud/automl_v1/gapic/enums.py b/google/cloud/automl_v1/gapic/enums.py new file mode 100644 index 00000000..d9c50d56 --- /dev/null +++ b/google/cloud/automl_v1/gapic/enums.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class Model(object): + class DeploymentState(enum.IntEnum): + """ + Deployment state of the model. + + Attributes: + DEPLOYMENT_STATE_UNSPECIFIED (int): Should not be used, an un-set enum has this value by default. + DEPLOYED (int): Model is deployed. + UNDEPLOYED (int): Model is not deployed. + """ + + DEPLOYMENT_STATE_UNSPECIFIED = 0 + DEPLOYED = 1 + UNDEPLOYED = 2 diff --git a/google/cloud/automl_v1/gapic/prediction_service_client.py b/google/cloud/automl_v1/gapic/prediction_service_client.py new file mode 100644 index 00000000..274d7cf6 --- /dev/null +++ b/google/cloud/automl_v1/gapic/prediction_service_client.py @@ -0,0 +1,297 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.automl.v1 PredictionService API.""" + +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.path_template +import grpc + +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic import prediction_service_client_config +from google.cloud.automl_v1.gapic.transports import prediction_service_grpc_transport +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2_grpc +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version + + +class PredictionServiceClient(object): + """ + AutoML Prediction API. + + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.automl.v1.PredictionService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PredictionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.PredictionServiceGrpcTransport, + Callable[[~.Credentials, type], ~.PredictionServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. 
Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = prediction_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=prediction_service_grpc_transport.PredictionServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = prediction_service_grpc_transport.PredictionServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. 
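
The constructor above resolves the endpoint from `client_options`, builds or accepts a transport, and parses the per-method retry/timeout defaults. A construction sketch using only the paths shown in that constructor; the regional endpoint and key-file path are illustrative values, not defaults shipped with this library.

```python
# Sketch: the three common ways to construct the client defined above.
from google.cloud import automl_v1

# Default: credentials from the environment (ADC), automl.googleapis.com:443.
client = automl_v1.PredictionServiceClient()

# Endpoint override via client_options; the dict form is converted with
# google.api_core.client_options.from_dict(), as in __init__ above.
eu_client = automl_v1.PredictionServiceClient(
    client_options={"api_endpoint": "eu-automl.googleapis.com:443"}
)

# Explicit credentials loaded from a service-account key file
# (from_service_account_json is the alias defined above).
keyed_client = automl_v1.PredictionServiceClient.from_service_account_json(
    "/path/to/service-account.json"
)
```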
+        self._inner_api_calls = {}
+
+    # Service calls
+    def predict(
+        self,
+        name,
+        payload,
+        params=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+        """
+        Perform an online prediction. The prediction result is returned
+        directly in the response. Available for the following ML problems and
+        their expected request payloads:
+
+        - Translation - TextSnippet, content up to 25,000 characters, UTF-8
+          encoded.
+
+        Example:
+            >>> from google.cloud import automl_v1
+            >>>
+            >>> client = automl_v1.PredictionServiceClient()
+            >>>
+            >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]')
+            >>>
+            >>> # TODO: Initialize `payload`:
+            >>> payload = {}
+            >>>
+            >>> response = client.predict(name, payload)
+
+        Args:
+            name (str): Name of the model requested to serve the prediction.
+            payload (Union[dict, ~google.cloud.automl_v1.types.ExamplePayload]): Required. Payload to perform a prediction on. The payload must match the
+                problem type that the model was trained to solve.
+
+                If a dict is provided, it must be of the same form as the protobuf
+                message :class:`~google.cloud.automl_v1.types.ExamplePayload`.
+            params (dict[str -> str]): Additional domain-specific parameters; any string
+                value must be no longer than 25,000 characters.
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will
+                be retried using a default configuration.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.cloud.automl_v1.types.PredictResponse` instance.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        # Wrap the transport method to add retry and timeout logic.
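
Before the wrapped call continues below, a usage sketch that fills in the `payload` TODO from the docstring for a translation model. The `text_snippet` shape follows the `ExamplePayload`/`TextSnippet` messages in this package; the resource IDs are placeholders, and the per-attempt `timeout` matches the 60000 ms Predict default further down.

```python
# Sketch: an online translation prediction with a concrete payload.
from google.cloud import automl_v1

client = automl_v1.PredictionServiceClient()
name = client.model_path("my-project", "us-central1", "my-model-id")

payload = {
    "text_snippet": {
        "content": "Hello, world.",  # up to 25,000 characters, UTF-8 encoded
        "mime_type": "text/plain",
    }
}

# timeout applies to each attempt, as noted in the docstring above.
response = client.predict(name, payload, timeout=60.0)
for annotation in response.payload:
    print(annotation.translation.translated_content.content)
```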
+ if "predict" not in self._inner_api_calls: + self._inner_api_calls[ + "predict" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.predict, + default_retry=self._method_configs["Predict"].retry, + default_timeout=self._method_configs["Predict"].timeout, + client_info=self._client_info, + ) + + request = prediction_service_pb2.PredictRequest( + name=name, payload=payload, params=params + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["predict"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/google/cloud/automl_v1/gapic/prediction_service_client_config.py b/google/cloud/automl_v1/gapic/prediction_service_client_config.py new file mode 100644 index 00000000..21fc698d --- /dev/null +++ b/google/cloud/automl_v1/gapic/prediction_service_client_config.py @@ -0,0 +1,28 @@ +config = { + "interfaces": { + "google.cloud.automl.v1.PredictionService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "Predict": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + } + }, + } + } +} diff --git a/google/cloud/automl_v1/gapic/transports/__init__.py b/google/cloud/automl_v1/gapic/transports/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py new file mode 100644 index 00000000..e07d24ff --- /dev/null +++ b/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.automl_v1.proto import service_pb2_grpc + + +class AutoMlGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.automl.v1 AutoMl API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="automl.googleapis.com:443" + ): + """Instantiate the transport class. 
+ + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = {"auto_ml_stub": service_pb2_grpc.AutoMlStub(channel)} + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="automl.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_dataset`. + + Creates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].CreateDataset + + @property + def update_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_dataset`. + + Updates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateDataset + + @property + def get_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_dataset`. + + Gets a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetDataset + + @property + def list_datasets(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_datasets`. + + Lists datasets in a project. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListDatasets + + @property + def delete_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_dataset`. + + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteDataset + + @property + def import_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.import_data`. + + Imports data into a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ImportData + + @property + def export_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.export_data`. + + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ExportData + + @property + def create_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_model`. + + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].CreateModel + + @property + def get_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model`. + + Gets a model. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModel + + @property + def update_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_model`. + + Updates a model. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateModel + + @property + def list_models(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_models`. + + Lists models. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListModels + + @property + def delete_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_model`. + + Deletes a model. Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteModel + + @property + def get_model_evaluation(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model_evaluation`. + + Gets a model evaluation. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModelEvaluation + + @property + def list_model_evaluations(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_model_evaluations`. + + Lists model evaluations. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListModelEvaluations diff --git a/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py new file mode 100644 index 00000000..9fa5a6f8 --- /dev/null +++ b/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.automl_v1.proto import prediction_service_pb2_grpc + + +class PredictionServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.automl.v1 PredictionService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="automl.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. 
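
The transport classes above can also be instantiated directly, for example to tune channel options, and handed to the client through the `transport` constructor argument. A sketch under that assumption; the keepalive option is illustrative, and note that a hand-built channel bypasses the `-1` max-message-size options the transport sets for its own channel.

```python
# Sketch: building the gRPC transport explicitly and passing it to the client.
from google.cloud import automl_v1
from google.cloud.automl_v1.gapic.transports import (
    prediction_service_grpc_transport as transports,
)

channel = transports.PredictionServiceGrpcTransport.create_channel(
    address="automl.googleapis.com:443",
    options=[("grpc.keepalive_time_ms", 30000)],  # illustrative option
)
transport = transports.PredictionServiceGrpcTransport(channel=channel)
client = automl_v1.PredictionServiceClient(transport=transport)
```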
+        self._stubs = {
+            "prediction_service_stub": prediction_service_pb2_grpc.PredictionServiceStub(
+                channel
+            )
+        }
+
+    @classmethod
+    def create_channel(
+        cls, address="automl.googleapis.com:443", credentials=None, **kwargs
+    ):
+        """Create and return a gRPC channel object.
+
+        Args:
+            address (str): The host for the channel to use.
+            credentials (~.Credentials): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            kwargs (dict): Keyword arguments, which are passed to the
+                channel creation.
+
+        Returns:
+            grpc.Channel: A gRPC channel object.
+        """
+        return google.api_core.grpc_helpers.create_channel(
+            address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs
+        )
+
+    @property
+    def channel(self):
+        """The gRPC channel used by the transport.
+
+        Returns:
+            grpc.Channel: A gRPC channel object.
+        """
+        return self._channel
+
+    @property
+    def predict(self):
+        """Return the gRPC stub for :meth:`PredictionServiceClient.predict`.
+
+        Perform an online prediction. The prediction result is returned
+        directly in the response. Available for the following ML problems and
+        their expected request payloads:
+
+        - Translation - TextSnippet, content up to 25,000 characters, UTF-8
+          encoded.
+
+        Returns:
+            Callable: A callable which accepts the appropriate
+                deserialized request object and returns a
+                deserialized response object.
+        """
+        return self._stubs["prediction_service_stub"].Predict
diff --git a/google/cloud/automl_v1/proto/__init__.py b/google/cloud/automl_v1/proto/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/google/cloud/automl_v1/proto/annotation_payload.proto b/google/cloud/automl_v1/proto/annotation_payload.proto
new file mode 100644
index 00000000..9469c261
--- /dev/null
+++ b/google/cloud/automl_v1/proto/annotation_payload.proto
@@ -0,0 +1,39 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.automl.v1;
+
+import "google/cloud/automl/v1/translation.proto";
+import "google/protobuf/any.proto";
+import "google/api/annotations.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl";
+option csharp_namespace = "Google.Cloud.AutoML.V1";
+option java_multiple_files = true;
+option java_package = "com.google.cloud.automl.v1";
+option php_namespace = "Google\\Cloud\\AutoML\\V1";
+option ruby_package = "Google::Cloud::AutoML::V1";
+
+// Contains annotation information that is relevant to AutoML.
+message AnnotationPayload {
+  // Output only. Additional information about the annotation
+  // specific to the AutoML domain.
+  oneof detail {
+    // Annotation details for translation.
+ TranslationAnnotation translation = 2; + } +} diff --git a/google/cloud/automl_v1/proto/annotation_payload_pb2.py b/google/cloud/automl_v1/proto/annotation_payload_pb2.py new file mode 100644 index 00000000..9f027e70 --- /dev/null +++ b/google/cloud/automl_v1/proto/annotation_payload_pb2.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/annotation_payload.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/annotation_payload.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z labels = 39; +} diff --git a/google/cloud/automl_v1/proto/dataset_pb2.py b/google/cloud/automl_v1/proto/dataset_pb2.py new file mode 100644 index 00000000..95d16ad1 --- /dev/null +++ b/google/cloud/automl_v1/proto/dataset_pb2.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/dataset.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/dataset.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 2; +} + +// * For Translation: +// CSV file `translation.csv`, with each line in format: +// ML_USE,GCS_FILE_PATH +// GCS_FILE_PATH leads to a .TSV file which describes examples that have +// given ML_USE, using the following row format per line: +// TEXT_SNIPPET (in source language) \t TEXT_SNIPPET (in target +// language) +// +// `export_data__` +// where will be made +// BigQuery-dataset-name compatible (e.g. most special characters will +// become underscores), and timestamp will be in +// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In that +// dataset a new table called `primary_table` will be created, and +// filled with precisely the same data as this obtained on import. +message OutputConfig { + // Required. The destination of the output. 
+ oneof destination { + // The Google Cloud Storage location where the output is to be written to. + // For Image Object Detection, Text Extraction, Video Classification and + // Tables, in the given directory a new directory will be created with name: + // export_data-- where + // timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export + // output will be written into that directory. + GcsDestination gcs_destination = 1; + } +} + +// The Google Cloud Storage location for the input content. +message GcsSource { + // Required. Google Cloud Storage URIs to input files, up to 2000 characters + // long. Accepted forms: + // * Full object path, e.g. gs://bucket/directory/object.csv + repeated string input_uris = 1; +} + +// The Google Cloud Storage location where the output is to be written to. +message GcsDestination { + // Required. Google Cloud Storage URI to output directory, up to 2000 + // characters long. + // Accepted forms: + // * Prefix path: gs://bucket/directory + // The requesting user must have write permission to the bucket. + // The directory is created if it doesn't exist. + string output_uri_prefix = 1; +} diff --git a/google/cloud/automl_v1/proto/io_pb2.py b/google/cloud/automl_v1/proto/io_pb2.py new file mode 100644 index 00000000..6413e9cb --- /dev/null +++ b/google/cloud/automl_v1/proto/io_pb2.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/io.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/io.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z_`` + where will be made BigQuery-dataset-name compatible (e.g. most special + characters will become underscores), and timestamp will be in + YYYY\_MM\_DDThh\_mm\_ss\_sssZ "based on ISO-8601" format. In that + dataset a new table called ``primary_table`` will be created, and filled + with precisely the same data as this obtained on import. + + + Attributes: + destination: + Required. The destination of the output. + gcs_destination: + The Google Cloud Storage location where the output is to be + written to. For Image Object Detection, Text Extraction, Video + Classification and Tables, in the given directory a new + directory will be created with name: export\_data-- where + timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All + export output will be written into that directory. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location for the input content. + + + Attributes: + input_uris: + Required. Google Cloud Storage URIs to input files, up to 2000 + characters long. Accepted forms: \* Full object path, e.g. 
+ gs://bucket/directory/object.csv + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location where the output is to be written to. + + + Attributes: + output_uri_prefix: + Required. Google Cloud Storage URI to output directory, up to + 2000 characters long. Accepted forms: \* Prefix path: + gs://bucket/directory The requesting user must have write + permission to the bucket. The directory is created if it + doesn't exist. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + + +DESCRIPTOR._options = None +_INPUTCONFIG_PARAMSENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/automl_v1/proto/io_pb2_grpc.py b/google/cloud/automl_v1/proto/io_pb2_grpc.py new file mode 100644 index 00000000..07cb78fe --- /dev/null +++ b/google/cloud/automl_v1/proto/io_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/google/cloud/automl_v1/proto/model.proto b/google/cloud/automl_v1/proto/model.proto new file mode 100644 index 00000000..5f820b42 --- /dev/null +++ b/google/cloud/automl_v1/proto/model.proto @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// API proto representing a trained machine learning model. +message Model { + // Deployment state of the model. + enum DeploymentState { + // Should not be used, an un-set enum has this value by default. + DEPLOYMENT_STATE_UNSPECIFIED = 0; + + // Model is deployed. + DEPLOYED = 1; + + // Model is not deployed. + UNDEPLOYED = 2; + } + + // Required. + // The model metadata that is specific to the problem type. + // Must match the metadata type of the dataset used to train the model. + oneof model_metadata { + // Metadata for translation models. + TranslationModelMetadata translation_model_metadata = 15; + } + + // Output only. Resource name of the model. + // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` + string name = 1; + + // Required. The name of the model to show in the interface. 
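
Given the `GcsSource` and `GcsDestination` messages above, import and export configurations are typically passed to the AutoMl client as dicts of the same shape. A sketch with placeholder bucket and resource IDs, assuming the generated `dataset_path` helper; both calls return long-running operation futures.

```python
# Sketch: dict forms of InputConfig/OutputConfig built from the GcsSource
# and GcsDestination messages above. Bucket and resource IDs are placeholders.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
name = client.dataset_path("my-project", "us-central1", "my-dataset-id")

import_op = client.import_data(
    name, {"gcs_source": {"input_uris": ["gs://my-bucket/train.csv"]}}
)
import_op.result()  # block until the import completes

export_op = client.export_data(
    name, {"gcs_destination": {"output_uri_prefix": "gs://my-bucket/export/"}}
)
export_op.result()
```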
The name can be
+  // up to 32 characters long and can consist only of ASCII Latin letters A-Z
+  // and a-z, underscores
+  // (_), and ASCII digits 0-9. It must start with a letter.
+  string display_name = 2;
+
+  // Required. The resource ID of the dataset used to create the model. The dataset must
+  // come from the same ancestor project and location.
+  string dataset_id = 3;
+
+  // Output only. Timestamp when the model training finished and can be used for prediction.
+  google.protobuf.Timestamp create_time = 7;
+
+  // Output only. Timestamp when this model was last updated.
+  google.protobuf.Timestamp update_time = 11;
+
+  // Output only. Deployment state of the model. A model can only serve
+  // prediction requests after it gets deployed.
+  DeploymentState deployment_state = 8;
+
+  // Optional. The labels with user-defined metadata to organize your model.
+  //
+  // Label keys and values can be no longer than 64 characters
+  // (Unicode codepoints), can only contain lowercase letters, numeric
+  // characters, underscores and dashes. International characters are allowed.
+  // Label values are optional. Label keys must start with a letter.
+  //
+  // See https://goo.gl/xmQnxf for more information on and examples of labels.
+  map<string, string> labels = 34;
+}
diff --git a/google/cloud/automl_v1/proto/model_evaluation.proto b/google/cloud/automl_v1/proto/model_evaluation.proto
new file mode 100644
index 00000000..fe9df1b9
--- /dev/null
+++ b/google/cloud/automl_v1/proto/model_evaluation.proto
@@ -0,0 +1,62 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.automl.v1;
+
+import "google/cloud/automl/v1/translation.proto";
+import "google/protobuf/timestamp.proto";
+import "google/api/annotations.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl";
+option csharp_namespace = "Google.Cloud.AutoML.V1";
+option java_multiple_files = true;
+option java_package = "com.google.cloud.automl.v1";
+option php_namespace = "Google\\Cloud\\AutoML\\V1";
+option ruby_package = "Google::Cloud::AutoML::V1";
+
+// Evaluation results of a model.
+message ModelEvaluation {
+  // Output only. Problem type specific evaluation metrics.
+  oneof metrics {
+    // Model evaluation metrics for translation.
+    TranslationEvaluationMetrics translation_evaluation_metrics = 9;
+  }
+
+  // Output only. Resource name of the model evaluation.
+  // Format:
+  //
+  // `projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}`
+  string name = 1;
+
+  // Output only. The ID of the annotation spec that the model evaluation applies to.
+  // The ID is empty for the overall model evaluation.
+  string annotation_spec_id = 2;
+
+  // Output only. Timestamp when this model evaluation was created.
+  google.protobuf.Timestamp create_time = 5;
+
+  // Output only. The number of examples used for model evaluation, i.e.
for + // which ground truth from time of model creation is compared against the + // predicted annotations created by the model. + // For overall ModelEvaluation (i.e. with annotation_spec_id not set) this is + // the total number of all examples used for evaluation. + // Otherwise, this is the count of examples that according to the ground + // truth were annotated by the + // + // [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id]. + int32 evaluated_example_count = 6; +} diff --git a/google/cloud/automl_v1/proto/model_evaluation_pb2.py b/google/cloud/automl_v1/proto/model_evaluation_pb2.py new file mode 100644 index 00000000..ec05252d --- /dev/null +++ b/google/cloud/automl_v1/proto/model_evaluation_pb2.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/model_evaluation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/model_evaluation.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 3; +} + +// Response message for +// [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. +message PredictResponse { + // Prediction result. + // Translation and Text Sentiment will return precisely one payload. + repeated AnnotationPayload payload = 1; + + // Additional domain-specific prediction response metadata. + map metadata = 2; +} diff --git a/google/cloud/automl_v1/proto/prediction_service_pb2.py b/google/cloud/automl_v1/proto/prediction_service_pb2.py new file mode 100644 index 00000000..9d438e5f --- /dev/null +++ b/google/cloud/automl_v1/proto/prediction_service_pb2.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/prediction_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/prediction_service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\026PredictionServiceProtoP\001Z The dataset has + // translation_dataset_metadata. + string filter = 3; + + // Requested page size. Server may return fewer results than requested. + // If unspecified, server will pick a default size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] + // of the previous + // [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. +message ListDatasetsResponse { + // The datasets read. + repeated Dataset datasets = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] + // to obtain that page. + string next_page_token = 2; +} + +// Request message for +// [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] +message UpdateDatasetRequest { + // The dataset which replaces the resource on the server. + Dataset dataset = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. +message DeleteDatasetRequest { + // The resource name of the dataset to delete. + string name = 1; +} + +// Request message for +// [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. +message ImportDataRequest { + // Required. Dataset name. Dataset must already exist. All imported + // annotations and examples will be added. + string name = 1; + + // Required. The desired input location and its domain specific semantics, + // if any. + InputConfig input_config = 3; +} + +// Request message for +// [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. +message ExportDataRequest { + // Required. The resource name of the dataset. 
+ string name = 1; + + // Required. The desired output location. + OutputConfig output_config = 3; +} + +// Request message for +// [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. +message CreateModelRequest { + // Resource name of the parent project where the model is being created. + string parent = 1; + + // The model to create. + Model model = 4; +} + +// Request message for +// [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. +message GetModelRequest { + // Resource name of the model. + string name = 1; +} + +// Request message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsRequest { + // Resource name of the project, from which to list the models. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `model_metadata` - for existence of the case (e.g. + // video_classification_model_metadata:*). + // * `dataset_id` - for = or !=. Some examples of using the filter are: + // + // * `image_classification_model_metadata:*` --> The model has + // image_classification_model_metadata. + // * `dataset_id=5` --> The model was created from a dataset with ID 5. + string filter = 3; + + // Requested page size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListModelsResponse.next_page_token][google.cloud.automl.v1.ListModelsResponse.next_page_token] + // of the previous + // [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsResponse { + // List of models in the requested page. + repeated Model model = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListModelsRequest.page_token][google.cloud.automl.v1.ListModelsRequest.page_token] + // to obtain that page. + string next_page_token = 2; +} + +// Request message for +// [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. +message DeleteModelRequest { + // Resource name of the model being deleted. + string name = 1; +} + +// Request message for +// [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] +message UpdateModelRequest { + // The model which replaces the resource on the server. + Model model = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. +message GetModelEvaluationRequest { + // Resource name for the model evaluation. + string name = 1; +} + +// Request message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsRequest { + // Resource name of the model to list the model evaluations for. + // If modelId is set as "-", this will list model evaluations from across all + // models of the parent location. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `annotation_spec_id` - for =, != or existence. See example below for + // the last. + // + // Some examples of using the filter are: + // + // * `annotation_spec_id!=4` --> The model evaluation was done for + // annotation spec with ID different than 4. + // * `NOT annotation_spec_id:*` --> The model evaluation was done for + // aggregate of all annotation specs. + string filter = 3; + + // Requested page size. 
+ int32 page_size = 4; + + // A token identifying a page of results for the server to return. + // Typically obtained via + // [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1.ListModelEvaluationsResponse.next_page_token] + // of the previous + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsResponse { + // List of model evaluations in the requested page. + repeated ModelEvaluation model_evaluation = 1; + + // A token to retrieve next page of results. + // Pass to the + // [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1.ListModelEvaluationsRequest.page_token] + // field of a new + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // request to obtain that page. + string next_page_token = 2; +} diff --git a/google/cloud/automl_v1/proto/service_pb2.py b/google/cloud/automl_v1/proto/service_pb2.py new file mode 100644 index 00000000..093dfb1f --- /dev/null +++ b/google/cloud/automl_v1/proto/service_pb2.py @@ -0,0 +1,1693 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\013AutoMlProtoP\001Z The dataset has + translation\_dataset\_metadata. + page_size: + Requested page size. Server may return fewer results than + requested. If unspecified, server will pick a default size. 
+ page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListDatasetsResponse.next\_page\_token + ][google.cloud.automl.v1.ListDatasetsResponse.next\_page\_toke + n] of the previous [AutoMl.ListDatasets][google.cloud.automl.v + 1.AutoMl.ListDatasets] call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsRequest) + ), +) +_sym_db.RegisterMessage(ListDatasetsRequest) + +ListDatasetsResponse = _reflection.GeneratedProtocolMessageType( + "ListDatasetsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTDATASETSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. + + + Attributes: + datasets: + The datasets read. + next_page_token: + A token to retrieve next page of results. Pass to [ListDataset + sRequest.page\_token][google.cloud.automl.v1.ListDatasetsReque + st.page\_token] to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsResponse) + ), +) +_sym_db.RegisterMessage(ListDatasetsResponse) + +UpdateDatasetRequest = _reflection.GeneratedProtocolMessageType( + "UpdateDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] + + + Attributes: + dataset: + The dataset which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateDatasetRequest) + ), +) +_sym_db.RegisterMessage(UpdateDatasetRequest) + +DeleteDatasetRequest = _reflection.GeneratedProtocolMessageType( + "DeleteDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. + + + Attributes: + name: + The resource name of the dataset to delete. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteDatasetRequest) + ), +) +_sym_db.RegisterMessage(DeleteDatasetRequest) + +ImportDataRequest = _reflection.GeneratedProtocolMessageType( + "ImportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_IMPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. + + + Attributes: + name: + Required. Dataset name. Dataset must already exist. All + imported annotations and examples will be added. + input_config: + Required. The desired input location and its domain specific + semantics, if any. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ImportDataRequest) + ), +) +_sym_db.RegisterMessage(ImportDataRequest) + +ExportDataRequest = _reflection.GeneratedProtocolMessageType( + "ExportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_EXPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. + + + Attributes: + name: + Required. The resource name of the dataset. + output_config: + Required. The desired output location. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ExportDataRequest) + ), +) +_sym_db.RegisterMessage(ExportDataRequest) + +CreateModelRequest = _reflection.GeneratedProtocolMessageType( + "CreateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. + + + Attributes: + parent: + Resource name of the parent project where the model is being + created. + model: + The model to create. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.CreateModelRequest) + ), +) +_sym_db.RegisterMessage(CreateModelRequest) + +GetModelRequest = _reflection.GeneratedProtocolMessageType( + "GetModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. + + + Attributes: + name: + Resource name of the model. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelRequest) + ), +) +_sym_db.RegisterMessage(GetModelRequest) + +ListModelsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + parent: + Resource name of the project, from which to list the models. + filter: + An expression for filtering the results of the request. - + ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). - ``dataset_id`` + - for = or !=. Some examples of using the filter are: - + ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. - ``dataset_id=5`` + --> The model was created from a dataset with ID 5. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListModelsResponse.next\_page\_token][ + google.cloud.automl.v1.ListModelsResponse.next\_page\_token] + of the previous + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] + call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsRequest) + ), +) +_sym_db.RegisterMessage(ListModelsRequest) + +ListModelsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + model: + List of models in the requested page. + next_page_token: + A token to retrieve next page of results. Pass to [ListModelsR + equest.page\_token][google.cloud.automl.v1.ListModelsRequest.p + age\_token] to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsResponse) + ), +) +_sym_db.RegisterMessage(ListModelsResponse) + +DeleteModelRequest = _reflection.GeneratedProtocolMessageType( + "DeleteModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. 
+ + + Attributes: + name: + Resource name of the model being deleted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteModelRequest) + ), +) +_sym_db.RegisterMessage(DeleteModelRequest) + +UpdateModelRequest = _reflection.GeneratedProtocolMessageType( + "UpdateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] + + + Attributes: + model: + The model which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateModelRequest) + ), +) +_sym_db.RegisterMessage(UpdateModelRequest) + +GetModelEvaluationRequest = _reflection.GeneratedProtocolMessageType( + "GetModelEvaluationRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELEVALUATIONREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. + + + Attributes: + name: + Resource name for the model evaluation. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelEvaluationRequest) + ), +) +_sym_db.RegisterMessage(GetModelEvaluationRequest) + +ListModelEvaluationsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + parent: + Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations + from across all models of the parent location. + filter: + An expression for filtering the results of the request. - + ``annotation_spec_id`` - for =, != or existence. See example + below for the last. Some examples of using the filter are: + - ``annotation_spec_id!=4`` --> The model evaluation was done + for annotation spec with ID different than 4. - ``NOT + annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to + return. Typically obtained via [ListModelEvaluationsResponse.n + ext\_page\_token][google.cloud.automl.v1.ListModelEvaluationsR + esponse.next\_page\_token] of the previous [AutoMl.ListModelEv + aluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsRequest) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsRequest) + +ListModelEvaluationsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + model_evaluation: + List of model evaluations in the requested page. + next_page_token: + A token to retrieve next page of results. 
Pass to the [ListMod + elEvaluationsRequest.page\_token][google.cloud.automl.v1.ListM + odelEvaluationsRequest.page\_token] field of a new [AutoMl.Lis + tModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEval + uations] request to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsResponse) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsResponse) + + +DESCRIPTOR._options = None + +_AUTOML = _descriptor.ServiceDescriptor( + name="AutoMl", + full_name="google.cloud.automl.v1.AutoMl", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\025automl.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1871, + serialized_end=4179, + methods=[ + _descriptor.MethodDescriptor( + name="CreateDataset", + full_name="google.cloud.automl.v1.AutoMl.CreateDataset", + index=0, + containing_service=None, + input_type=_CREATEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0027",/v1/{parent=projects/*/locations/*}/datasets:\007dataset' + ), + ), + _descriptor.MethodDescriptor( + name="GetDataset", + full_name="google.cloud.automl.v1.AutoMl.GetDataset", + index=1, + containing_service=None, + input_type=_GETDATASETREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListDatasets", + full_name="google.cloud.automl.v1.AutoMl.ListDatasets", + index=2, + containing_service=None, + input_type=_LISTDATASETSREQUEST, + output_type=_LISTDATASETSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{parent=projects/*/locations/*}/datasets" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateDataset", + full_name="google.cloud.automl.v1.AutoMl.UpdateDataset", + index=3, + containing_service=None, + input_type=_UPDATEDATASETREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002?24/v1/{dataset.name=projects/*/locations/*/datasets/*}:\007dataset" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteDataset", + full_name="google.cloud.automl.v1.AutoMl.DeleteDataset", + index=4, + containing_service=None, + input_type=_DELETEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002.*,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ImportData", + full_name="google.cloud.automl.v1.AutoMl.ImportData", + index=5, + containing_service=None, + input_type=_IMPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:importData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="ExportData", + full_name="google.cloud.automl.v1.AutoMl.ExportData", + index=6, + containing_service=None, + input_type=_EXPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:exportData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="CreateModel", + full_name="google.cloud.automl.v1.AutoMl.CreateModel", + index=7, + containing_service=None, + input_type=_CREATEMODELREQUEST, + 
output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0023"*/v1/{parent=projects/*/locations/*}/models:\005model' + ), + ), + _descriptor.MethodDescriptor( + name="GetModel", + full_name="google.cloud.automl.v1.AutoMl.GetModel", + index=8, + containing_service=None, + input_type=_GETMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModels", + full_name="google.cloud.automl.v1.AutoMl.ListModels", + index=9, + containing_service=None, + input_type=_LISTMODELSREQUEST, + output_type=_LISTMODELSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{parent=projects/*/locations/*}/models" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteModel", + full_name="google.cloud.automl.v1.AutoMl.DeleteModel", + index=10, + containing_service=None, + input_type=_DELETEMODELREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002,**/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateModel", + full_name="google.cloud.automl.v1.AutoMl.UpdateModel", + index=11, + containing_service=None, + input_type=_UPDATEMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002920/v1/{model.name=projects/*/locations/*/models/*}:\005model" + ), + ), + _descriptor.MethodDescriptor( + name="GetModelEvaluation", + full_name="google.cloud.automl.v1.AutoMl.GetModelEvaluation", + index=12, + containing_service=None, + input_type=_GETMODELEVALUATIONREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2._MODELEVALUATION, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{name=projects/*/locations/*/models/*/modelEvaluations/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModelEvaluations", + full_name="google.cloud.automl.v1.AutoMl.ListModelEvaluations", + index=13, + containing_service=None, + input_type=_LISTMODELEVALUATIONSREQUEST, + output_type=_LISTMODELEVALUATIONSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{parent=projects/*/locations/*/models/*}/modelEvaluations" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_AUTOML) + +DESCRIPTOR.services_by_name["AutoMl"] = _AUTOML + +# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/automl_v1/proto/service_pb2_grpc.py b/google/cloud/automl_v1/proto/service_pb2_grpc.py new file mode 100644 index 00000000..dd6beb5c --- /dev/null +++ b/google/cloud/automl_v1/proto/service_pb2_grpc.py @@ -0,0 +1,322 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + service_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class AutoMlStub(object): + """AutoML Server API. + + The resource names are assigned by the server. + The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/CreateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.ListDatasets = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListDatasets", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.FromString, + ) + self.UpdateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.DeleteDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ImportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ImportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ExportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ExportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateModel = channel.unary_unary( + 
"/google.cloud.automl.v1.AutoMl/CreateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.ListModels = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModels", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.FromString, + ) + self.DeleteModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.UpdateModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.GetModelEvaluation = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModelEvaluation", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.FromString, + ) + self.ListModelEvaluations = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModelEvaluations", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.FromString, + ) + + +class AutoMlServicer(object): + """AutoML Server API. + + The resource names are assigned by the server. + The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def CreateDataset(self, request, context): + """Creates a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetDataset(self, request, context): + """Gets a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListDatasets(self, request, context): + """Lists datasets in a project. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateDataset(self, request, context): + """Updates a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteDataset(self, request, context): + """Deletes a dataset and all of its contents. + Returns empty response in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ImportData(self, request, context): + """Imports data into a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ExportData(self, request, context): + """Exports dataset's data to the provided output location. + Returns an empty response in the + [response][google.longrunning.Operation.response] field when it completes. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateModel(self, request, context): + """Creates a model. + Returns a Model in the [response][google.longrunning.Operation.response] + field when it completes. + When you create a model, several model evaluations are created for it: + a global evaluation, and one evaluation for each annotation spec. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModel(self, request, context): + """Gets a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModels(self, request, context): + """Lists models. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteModel(self, request, context): + """Deletes a model. + Returns `google.protobuf.Empty` in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateModel(self, request, context): + """Updates a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModelEvaluation(self, request, context): + """Gets a model evaluation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModelEvaluations(self, request, context): + """Lists model evaluations. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_AutoMlServicer_to_server(servicer, server): + rpc_method_handlers = { + "CreateDataset": grpc.unary_unary_rpc_method_handler( + servicer.CreateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetDataset": grpc.unary_unary_rpc_method_handler( + servicer.GetDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "ListDatasets": grpc.unary_unary_rpc_method_handler( + servicer.ListDatasets, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.SerializeToString, + ), + "UpdateDataset": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "DeleteDataset": grpc.unary_unary_rpc_method_handler( + servicer.DeleteDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ImportData": grpc.unary_unary_rpc_method_handler( + servicer.ImportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ExportData": grpc.unary_unary_rpc_method_handler( + servicer.ExportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateModel": grpc.unary_unary_rpc_method_handler( + servicer.CreateModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetModel": grpc.unary_unary_rpc_method_handler( + servicer.GetModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString, + ), + "ListModels": grpc.unary_unary_rpc_method_handler( + servicer.ListModels, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.SerializeToString, + ), + "DeleteModel": grpc.unary_unary_rpc_method_handler( + servicer.DeleteModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "UpdateModel": 
grpc.unary_unary_rpc_method_handler(
+            servicer.UpdateModel,
+            request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.FromString,
+            response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString,
+        ),
+        "GetModelEvaluation": grpc.unary_unary_rpc_method_handler(
+            servicer.GetModelEvaluation,
+            request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.FromString,
+            response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.SerializeToString,
+        ),
+        "ListModelEvaluations": grpc.unary_unary_rpc_method_handler(
+            servicer.ListModelEvaluations,
+            request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.FromString,
+            response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        "google.cloud.automl.v1.AutoMl", rpc_method_handlers
+    )
+    server.add_generic_rpc_handlers((generic_handler,))
diff --git a/google/cloud/automl_v1/proto/translation.proto b/google/cloud/automl_v1/proto/translation.proto
new file mode 100644
index 00000000..bc449fe7
--- /dev/null
+++ b/google/cloud/automl_v1/proto/translation.proto
@@ -0,0 +1,70 @@
+// Copyright 2019 Google LLC.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+syntax = "proto3";
+
+package google.cloud.automl.v1;
+
+import "google/cloud/automl/v1/data_items.proto";
+import "google/api/annotations.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl";
+option csharp_namespace = "Google.Cloud.AutoML.V1";
+option java_multiple_files = true;
+option java_outer_classname = "TranslationProto";
+option java_package = "com.google.cloud.automl.v1";
+option php_namespace = "Google\\Cloud\\AutoML\\V1";
+option ruby_package = "Google::Cloud::AutoML::V1";
+
+// Dataset metadata that is specific to translation.
+message TranslationDatasetMetadata {
+  // Required. The BCP-47 language code of the source language.
+  string source_language_code = 1;
+
+  // Required. The BCP-47 language code of the target language.
+  string target_language_code = 2;
+}
+
+// Evaluation metrics for the dataset.
+message TranslationEvaluationMetrics {
+  // Output only. BLEU score.
+  double bleu_score = 1;
+
+  // Output only. BLEU score for base model.
+  double base_bleu_score = 2;
+}
+
+// Model metadata that is specific to translation.
+message TranslationModelMetadata {
+  // The resource name of the model to use as a baseline to train the custom
+  // model. If unset, we use the default base model provided by Google
+  // Translate. Format:
+  // `projects/{project_id}/locations/{location_id}/models/{model_id}`
+  string base_model = 1;
+
+  // Output only. Inferred from the dataset.
+  // The source language (The BCP-47 language code) that is used for training.
+  string source_language_code = 2;
+
+  // Output only. The target language (The BCP-47 language code) that is used for
+  // training.
+  string target_language_code = 3;
+}
+
+// Annotation details specific to translation.
+message TranslationAnnotation {
+  // Output only. The translated content.
+  TextSnippet translated_content = 1;
+}
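Taken together, these messages are what a caller populates when creating a translation dataset. Below is a minimal sketch of that flow through the new `automl_v1` surface, assuming the usual GAPIC conventions (a `location_path` helper, dict-to-proto coercion, and an operation future returned by `create_dataset`); the project ID, location, display name, and language codes are placeholders, and the snippet is illustrative rather than part of this diff:

```python
# Sketch only: create a translation dataset with the v1 client and wait
# for the long-running operation to finish.
from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
parent = client.location_path("my-project", "us-central1")  # placeholder IDs

dataset = {
    "display_name": "my_translation_dataset",
    "translation_dataset_metadata": {
        "source_language_code": "en",  # BCP-47 codes, per the proto above
        "target_language_code": "es",
    },
}

# Assuming create_dataset returns an operation future, result() blocks
# until the dataset exists and yields the created Dataset message.
operation = client.create_dataset(parent, dataset)
created = operation.result()
print(created.name)
```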
diff --git a/google/cloud/automl_v1/proto/translation_pb2.py b/google/cloud/automl_v1/proto/translation_pb2.py
new file mode 100644
index 00000000..4542dbc5
--- /dev/null
+++ b/google/cloud/automl_v1/proto/translation_pb2.py
@@ -0,0 +1,370 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/automl_v1/proto/translation.proto
+
+import sys
+
+_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1"))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.cloud.automl_v1.proto import (
+    data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2,
+)
+from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+    name="google/cloud/automl_v1/proto/translation.proto",
+    package="google.cloud.automl.v1",
+    syntax="proto3",
+    serialized_options=_b(
+        "\n\032com.google.cloud.automl.v1B\020TranslationProtoP\001Z>> Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                dataset. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
-            dataset_name (Optional[string]):
+            dataset_name (Optional[str]):
                 This is the fully-qualified name generated by the AutoML API
                 for this dataset. This is not to be confused with the
                 human-assigned `dataset_display_name` that is provided when
                 creating a dataset. Either `dataset_name` or
                 `dataset_display_name` must be provided.
-            dataset_display_name (Optional[string]):
+            dataset_display_name (Optional[str]):
                 This is the name you provided for the dataset when first
                 creating it. Either `dataset_name` or `dataset_display_name`
                 must be provided.
@@ -550,15 +557,15 @@ def create_dataset(
         >>>
 
         Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that will own the
+                dataset. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+ region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (string): + dataset_display_name (str): A human-readable name to refer to this dataset by. Returns: @@ -604,19 +611,19 @@ def delete_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to delete. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to delete. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -626,8 +633,9 @@ def delete_dataset( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -679,7 +687,7 @@ def import_data( ... >>> d = client.create_dataset(dataset_display_name='my_dataset') >>> - >>> client.import_data(dataset=d, + >>> response = client.import_data(dataset=d, ... gcs_input_uris='gs://cloud-ml-tables-data/bank-marketing.csv') ... >>> def callback(operation_future): @@ -689,12 +697,12 @@ def import_data( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. credentials (Optional[google.auth.credentials.Credentials]): The @@ -702,11 +710,11 @@ def import_data( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to import data into. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to import data into. 
This must be supplied if `dataset_display_name` or
                 `dataset` are not supplied.
@@ -720,19 +728,20 @@ def import_data(
                 `gs://{project}-automl-tables-staging/{uploaded_csv_name}`
                 This parameter must be supplied if neither `gcs_input_uris` nor
                 `bigquery_input_uri` is supplied.
-            gcs_input_uris (Optional[Union[string, Sequence[string]]]):
+            gcs_input_uris (Optional[Union[str, Sequence[str]]]):
                 Either a single `gs://..` prefixed URI, or a list of URIs
                 referring to GCS-hosted CSV files containing the data to
                 import. This must be supplied if neither `bigquery_input_uri`
                 nor `pandas_dataframe` is supplied.
-            bigquery_input_uri (Optional[string]):
+            bigquery_input_uri (Optional[str]):
                 A URI pointing to the BigQuery table containing the data to
                 import. This must be supplied if neither `gcs_input_uris` nor
                 `pandas_dataframe` is supplied.
 
         Returns:
-            A :class:`~google.cloud.automl_v1beta1.types._OperationFuture`
-            instance.
+            google.api_core.operation.Operation:
+                An operation future that can be used to check for
+                completion synchronously or asynchronously.
 
         Raises:
             google.api_core.exceptions.GoogleAPICallError: If the request
@@ -753,7 +762,10 @@ def import_data(
 
         request = {}
         if pandas_dataframe is not None:
-            self.__ensure_gcs_client_is_initialized(credentials)
+            project = project or self.project
+            region = region or self.region
+            credentials = credentials or self.credentials
+            self.__ensure_gcs_client_is_initialized(credentials, project)
             self.gcs_client.ensure_bucket_exists(project, region)
             gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe)
             request = {"gcs_source": {"input_uris": [gcs_input_uri]}}
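The hunk above resolves `project`, `region`, and `credentials` to the client's defaults before initializing the GCS client, which is what lets a `pandas_dataframe` import stage its CSV under the caller's own project. A hedged usage sketch follows; the dataframe contents and display name are invented, and `TablesClient` is assumed to be exported from `google.cloud.automl_v1beta1` as in recent releases:

```python
# Sketch only: import a pandas DataFrame through the tables client. The
# frame is uploaded to a staging bucket in "my-project" (placeholder).
import pandas
from google.cloud import automl_v1beta1

client = automl_v1beta1.TablesClient(project="my-project", region="us-central1")

df = pandas.DataFrame({"age": [31, 52], "deposit": [True, False]})  # toy data
dataset = client.create_dataset(dataset_display_name="my_dataset")

# import_data returns an operation future; block until the import is done.
response = client.import_data(dataset=dataset, pandas_dataframe=df)
response.result()
```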
@@ -796,7 +808,7 @@ def export_data(
         ...
         >>> d = client.create_dataset(dataset_display_name='my_dataset')
         >>>
-        >>> client.export_data(dataset=d,
+        >>> response = client.export_data(dataset=d,
         ...     gcs_output_uri_prefix='gs://cloud-ml-tables-data/bank-marketing.csv')
         ...
         >>> def callback(operation_future):
@@ -806,19 +818,19 @@ def export_data(
         >>>
 
         Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                dataset. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
-            dataset_display_name (Optional[string]):
+            dataset_display_name (Optional[str]):
                 The human-readable name given to the dataset you want to
                 export data from. This must be supplied if `dataset` or
                 `dataset_name` are not supplied.
-            dataset_name (Optional[string]):
+            dataset_name (Optional[str]):
                 The AutoML-assigned name given to the dataset you want to
                 export data from. This must be supplied if
                 `dataset_display_name` or `dataset` are not supplied.
@@ -826,16 +838,17 @@ def export_data(
                 The `Dataset` instance you want to export data from. This must
                 be supplied if `dataset_display_name` or `dataset_name` are not
                 supplied.
-            gcs_output_uri_prefix (Optional[Union[string, Sequence[string]]]):
+            gcs_output_uri_prefix (Optional[Union[str, Sequence[str]]]):
                 A single `gs://..` prefixed URI to export to. This must be
                 supplied if `bigquery_output_uri` is not.
-            bigquery_output_uri (Optional[string]):
+            bigquery_output_uri (Optional[str]):
                 A URI pointing to the BigQuery table containing the data to
                 export. This must be supplied if `gcs_output_uri_prefix` is
                 not.
 
         Returns:
-            A :class:`~google.cloud.automl_v1beta1.types._OperationFuture`
-            instance.
+            google.api_core.operation.Operation:
+                An operation future that can be used to check for
+                completion synchronously or asynchronously.
 
         Raises:
             google.api_core.exceptions.GoogleAPICallError: If the request
@@ -883,15 +896,15 @@ def get_table_spec(self, table_spec_name, project=None, region=None, **kwargs):
         >>>
 
         Args:
-            table_spec_name (string):
+            table_spec_name (str):
                 This is the fully-qualified name generated by the AutoML API
                 for this table spec.
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                table. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
@@ -933,19 +946,19 @@ def list_table_specs(
         ...
 
         Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                dataset. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
-            dataset_display_name (Optional[string]):
+            dataset_display_name (Optional[str]):
                 The human-readable name given to the dataset you want to read
                 specs from. This must be supplied if `dataset` or
                 `dataset_name` are not supplied.
-            dataset_name (Optional[string]):
+            dataset_name (Optional[str]):
                 The AutoML-assigned name given to the dataset you want to read
                 specs from. This must be supplied if `dataset_display_name` or
                 `dataset` are not supplied.
@@ -995,15 +1008,15 @@ def get_column_spec(self, column_spec_name, project=None, region=None, **kwargs)
         >>>
 
         Args:
-            column_spec_name (string):
+            column_spec_name (str):
                 This is the fully-qualified name generated by the AutoML API
                 for this column spec.
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                column. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+ region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -1047,29 +1060,29 @@ def list_column_specs( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + columns. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose specs you want to read. If not supplied, the client can determine this name from a source `Dataset` object. table_spec_index (Optional[int]): If no `table_spec_name` was provided, we use this index to determine which table to read column specs from. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to read specs from. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of @@ -1145,50 +1158,57 @@ def update_column_spec( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): - If you have initialized the client with a value for `region` it - will be used if this parameter is not supplied. - column_spec_name (Optional[string]): - The name AutoML-assigned name for the column you want to - update. - column_spec_display_name (Optional[string]): - The human-readable name of the column you want to update. If - this is supplied in place of `column_spec_name`, you also need - to provide either a way to lookup the source dataset (using one - of the `dataset*` kwargs), or the `table_spec_name` of the - table this column belongs to. - table_spec_name (Optional[string]): - The AutoML-assigned name for the table whose specs you want to - update. If not supplied, the client can determine this name - from a source `Dataset` object. - table_spec_index (Optional[int]): - If no `table_spec_name` was provided, we use this index to - determine which table to update column specs on. - dataset_display_name (Optional[string]): + dataset (Optional[Dataset]): + The `Dataset` instance you want to update specs on. If no + `table_spec_name` is supplied, this will be used together with + `table_spec_index` to infer the name of table to update specs + on. This must be supplied if `table_spec_name`, `dataset_name` + or `dataset_display_name` are not supplied. 
+            dataset_display_name (Optional[str]):
                 The human-readable name given to the dataset you want to
                 update specs on. If no `table_spec_name` is supplied, this will
                 be used together with `table_spec_index` to infer the name of
                 table to update specs on. This must be supplied if
                 `table_spec_name`, `dataset` or `dataset_name` are not
                 supplied.
-            dataset_name (Optional[string]):
+            dataset_name (Optional[str]):
                 The AutoML-assigned name given to the dataset you want to
                 update specs one. If no `table_spec_name` is supplied, this
                 will be used together with `table_spec_index` to infer the name
                 of table to update specs on. This must be supplied if
                 `table_spec_name`, `dataset` or `dataset_display_name` are not
                 supplied.
-            dataset (Optional[Dataset]):
-                The `Dataset` instance you want to update specs on. If no
-                `table_spec_name` is supplied, this will be used together with
-                `table_spec_index` to infer the name of table to update specs
-                on. This must be supplied if `table_spec_name`, `dataset_name`
-                or `dataset_display_name` are not supplied.
+            table_spec_name (Optional[str]):
+                The AutoML-assigned name for the table whose specs you want to
+                update. If not supplied, the client can determine this name
+                from a source `Dataset` object.
+            table_spec_index (Optional[int]):
+                If no `table_spec_name` was provided, we use this index to
+                determine which table to update column specs on.
+            column_spec_name (Optional[str]):
+                The AutoML-assigned name for the column you want to
+                update.
+            column_spec_display_name (Optional[str]):
+                The human-readable name of the column you want to update. If
+                this is supplied in place of `column_spec_name`, you also need
+                to provide either a way to lookup the source dataset (using one
+                of the `dataset*` kwargs), or the `table_spec_name` of the
+                table this column belongs to.
+            type_code (Optional[str]):
+                The desired 'type_code' of the column. For more information
+                on the available types, please see the documentation:
+                https://cloud.google.com/automl-tables/docs/reference/rpc/google.cloud.automl.v1beta1#typecode
+            nullable (Optional[bool]):
+                Set to `True` or `False` to specify if this column's value
+                is expected to be present in all rows or not.
+            project (Optional[str]): The ID of the project that owns the
+                columns. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
+                If you have initialized the client with a value for `region` it
+                will be used if this parameter is not supplied.
 
         Returns:
             A :class:`~google.cloud.automl_v1beta1.types.ColumnSpec` instance.
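Since the argument documentation above was reordered, a short sketch of the call it describes may help orient readers; the dataset and column names are placeholders, and `type_code` is passed in the string form the docstring documents:

```python
# Sketch only: mark a column as a nullable FLOAT64 via the documented kwargs.
from google.cloud import automl_v1beta1

client = automl_v1beta1.TablesClient(project="my-project", region="us-central1")

spec = client.update_column_spec(
    dataset_display_name="my_dataset",
    column_spec_display_name="age",  # human-readable column name
    type_code="FLOAT64",             # see the type code reference linked above
    nullable=True,                   # values may be absent in some rows
)
print(spec.display_name)
```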
@@ -1270,24 +1290,24 @@ def set_target_column(
         ...
 
         Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                table. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
-            column_spec_name (Optional[string]):
+            column_spec_name (Optional[str]):
                 The name AutoML-assigned name for the column you want to set
                 as the target column.
-            column_spec_display_name (Optional[string]):
+            column_spec_display_name (Optional[str]):
                 The human-readable name of the column you want to set as the
                 target column. If this is supplied in place of
                 `column_spec_name`, you also need to provide either a way to
                 lookup the source dataset (using one of the `dataset*` kwargs),
                 or the `table_spec_name` of the table this column belongs to.
-            table_spec_name (Optional[string]):
+            table_spec_name (Optional[str]):
                 The AutoML-assigned name for the table whose target column you
                 want to set . If not supplied, the client can determine this
                 name from a source `Dataset` object.
@@ -1295,14 +1315,14 @@ def set_target_column(
                 If no `table_spec_name` or `column_spec_name` was provided, we
                 use this index to determine which table to set the target
                 column on.
-            dataset_display_name (Optional[string]):
+            dataset_display_name (Optional[str]):
                 The human-readable name given to the dataset you want to
                 update the target column of. If no `table_spec_name` is
                 supplied, this will be used together with `table_spec_index` to
                 infer the name of table to update the target column of. This
                 must be supplied if `table_spec_name`, `dataset` or
                 `dataset_name` are not supplied.
-            dataset_name (Optional[string]):
+            dataset_name (Optional[str]):
                 The AutoML-assigned name given to the dataset you want to
                 update the target column of. If no `table_spec_name` is
                 supplied, this will be used together with `table_spec_index` to
@@ -1383,28 +1403,28 @@ def set_time_column(
         ...     project='my-project', region='us-central1')
         ...
         >>> client.set_time_column(dataset_display_name='my_dataset',
-        ...     column_spec_name='Unix Time')
+        ...     column_spec_display_name='Unix Time')
         ...
 
         Args:
-            project (Optional[string]):
-                If you have initialized the client with a value for `project`
-                it will be used if this parameter is not supplied. Keep in
-                mind, the service account this client was initialized with must
-                have access to this project.
-            region (Optional[string]):
+            project (Optional[str]): The ID of the project that owns the
+                table. If you have initialized the client with a value for
+                `project` it will be used if this parameter is not supplied.
+                Keep in mind, the service account this client was initialized
+                with must have access to this project.
+            region (Optional[str]):
                 If you have initialized the client with a value for `region` it
                 will be used if this parameter is not supplied.
-            column_spec_name (Optional[string]):
+            column_spec_name (Optional[str]):
                 The name AutoML-assigned name for the column you want to set as
                 the time column.
-            column_spec_display_name (Optional[string]):
+            column_spec_display_name (Optional[str]):
                 The human-readable name of the column you want to set as the
                 time column. If this is supplied in place of
                 `column_spec_name`, you also need to provide either a way to
                 lookup the source dataset (using one of the `dataset*` kwargs),
                 or the `table_spec_name` of the table this column belongs to.
-            table_spec_name (Optional[string]):
+            table_spec_name (Optional[str]):
                 The AutoML-assigned name for the table whose time column you
                 want to set . If not supplied, the client can determine this
                 name from a source `Dataset` object.
@@ -1412,14 +1432,14 @@ def set_time_column(
                 If no `table_spec_name` or `column_spec_name` was provided, we
                 use this index to determine which table to set the time column
                 on.
- dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1495,26 +1515,26 @@ def clear_time_column( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... project='my-project', region='us-central1') ... - >>> client.set_time_column(dataset_display_name='my_dataset') + >>> client.clear_time_column(dataset_display_name='my_dataset') >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1585,24 +1605,24 @@ def set_weight_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the weight column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the weight column. 
If this is supplied in place of `column_spec_name`, you also need to provide either a way to look up the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose weight column you want to set. If not supplied, the client can determine this name from a source `Dataset` object. @@ -1610,14 +1630,14 @@ def set_weight_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the weight column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of the table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1697,22 +1717,22 @@ def clear_weight_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of the table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1782,24 +1802,24 @@ def set_test_train_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied.
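As a brief, non-authoritative sketch of the weight column helpers documented just above (the numeric column 'sample_weight' is hypothetical, and continues the `client` from the earlier sketch):

>>> client.set_weight_column(dataset_display_name='my_dataset',
...                          column_spec_display_name='sample_weight')
>>> # Clearing the weight column restores unweighted training.
>>> client.clear_weight_column(dataset_display_name='my_dataset')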
- column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The AutoML-assigned name for the column you want to set as the test/train column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the test/train column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to look up the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose test/train column you want to set. If not supplied, the client can determine this name from a source `Dataset` object. @@ -1807,14 +1827,14 @@ def set_test_train_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the test/train column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of the table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1895,22 +1915,22 @@ def clear_test_train_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of the table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1970,12 +1990,12 @@ def list_models(self, project=None, region=None, **kwargs): ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + models.
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2024,19 +2044,19 @@ def list_model_evaluations( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to list evaluations for. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to list evaluations for. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2052,6 +2072,12 @@ def list_model_evaluations( instances. You can also iterate over the pages of the response using its `pages` property. + For a regression model, there will only be one evaluation. For a + classification model, there will be one for each classification + label, as well as one for micro-averaged metrics. See more + documentation here: + https://cloud.google.com/automl-tables/docs/evaluate#automl-tables-list-model-evaluations-cli-curl + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2096,33 +2122,37 @@ def create_model( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... project='my-project', region='us-central1') ... - >>> m = client.create_model('my_model', dataset_display_name='my_dataset') + >>> m = client.create_model( + ... 'my_model', + ... dataset_display_name='my_dataset', + ... train_budget_milli_node_hours=1000 + ... ) >>> >>> m.result() # blocks on result >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that will own the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (string): + model_display_name (str): A human-readable name to refer to this model by. train_budget_milli_node_hours (int): The amount of time (in thousandths of an hour) to spend training.
This value must be between 1,000 and 72,000 inclusive (between 1 and 72 hours). - optimization_objective (string): + optimization_objective (str): The metric AutoML Tables should optimize for. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to train your model on. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to train your model on. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -2132,15 +2162,17 @@ def create_model( are not supplied. model_metadata (Optional[Dict]): Optional model metadata to supply to the client. - include_column_spec_names(Optional[string]): + include_column_spec_names (Optional[List[str]]): The list of names of the columns you want to include when training your model. - exclude_column_spec_names(Optional[string]): + exclude_column_spec_names (Optional[List[str]]): The list of names of the columns you want to exclude from training your model. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2245,19 +2277,19 @@ def delete_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to delete. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to delete. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2267,8 +2299,9 @@ def delete_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2312,15 +2345,15 @@ def get_model_evaluation( >>> Args: - model_evaluation_name (string): + model_evaluation_name (str): This is the fully-qualified name generated by the AutoML API for this model evaluation. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model.
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2359,21 +2392,21 @@ def get_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_name (Optional[string]): + model_name (Optional[str]): This is the fully-qualified name generated by the AutoML API for this model. This is not to be confused with the human-assigned `model_display_name` that is provided when creating a model. Either `model_name` or `model_display_name` must be provided. - model_display_name (Optional[string]): + model_display_name (Optional[str]): This is the name you provided for the model when first creating it. Either `model_name` or `model_display_name` must be provided. @@ -2428,19 +2461,19 @@ def deploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to deploy. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to deploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2450,8 +2483,9 @@ def deploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2499,19 +2533,19 @@ def undeploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. 
- region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to undeploy. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to undeploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2521,8 +2555,9 @@ def undeploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2574,22 +2609,22 @@ def predict( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - inputs (Union[List[string], Dict[string, string]]): + inputs (Union[List[str], Dict[str, str]]): Either the sorted list of column values to predict with, or a key-value map of column display name to value to predict with. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2676,12 +2711,12 @@ def batch_predict( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. 
credentials (Optional[google.auth.credentials.Credentials]): The @@ -2695,24 +2730,24 @@ def batch_predict( staged to GCS in `gs://{project}-automl-tables-staging/{uploaded_csv_name}`. This must be supplied if neither `gcs_input_uris` nor `bigquery_input_uri` is supplied. - gcs_input_uris (Optional(Union[List[string], string])) + gcs_input_uris (Optional[Union[List[str], str]]): Either a list of GCS URIs or a single GCS URI containing the data you want to predict on. This must be supplied if neither `pandas_dataframe` nor `bigquery_input_uri` is supplied. - gcs_output_uri_prefix (Optional[string]) + gcs_output_uri_prefix (Optional[str]): The folder in GCS you want to write output to. This must be supplied if `bigquery_output_uri` is not. - bigquery_input_uri (Optional[string]) + bigquery_input_uri (Optional[str]): The BigQuery table to input data from. This must be supplied if neither `pandas_dataframe` nor `gcs_input_uris` is supplied. - bigquery_output_uri (Optional[string]) + bigquery_output_uri (Optional[str]): The BigQuery table to output data to. This must be supplied if `gcs_output_uri_prefix` is not. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2722,8 +2757,9 @@ def batch_predict( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously.
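Taken together, the operation futures documented in these Returns sections support an end-to-end workflow like the sketch below. This is illustrative only: the model name, bucket, and column values are hypothetical, and the DataFrame staging relies on the client-level project and credentials fallback introduced in this change.

>>> op = client.create_model('my_model', dataset_display_name='my_dataset',
...                          train_budget_milli_node_hours=1000)
>>> op.result()  # block until training finishes
>>> client.deploy_model(model_display_name='my_model').result()
>>> client.predict(model_display_name='my_model',
...                inputs={'age': '31', 'income': '42000'})
>>> import pandas
>>> df = pandas.DataFrame({'age': [31], 'income': [42000]})
>>> batch_op = client.batch_predict(
...     model_display_name='my_model',
...     pandas_dataframe=df,  # staged to GCS using the client's project/credentials
...     gcs_output_uri_prefix='gs://my-bucket/output')
>>> batch_op.result()  # block until the batch prediction completes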
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2744,7 +2780,10 @@ def batch_predict( input_request = None if pandas_dataframe is not None: - self.__ensure_gcs_client_is_initialized(credentials) + project = project or self.project + region = region or self.region + credentials = credentials or self.credentials + self.__ensure_gcs_client_is_initialized(credentials, project) self.gcs_client.ensure_bucket_exists(project, region) gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe) input_request = {"gcs_source": {"input_uris": [gcs_input_uri]}} diff --git a/setup.py b/setup.py index b2c46ea3..6554c687 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ name = "google-cloud-automl" description = "Cloud AutoML API client library" -version = "0.6.0" +version = "0.7.0" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/synth.metadata b/synth.metadata index 026f3914..641ff4cd 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-25T12:11:40.675705Z", + "updateTime": "2019-10-08T12:12:09.104671Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.1", - "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9dc1d37b6b9e9581c8ab56c6b2d3b49ff3eeb254", - "internalRef": "271101725" + "sha": "122bdbf877ad87439f8dd9d1474a8e5dde188087", + "internalRef": "273381131" } }, { @@ -34,6 +34,16 @@ "generator": "gapic", "config": "google/cloud/automl/artman_automl_v1beta1.yaml" } + }, + { + "client": { + "source": "googleapis", + "apiName": "automl", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/automl/artman_automl_v1.yaml" + } } ] } \ No newline at end of file diff --git a/synth.py b/synth.py index 6176f5b3..937bb0ab 100644 --- a/synth.py +++ b/synth.py @@ -21,7 +21,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v1beta1"] +versions = ["v1beta1", "v1"] # ---------------------------------------------------------------------------- diff --git a/tests/unit/gapic/v1/test_auto_ml_client_v1.py b/tests/unit/gapic/v1/test_auto_ml_client_v1.py new file mode 100644 index 00000000..cdf4555f --- /dev/null +++ b/tests/unit/gapic/v1/test_auto_ml_client_v1.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
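Before the tests themselves, note the mechanism they rely on for long-running calls: a `google.longrunning` `Operation` marked `done=True` whose `response` field (a protobuf `Any`) is packed with the expected result message, so the client-side operation future's `result()` can unpack it. A minimal standalone sketch of that round trip, with illustrative names:

from google.longrunning import operations_pb2
from google.cloud.automl_v1.proto import dataset_pb2

expected = dataset_pb2.Dataset(name="projects/p/locations/l/datasets/d")
operation = operations_pb2.Operation(name="operations/demo", done=True)
operation.response.Pack(expected)  # marshal the result into the Any field

unpacked = dataset_pb2.Dataset()
assert operation.response.Unpack(unpacked)  # True when the payload type matches
assert unpacked == expected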
+ +"""Unit tests.""" + +import mock +import pytest + +from google.rpc import status_pb2 + +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestAutoMlClient(object): + def test_create_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_dataset", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.CreateDatasetRequest( + parent=parent, dataset=dataset + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + exception = response.exception() + assert exception.errors[0] == error + + def test_update_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = 
"description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + dataset = {} + update_mask = {} + + response = client.update_dataset(dataset, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + dataset = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_dataset(dataset, update_mask) + + def test_get_dataset(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name_2, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.get_dataset(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetDatasetRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + with pytest.raises(CustomException): + client.get_dataset(name) + + def test_list_datasets(self): + # Setup Expected Response + next_page_token = "" + datasets_element = {} + datasets = [datasets_element] + expected_response = {"next_page_token": next_page_token, "datasets": datasets} + expected_response = service_pb2.ListDatasetsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = 
client.list_datasets(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.datasets[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListDatasetsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_datasets_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_datasets(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_dataset(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_dataset", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteDatasetRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_import_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_import_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ImportDataRequest( + name=name, input_config=input_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_import_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + 
name="operations/test_import_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_export_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_export_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = client.export_data(name, output_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ExportDataRequest( + name=name, output_config=output_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_export_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_export_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = client.export_data(name, output_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_create_model(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.CreateModelRequest(parent=parent, model=model) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + 
name="operations/test_create_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name_2, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.get_model(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + with pytest.raises(CustomException): + client.get_model(name) + + def test_update_model(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + model = {} + update_mask = {} + + response = client.update_model(model, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateModelRequest( + model=model, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + model = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_model(model, update_mask) + + def test_list_models(self): + # Setup Expected Response + next_page_token = "" + model_element = {} + model = [model_element] + expected_response = {"next_page_token": next_page_token, "model": model} + 
expected_response = service_pb2.ListModelsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_models_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_model(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model_evaluation(self): + # Setup Expected Response + name_2 = "name2-1052831874" + annotation_spec_id = "annotationSpecId60690191" + evaluated_example_count = 277565350 + expected_response = { + "name": name_2, + "annotation_spec_id": annotation_spec_id, + "evaluated_example_count": evaluated_example_count, + } + expected_response = model_evaluation_pb2.ModelEvaluation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = 
client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + response = client.get_model_evaluation(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelEvaluationRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_evaluation_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + with pytest.raises(CustomException): + client.get_model_evaluation(name) + + def test_list_model_evaluations(self): + # Setup Expected Response + next_page_token = "" + model_evaluation_element = {} + model_evaluation = [model_evaluation_element] + expected_response = { + "next_page_token": next_page_token, + "model_evaluation": model_evaluation, + } + expected_response = service_pb2.ListModelEvaluationsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + paged_list_response = client.list_model_evaluations(parent, filter_) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model_evaluation[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_ + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_model_evaluations_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + paged_list_response = client.list_model_evaluations(parent, filter_) + with pytest.raises(CustomException): + list(paged_list_response) diff --git a/tests/unit/gapic/v1/test_prediction_service_client_v1.py b/tests/unit/gapic/v1/test_prediction_service_client_v1.py new file mode 100644 index 00000000..02d12f0a --- /dev/null +++ b/tests/unit/gapic/v1/test_prediction_service_client_v1.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
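The prediction-service tests below exercise `predict` with an empty payload, which is all the request plumbing needs. For a real call the payload is an `ExamplePayload`; a hedged sketch for a text model follows, where the model ID and content are hypothetical and a deployed v1 text model is assumed:

>>> from google.cloud import automl_v1
>>> client = automl_v1.PredictionServiceClient()
>>> name = client.model_path('my-project', 'us-central1', 'TCN1234567890')
>>> payload = {'text_snippet': {'content': 'A sample sentence.',
...                             'mime_type': 'text/plain'}}
>>> response = client.predict(name, payload)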
+ +"""Unit tests.""" + +import mock +import pytest + +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestPredictionServiceClient(object): + def test_predict(self): + # Setup Expected Response + expected_response = {} + expected_response = prediction_service_pb2.PredictResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.PredictionServiceClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} + + response = client.predict(name, payload) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = prediction_service_pb2.PredictRequest( + name=name, payload=payload + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_predict_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.PredictionServiceClient() + + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} + + with pytest.raises(CustomException): + client.predict(name, payload) diff --git a/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py index 49d4a0f8..f7a2e27a 100644 --- a/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py +++ b/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py @@ -22,8 +22,11 @@ import re from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials from google.cloud import automl_v1beta1 +PROJECT = "project" + class TestGcsClient(object): def gcs_client(self, bucket_name=None, client_attrs={}): @@ -32,6 +35,24 @@ def gcs_client(self, bucket_name=None, client_attrs={}): bucket_name=bucket_name, client=client_mock ) + def test_init_with_project_and_credentials(self): + # helper for checking that the storage client is initialized with the + # passed in project and credentials. 
+ class FakeStorageClient: + def __init__(self, project=None, credentials=None): + self.project = project + self.credentials = credentials + + patch = mock.patch("google.cloud.storage.Client", new=FakeStorageClient) + with patch: + credentials = AnonymousCredentials() + gcs_client = automl_v1beta1.tables.gcs_client.GcsClient( + project=PROJECT, credentials=credentials + ) + assert isinstance(gcs_client.client, FakeStorageClient) + assert gcs_client.client.project == PROJECT + assert gcs_client.client.credentials == credentials + def test_ensure_bucket_exists(self): mock_bucket = mock.Mock() gcs_client = self.gcs_client( diff --git a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py index aa1babfa..199df81c 100644 --- a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py +++ b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py @@ -20,8 +20,9 @@ import pandas import pytest -from google.cloud import automl_v1beta1 from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials +from google.cloud import automl_v1beta1 from google.cloud.automl_v1beta1.proto import data_types_pb2 PROJECT = "project" @@ -214,6 +215,33 @@ def test_import_pandas_dataframe(self): "name", {"gcs_source": {"input_uris": ["uri"]}} ) + def test_import_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "uri" + + client.import_data(dataset_name="name", pandas_dataframe=dataframe) + + assert client.gcs_client is mockInstance + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + client.auto_ml_client.import_data.assert_called_with( + "name", {"gcs_source": {"input_uris": ["uri"]}} + ) + def test_import_gcs_uri(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", gcs_input_uris="uri") @@ -1220,6 +1248,40 @@ def test_batch_predict_pandas_dataframe(self): {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) + def test_batch_predict_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "gs://input" + + dataframe = pandas.DataFrame({}) + client.batch_predict( + model_name="my_model", + pandas_dataframe=dataframe, + gcs_output_uri_prefix="gs://output", + ) + + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + + client.prediction_client.batch_predict.assert_called_with( + "my_model", + {"gcs_source": {"input_uris": ["gs://input"]}}, + {"gcs_destination": {"output_uri_prefix": 
"gs://output"}}, + ) + def test_batch_predict_gcs(self): client = self.tables_client({}, {}) client.batch_predict(