{% block body %} {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index a2e8ab1e..fcaca368 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -90,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. diff --git a/noxfile.py b/noxfile.py index 512fe04a..d86e996e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -100,6 +100,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -147,11 +151,45 @@ def docs(session): session.install("-e", ".[pandas,storage]") session.install("sphinx<3.0.0", "alabaster", "recommonmark") + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".[pandas,storage]") + session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", "-T", # show full traceback on exception "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), "-b", "html", "-d", diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md new file mode 100644 index 00000000..55c97b32 --- /dev/null +++ b/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md new file mode 100644 index 00000000..34c882b6 --- /dev/null +++ b/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py new file mode 100644 index 00000000..ba55d7ce --- /dev/null +++ b/samples/snippets/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 
2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to ensure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__",) + ] + + +# Linting with flake8.
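The `TEST_CONFIG` hook above is driven by an optional `noxfile_config.py` placed next to each sample; the try/except import picks it up and `TEST_CONFIG.update(...)` applies it. A minimal sketch of such an override file, assuming only the keys defined in `TEST_CONFIG` above (the `envs` entry is a made-up illustration, not a variable the samples require):

    # noxfile_config.py -- copied beside the sample being tested.
    TEST_CONFIG_OVERRIDE = {
        # Opt out of testing on Python 2.7 for this sample only.
        'ignored_versions': ["2.7"],
        # Use the per-build project instead of the shared default.
        'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
        # Extra, non-secret environment variables injected into pytest.
        'envs': {'AUTOML_REGION': 'us-central1'},  # hypothetical variable
    }

Any key left out of `TEST_CONFIG_OVERRIDE` keeps the default defined in the generated noxfile above.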
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/synth.metadata b/synth.metadata index 54dcde69..cf6467b0 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,23 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-automl.git", - "sha": "d6f05a13483cdeccc2532669686d8f6472867bd7" + "remote": "git@github.com:googleapis/python-automl.git", + "sha": "ec9cb308914fe2d5d19cf612c9a51a38ed77ee64" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5b85137bf6fb01dcf8a949a6a04eee6ed0c22bec", - "internalRef": "317760971" + "sha": "fb84629a56703d04f0b5304c4a9ade7313ebd92d", + "internalRef": "325339219" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" + "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "5f2f711c91199ba2f609d3f06a2fe22aee4e5be3" } } ], diff --git a/synth.py b/synth.py index 498fdba3..a09c3091 100644 --- a/synth.py +++ b/synth.py @@ -18,6 +18,7 @@ import synthtool as s from synthtool import gcp +from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -142,9 +143,11 @@ # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library( - unit_cov_level=82, cov_level=83 + unit_cov_level=82, cov_level=83, samples=True ) +python.py_samples(skip_readmes=True) + s.move(templated_files) # TODO(busunkim): Use latest sphinx after microgenerator transition From 046012075aa0f8948c429ed863ddeec3bed4d683 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 11 Aug 2020 05:34:43 +0000 Subject: [PATCH 24/64] chore: ignore sphinx warnings --- noxfile.py | 1 - synth.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index d86e996e..60759b9d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -154,7 +154,6 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", diff --git a/synth.py b/synth.py index a09c3091..16b099ef 100644 --- a/synth.py +++ b/synth.py @@ -157,8 +157,8 @@ # regex replaces are a brittle temporary solution. 
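# (For context: synthtool's s.replace(path, pattern, replacement) applies a
# regex substitution to a generated file in place, so the call below rewrites
# noxfile.py after templating; the triple-quoted strings are the regex
# pattern and its replacement.)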
s.replace( "noxfile.py", -""""-W", # warnings as errors -\s+"-T", \# show full traceback on exception""", +"""'-W', # warnings as errors +\s+'-T', \# show full traceback on exception""", """"-T", # show full traceback on exception""") From a1c285449c0c4d7d4a14d6deb1d6d639aaa7ac6d Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Fri, 15 Nov 2019 15:15:24 -0700 Subject: [PATCH 25/64] Add Set Endpoint Samples [(#2497)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2497) * Add Set Endpoint Samples * Add additional test result option * Sample Request update * Add filter_ --- samples/beta/requirements.txt | 1 + samples/beta/set_endpoint.py | 39 +++++++++++++++++++++++++++++++ samples/beta/set_endpoint_test.py | 26 +++++++++++++++++++++ 3 files changed, 66 insertions(+) create mode 100644 samples/beta/requirements.txt create mode 100644 samples/beta/set_endpoint.py create mode 100644 samples/beta/set_endpoint_test.py diff --git a/samples/beta/requirements.txt b/samples/beta/requirements.txt new file mode 100644 index 00000000..7d202213 --- /dev/null +++ b/samples/beta/requirements.txt @@ -0,0 +1 @@ +google-cloud-automl==0.7.0 diff --git a/samples/beta/set_endpoint.py b/samples/beta/set_endpoint.py new file mode 100644 index 00000000..436e427e --- /dev/null +++ b/samples/beta/set_endpoint.py @@ -0,0 +1,39 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def set_endpoint(project_id): + """Change your endpoint""" + # [START automl_set_endpoint] + from google.cloud import automl_v1beta1 as automl + + # You must first create a dataset, using the `eu` endpoint, before you can + # call other operations such as: list, get, import, delete, etc. + client_options = {'api_endpoint': 'eu-automl.googleapis.com:443'} + + # Instantiates a client + client = automl.AutoMlClient(client_options=client_options) + + # A resource that represents Google Cloud Platform location. + # project_id = 'YOUR_PROJECT_ID' + project_location = client.location_path(project_id, 'eu') + # [END automl_set_endpoint] + + # List all the datasets available + # Note: Create a dataset in `eu`, before calling `list_datasets`. + response = client.list_datasets( + project_location, filter_='') + + for dataset in response: + print(dataset) diff --git a/samples/beta/set_endpoint_test.py b/samples/beta/set_endpoint_test.py new file mode 100644 index 00000000..88a0164c --- /dev/null +++ b/samples/beta/set_endpoint_test.py @@ -0,0 +1,26 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import set_endpoint + +PROJECT_ID = os.environ['GCLOUD_PROJECT'] + + +def test_set_endpoint(capsys): + set_endpoint.set_endpoint(PROJECT_ID) + + out, _ = capsys.readouterr() + # Look for the display name + assert 'do_not_delete_me' in out From a951bcea33224695d1ad03ae3515ea78863e8a18 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 26/64] Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. * revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- samples/beta/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beta/requirements.txt b/samples/beta/requirements.txt index 7d202213..08350c58 100644 --- a/samples/beta/requirements.txt +++ b/samples/beta/requirements.txt @@ -1 +1 @@ -google-cloud-automl==0.7.0 +google-cloud-automl==0.9.0 From 34cb1b48056eb52b5215eddca837b5726a409630 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Fri, 31 Jan 2020 15:02:47 -0700 Subject: [PATCH 27/64] automl: video beta move model samples from branch to master [(#2754)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2754) * automl: video beta move model samples from branch to master * Fix region tag Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- samples/beta/delete_model.py | 31 ++++++++++++++ samples/beta/delete_model_test.py | 31 ++++++++++++++ samples/beta/get_model.py | 44 ++++++++++++++++++++ samples/beta/get_model_evaluation.py | 49 +++++++++++++++++++++++ samples/beta/get_model_evaluation_test.py | 44 ++++++++++++++++++++ samples/beta/get_model_test.py | 26 ++++++++++++ samples/beta/list_models.py | 47 ++++++++++++++++++++++ samples/beta/list_models_test.py | 25 ++++++++++++ 8 files changed, 297 insertions(+) create mode 100644 samples/beta/delete_model.py create mode 100644 samples/beta/delete_model_test.py create mode 100644 samples/beta/get_model.py create mode 100644 samples/beta/get_model_evaluation.py create mode 100644 samples/beta/get_model_evaluation_test.py create mode 100644 samples/beta/get_model_test.py create mode 100644 samples/beta/list_models.py create mode 100644 samples/beta/list_models_test.py diff --git a/samples/beta/delete_model.py b/samples/beta/delete_model.py new file mode 100644 index 00000000..030a2900 --- /dev/null +++ b/samples/beta/delete_model.py @@ -0,0 +1,31 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def delete_model(project_id, model_id): + """Delete a model.""" + # [START automl_delete_model_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # model_id = "YOUR_MODEL_ID" + + client = automl.AutoMlClient() + # Get the full path of the model. + model_full_id = client.model_path(project_id, "us-central1", model_id) + response = client.delete_model(model_full_id) + + print("Model deleted. {}".format(response.result())) + # [END automl_delete_model_beta] diff --git a/samples/beta/delete_model_test.py b/samples/beta/delete_model_test.py new file mode 100644 index 00000000..1d3548f3 --- /dev/null +++ b/samples/beta/delete_model_test.py @@ -0,0 +1,31 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import delete_model + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] + + +def test_delete_model(capsys): + # As model creation can take many hours, instead try to delete a + # nonexistent model and confirm that the model was not found, but other + # elements of the request were valid. 
+ try: + delete_model.delete_model(PROJECT_ID, "TRL0000000000000000000") + out, _ = capsys.readouterr() + assert "The model does not exist" in out + except Exception as e: + assert "The model does not exist" in e.message diff --git a/samples/beta/get_model.py b/samples/beta/get_model.py new file mode 100644 index 00000000..834dac0c --- /dev/null +++ b/samples/beta/get_model.py @@ -0,0 +1,44 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def get_model(project_id, model_id): + """Get a model.""" + # [START automl_get_model_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # model_id = "YOUR_MODEL_ID" + + client = automl.AutoMlClient() + # Get the full path of the model. + model_full_id = client.model_path(project_id, "us-central1", model_id) + model = client.get_model(model_full_id) + + # Retrieve deployment state. + if model.deployment_state == automl.enums.Model.DeploymentState.DEPLOYED: + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + # Display the model information. + print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + # [END automl_get_model_beta] diff --git a/samples/beta/get_model_evaluation.py b/samples/beta/get_model_evaluation.py new file mode 100644 index 00000000..147547c0 --- /dev/null +++ b/samples/beta/get_model_evaluation.py @@ -0,0 +1,49 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def get_model_evaluation(project_id, model_id, model_evaluation_id): + """Get model evaluation.""" + # [START automl_video_classification_get_model_evaluation_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # model_id = "YOUR_MODEL_ID" + # model_evaluation_id = "YOUR_MODEL_EVALUATION_ID" + + client = automl.AutoMlClient() + # Get the full path of the model evaluation. + model_evaluation_full_id = client.model_evaluation_path( + project_id, "us-central1", model_id, model_evaluation_id + ) + + # Get complete detail of the model evaluation. 
+ response = client.get_model_evaluation(model_evaluation_full_id) + + print("Model evaluation name: {}".format(response.name)) + print("Model annotation spec id: {}".format(response.annotation_spec_id)) + print("Create Time:") + print("\tseconds: {}".format(response.create_time.seconds)) + print("\tnanos: {}".format(response.create_time.nanos)) + print( + "Evaluation example count: {}".format(response.evaluated_example_count) + ) + + print( + "Classification model evaluation metrics: {}".format( + response.classification_evaluation_metrics + ) + ) + # [END automl_video_classification_get_model_evaluation_beta] diff --git a/samples/beta/get_model_evaluation_test.py b/samples/beta/get_model_evaluation_test.py new file mode 100644 index 00000000..5b2ecf36 --- /dev/null +++ b/samples/beta/get_model_evaluation_test.py @@ -0,0 +1,44 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from google.cloud import automl_v1beta1 as automl +import pytest + +import get_model_evaluation + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +MODEL_ID = os.environ["ENTITY_EXTRACTION_MODEL_ID"] + + +@pytest.fixture(scope="function") +def model_evaluation_id(): + client = automl.AutoMlClient() + model_full_id = client.model_path(PROJECT_ID, "us-central1", MODEL_ID) + generator = client.list_model_evaluations(model_full_id, "").pages + page = next(generator) + evaluation = page.next() + model_evaluation_id = evaluation.name.split( + "{}/modelEvaluations/".format(MODEL_ID) + )[1].split("\n")[0] + yield model_evaluation_id + + +def test_get_model_evaluation(capsys, model_evaluation_id): + get_model_evaluation.get_model_evaluation( + PROJECT_ID, MODEL_ID, model_evaluation_id + ) + out, _ = capsys.readouterr() + assert "Model evaluation name: " in out diff --git a/samples/beta/get_model_test.py b/samples/beta/get_model_test.py new file mode 100644 index 00000000..237ad6da --- /dev/null +++ b/samples/beta/get_model_test.py @@ -0,0 +1,26 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ +import os + +import get_model + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +MODEL_ID = os.environ["ENTITY_EXTRACTION_MODEL_ID"] + + +def test_get_model(capsys): + get_model.get_model(PROJECT_ID, MODEL_ID) + out, _ = capsys.readouterr() + assert "Model id: " in out diff --git a/samples/beta/list_models.py b/samples/beta/list_models.py new file mode 100644 index 00000000..7e9c7e34 --- /dev/null +++ b/samples/beta/list_models.py @@ -0,0 +1,47 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def list_models(project_id): + """List models.""" + # [START automl_list_models_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + + client = automl.AutoMlClient() + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, "us-central1") + response = client.list_models(project_location, "") + + print("List of models:") + for model in response: + # Display the model information. + if ( + model.deployment_state + == automl.enums.Model.DeploymentState.DEPLOYED + ): + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + # [END automl_list_models_beta] diff --git a/samples/beta/list_models_test.py b/samples/beta/list_models_test.py new file mode 100644 index 00000000..75f8c40a --- /dev/null +++ b/samples/beta/list_models_test.py @@ -0,0 +1,25 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import list_models + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] + + +def test_list_models(capsys): + list_models.list_models(PROJECT_ID) + out, _ = capsys.readouterr() + assert "Model id: " in out From f1df96c539e7d98bd0291d3bccab300dd7499829 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 6 Mar 2020 19:04:23 +0100 Subject: [PATCH 28/64] Update dependency google-cloud-automl to v0.10.0 [(#3033)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3033) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> --- samples/beta/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beta/requirements.txt b/samples/beta/requirements.txt index 08350c58..eb3be761 100644 --- a/samples/beta/requirements.txt +++ b/samples/beta/requirements.txt @@ -1 +1 @@ -google-cloud-automl==0.9.0 +google-cloud-automl==0.10.0 From ffcf2fcde16132970fdaa91c7304bb43aae64f72 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Tue, 10 Mar 2020 11:47:40 -0600 Subject: [PATCH 29/64] automl: move video object tracking samples out of branch [(#3043)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3043) * automl: move video object tracking samples out of branch * fix uuid and create test * fix project * use global var for testing * Update video_object_tracking_create_model_test.py --- .../video_object_tracking_create_dataset.py | 38 ++++++++++++++ ...deo_object_tracking_create_dataset_test.py | 51 +++++++++++++++++++ .../video_object_tracking_create_model.py | 42 +++++++++++++++ ...video_object_tracking_create_model_test.py | 47 +++++++++++++++++ 4 files changed, 178 insertions(+) create mode 100644 samples/beta/video_object_tracking_create_dataset.py create mode 100644 samples/beta/video_object_tracking_create_dataset_test.py create mode 100644 samples/beta/video_object_tracking_create_model.py create mode 100644 samples/beta/video_object_tracking_create_model_test.py diff --git a/samples/beta/video_object_tracking_create_dataset.py b/samples/beta/video_object_tracking_create_dataset.py new file mode 100644 index 00000000..2a651d0d --- /dev/null +++ b/samples/beta/video_object_tracking_create_dataset.py @@ -0,0 +1,38 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START automl_video_object_tracking_create_dataset_beta] +from google.cloud import automl_v1beta1 as automl + + +def create_dataset( + project_id="YOUR_PROJECT_ID", display_name="your_datasets_display_name" ): + """Create an AutoML video object tracking dataset.""" + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, "us-central1") + metadata = automl.types.VideoObjectTrackingDatasetMetadata() + dataset = automl.types.Dataset( + display_name=display_name, + video_object_tracking_dataset_metadata=metadata, + ) + + # Create a dataset with the dataset metadata in the region.
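# (Note: as used here and in the other v1beta1 dataset samples, create_dataset
# returns the Dataset message directly -- no operation.result() call is
# needed -- whereas create_model further below returns a long-running
# operation.)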
+ created_dataset = client.create_dataset(project_location, dataset) + # Display the dataset information + print("Dataset name: {}".format(created_dataset.name)) + print("Dataset id: {}".format(created_dataset.name.split("/")[-1])) +# [END automl_video_object_tracking_create_dataset_beta] diff --git a/samples/beta/video_object_tracking_create_dataset_test.py b/samples/beta/video_object_tracking_create_dataset_test.py new file mode 100644 index 00000000..96957f71 --- /dev/null +++ b/samples/beta/video_object_tracking_create_dataset_test.py @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import uuid + +from google.cloud import automl_v1beta1 as automl +import pytest + +import video_object_tracking_create_dataset + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +DATASET_ID = None + + +@pytest.fixture(scope="function", autouse=True) +def teardown(): + yield + + # Delete the created dataset + client = automl.AutoMlClient() + dataset_full_id = client.dataset_path( + PROJECT_ID, "us-central1", DATASET_ID + ) + response = client.delete_dataset(dataset_full_id) + response.result() + + +def test_video_object_tracking_create_dataset(capsys): + # create dataset + dataset_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] + video_object_tracking_create_dataset.create_dataset( + PROJECT_ID, dataset_name + ) + out, _ = capsys.readouterr() + assert "Dataset id: " in out + + # Get the dataset id for deletion + global DATASET_ID + DATASET_ID = out.splitlines()[1].split()[2] diff --git a/samples/beta/video_object_tracking_create_model.py b/samples/beta/video_object_tracking_create_model.py new file mode 100644 index 00000000..5ff8be98 --- /dev/null +++ b/samples/beta/video_object_tracking_create_model.py @@ -0,0 +1,42 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START automl_video_object_tracking_create_model_beta] +from google.cloud import automl_v1beta1 as automl + + +def create_model( + project_id="YOUR_PROJECT_ID", + dataset_id="YOUR_DATASET_ID", + display_name="your_models_display_name", +): + """Create an AutoML video object tracking model.""" + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location.
+ project_location = client.location_path(project_id, "us-central1") + # Leave model unset to use the default base model provided by Google + metadata = automl.types.VideoObjectTrackingModelMetadata() + model = automl.types.Model( + display_name=display_name, + dataset_id=dataset_id, + video_object_tracking_model_metadata=metadata, + ) + + # Create a model with the model metadata in the region. + response = client.create_model(project_location, model) + + print("Training operation name: {}".format(response.operation.name)) + print("Training started...") +# [END automl_video_object_tracking_create_model_beta] diff --git a/samples/beta/video_object_tracking_create_model_test.py b/samples/beta/video_object_tracking_create_model_test.py new file mode 100644 index 00000000..edfea3e4 --- /dev/null +++ b/samples/beta/video_object_tracking_create_model_test.py @@ -0,0 +1,47 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import uuid + +from google.cloud import automl_v1beta1 as automl +import pytest + +import video_object_tracking_create_model + +PROJECT_ID = os.environ["GCLOUD_PROJECT"] +DATASET_ID = "VOT2823376535338090496" +OPERATION_ID = None + + +@pytest.fixture(scope="function", autouse=True) +def teardown(): + yield + + # Cancel the training operation + client = automl.AutoMlClient() + client.transport._operations_client.cancel_operation(OPERATION_ID) + + +def test_video_object_tracking_create_model(capsys): + model_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] + video_object_tracking_create_model.create_model( + PROJECT_ID, DATASET_ID, model_name + ) + out, _ = capsys.readouterr() + assert "Training started" in out + + # Cancel the operation + global OPERATION_ID + OPERATION_ID = out.split("Training operation name: ")[1].split("\n")[0] From b9dcc42a8517a43f9a98c9e30141b004a0c69980 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Thu, 12 Mar 2020 09:15:07 -0600 Subject: [PATCH 30/64] automl: video beta - move beta samples out of branch and into master [(#2750)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2750) * automl: video beta - move beta samples out of branch and into master * lint * update error message on batch predict Co-authored-by: Leah E.
Cole <6719667+leahecole@users.noreply.github.com> --- samples/beta/batch_predict.py | 52 +++++++++++++++++++ samples/beta/batch_predict_test.py | 47 +++++++++++++++++ .../video_classification_create_dataset.py | 45 ++++++++++++++++ ...ideo_classification_create_dataset_test.py | 51 ++++++++++++++++++ .../beta/video_classification_create_model.py | 42 +++++++++++++++ .../video_classification_create_model_test.py | 46 ++++++++++++++++ 6 files changed, 283 insertions(+) create mode 100644 samples/beta/batch_predict.py create mode 100644 samples/beta/batch_predict_test.py create mode 100644 samples/beta/video_classification_create_dataset.py create mode 100644 samples/beta/video_classification_create_dataset_test.py create mode 100644 samples/beta/video_classification_create_model.py create mode 100644 samples/beta/video_classification_create_model_test.py diff --git a/samples/beta/batch_predict.py b/samples/beta/batch_predict.py new file mode 100644 index 00000000..7d634d2e --- /dev/null +++ b/samples/beta/batch_predict.py @@ -0,0 +1,52 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def batch_predict(project_id, model_id, input_uri, output_uri): + """Batch predict""" + # [START automl_batch_predict_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # model_id = "YOUR_MODEL_ID" + # input_uri = "gs://YOUR_BUCKET_ID/path/to/your/input/csv_or_jsonl" + # output_uri = "gs://YOUR_BUCKET_ID/path/to/save/results/" + + prediction_client = automl.PredictionServiceClient() + + # Get the full path of the model. + model_full_id = prediction_client.model_path( + project_id, "us-central1", model_id + ) + + gcs_source = automl.types.GcsSource(input_uris=[input_uri]) + + input_config = automl.types.BatchPredictInputConfig(gcs_source=gcs_source) + gcs_destination = automl.types.GcsDestination(output_uri_prefix=output_uri) + output_config = automl.types.BatchPredictOutputConfig( + gcs_destination=gcs_destination + ) + + response = prediction_client.batch_predict( + model_full_id, input_config, output_config + ) + + print("Waiting for operation to complete...") + print( + "Batch Prediction results saved to Cloud Storage bucket. {}".format( + response.result() + ) + ) + # [END automl_batch_predict_beta] diff --git a/samples/beta/batch_predict_test.py b/samples/beta/batch_predict_test.py new file mode 100644 index 00000000..2869873a --- /dev/null +++ b/samples/beta/batch_predict_test.py @@ -0,0 +1,47 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os + +import batch_predict + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +BUCKET_ID = "{}-lcm".format(PROJECT_ID) +MODEL_ID = "TEN0000000000000000000" +PREFIX = "TEST_EXPORT_OUTPUT_" + datetime.datetime.now().strftime( + "%Y%m%d%H%M%S" +) + + +def test_batch_predict(capsys): + # As batch prediction can take a long time, try to batch predict on a + # model and confirm that the model was not found, but other elements of + # the request were valid. + try: + input_uri = "gs://{}/entity-extraction/input.jsonl".format(BUCKET_ID) + output_uri = "gs://{}/{}/".format(BUCKET_ID, PREFIX) + batch_predict.batch_predict( + PROJECT_ID, MODEL_ID, input_uri, output_uri + ) + out, _ = capsys.readouterr() + assert ( + "does not exist" + in out + ) + except Exception as e: + assert ( + "does not exist" + in e.message + ) diff --git a/samples/beta/video_classification_create_dataset.py b/samples/beta/video_classification_create_dataset.py new file mode 100644 index 00000000..19bb271b --- /dev/null +++ b/samples/beta/video_classification_create_dataset.py @@ -0,0 +1,45 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_dataset(project_id, display_name): + """Create a dataset.""" + # [START automl_video_classification_create_dataset_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # display_name = "your_datasets_display_name" + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, "us-central1") + metadata = automl.types.VideoClassificationDatasetMetadata() + dataset = automl.types.Dataset( + display_name=display_name, + video_classification_dataset_metadata=metadata, + ) + + # Create a dataset with the dataset metadata in the region. + created_dataset = client.create_dataset(project_location, dataset) + + # Display the dataset information + print("Dataset name: {}".format(created_dataset.name)) + # To get the dataset id, you have to parse it out of the `name` field, + # as dataset IDs are required for other methods. + # Name Form: + # `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}` + print("Dataset id: {}".format(created_dataset.name.split("/")[-1])) + # [END automl_video_classification_create_dataset_beta] diff --git a/samples/beta/video_classification_create_dataset_test.py b/samples/beta/video_classification_create_dataset_test.py new file mode 100644 index 00000000..2851e42a --- /dev/null +++ b/samples/beta/video_classification_create_dataset_test.py @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import os + +from google.cloud import automl_v1beta1 as automl +import pytest + +import video_classification_create_dataset + + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +pytest.DATASET_ID = None + + +@pytest.fixture(scope="function", autouse=True) +def teardown(): + yield + + # Delete the created dataset + client = automl.AutoMlClient() + dataset_full_id = client.dataset_path( + PROJECT_ID, "us-central1", pytest.DATASET_ID + ) + response = client.delete_dataset(dataset_full_id) + response.result() + + +def test_video_classification_create_dataset(capsys): + # create dataset + dataset_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + video_classification_create_dataset.create_dataset( + PROJECT_ID, dataset_name + ) + out, _ = capsys.readouterr() + assert "Dataset id: " in out + + # Get the created dataset id for deletion + pytest.DATASET_ID = out.splitlines()[1].split()[2] diff --git a/samples/beta/video_classification_create_model.py b/samples/beta/video_classification_create_model.py new file mode 100644 index 00000000..7fbdfe73 --- /dev/null +++ b/samples/beta/video_classification_create_model.py @@ -0,0 +1,42 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_model(project_id, dataset_id, display_name): + """Create a model.""" + # [START automl_video_classification_create_model_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # dataset_id = "YOUR_DATASET_ID" + # display_name = "your_models_display_name" + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, "us-central1") + metadata = automl.types.VideoClassificationModelMetadata() + model = automl.types.Model( + display_name=display_name, + dataset_id=dataset_id, + video_classification_model_metadata=metadata, + ) + + # Create a model with the model metadata in the region.
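# (Note: create_model only starts training -- it returns a long-running
# operation whose name is printed below; the accompanying tests cancel that
# operation in their teardown fixtures via
# client.transport._operations_client.cancel_operation.)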
+ response = client.create_model(project_location, model) + + print("Training operation name: {}".format(response.operation.name)) + print("Training started...") + # [END automl_video_classification_create_model_beta] diff --git a/samples/beta/video_classification_create_model_test.py b/samples/beta/video_classification_create_model_test.py new file mode 100644 index 00000000..a9136300 --- /dev/null +++ b/samples/beta/video_classification_create_model_test.py @@ -0,0 +1,46 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from google.cloud import automl_v1beta1 as automl +import pytest + +import video_classification_create_model + +PROJECT_ID = os.environ["GCLOUD_PROJECT"] +DATASET_ID = "VCN510437278078730240" +pytest.OPERATION_ID = None + + +@pytest.fixture(scope="function", autouse=True) +def teardown(): + yield + + # Cancel the operation + client = automl.AutoMlClient() + client.transport._operations_client.cancel_operation(pytest.OPERATION_ID) + + +def test_video_classification_create_model(capsys): + video_classification_create_model.create_model( + PROJECT_ID, DATASET_ID, "classification_test_create_model" + ) + out, _ = capsys.readouterr() + assert "Training started" in out + + # Get the operation id for cancellation + pytest.OPERATION_ID = out.split("Training operation name: ")[1].split( + "\n" + )[0] From bceba2c16c152165eb32dd67844c1bbdf7a3e0e4 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Thu, 12 Mar 2020 13:56:04 -0600 Subject: [PATCH 31/64] automl: move samples to beta set [(#3045)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3045) --- samples/beta/get_model_evaluation.py | 11 +++++ samples/beta/get_operation_status.py | 34 +++++++++++++++ samples/beta/get_operation_status_test.py | 40 +++++++++++++++++ samples/beta/import_dataset.py | 39 +++++++++++++++++ samples/beta/import_dataset_test.py | 41 ++++++++++++++++++ samples/beta/list_datasets.py | 52 +++++++++++++++++++++++ samples/beta/list_datasets_test.py | 27 ++++++++++++ 7 files changed, 244 insertions(+) create mode 100644 samples/beta/get_operation_status.py create mode 100644 samples/beta/get_operation_status_test.py create mode 100644 samples/beta/import_dataset.py create mode 100644 samples/beta/import_dataset_test.py create mode 100644 samples/beta/list_datasets.py create mode 100644 samples/beta/list_datasets_test.py diff --git a/samples/beta/get_model_evaluation.py b/samples/beta/get_model_evaluation.py index 147547c0..ed540f2e 100644 --- a/samples/beta/get_model_evaluation.py +++ b/samples/beta/get_model_evaluation.py @@ -16,6 +16,7 @@ def get_model_evaluation(project_id, model_id, model_evaluation_id): """Get model evaluation.""" # [START automl_video_classification_get_model_evaluation_beta] + # [START automl_video_object_tracking_get_model_evaluation_beta] from google.cloud import automl_v1beta1 as automl # TODO(developer): Uncomment and set the following variables @@ -41,9 +42,19 @@ def get_model_evaluation(project_id, model_id,
model_evaluation_id): "Evaluation example count: {}".format(response.evaluated_example_count) ) + # [END automl_video_object_tracking_get_model_evaluation_beta] + print( "Classification model evaluation metrics: {}".format( response.classification_evaluation_metrics ) ) # [END automl_video_classification_get_model_evaluation_beta] + + # [START automl_video_object_tracking_get_model_evaluation_beta] + print( + "Video object tracking model evaluation metrics: {}".format( + response.video_object_tracking_evaluation_metrics + ) + ) + # [END automl_video_object_tracking_get_model_evaluation_beta] diff --git a/samples/beta/get_operation_status.py b/samples/beta/get_operation_status.py new file mode 100644 index 00000000..f376e246 --- /dev/null +++ b/samples/beta/get_operation_status.py @@ -0,0 +1,34 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START automl_get_operation_status_beta] +from google.cloud import automl_v1beta1 as automl + + +def get_operation_status( + operation_full_id="projects/YOUR_PROJECT_ID/locations/us-central1/" + "operations/YOUR_OPERATION_ID", +): + """Get operation status.""" + client = automl.AutoMlClient() + + # Get the latest state of a long-running operation. + response = client.transport._operations_client.get_operation( + operation_full_id + ) + + print("Name: {}".format(response.name)) + print("Operation details:") + print(response) +# [END automl_get_operation_status_beta] diff --git a/samples/beta/get_operation_status_test.py b/samples/beta/get_operation_status_test.py new file mode 100644 index 00000000..7da9e7b3 --- /dev/null +++ b/samples/beta/get_operation_status_test.py @@ -0,0 +1,40 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
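The operation name printed by the create_model samples above is exactly what get_operation_status expects, and the same hidden operations client used in the tests can cancel it. A rough sketch of tying the two together (op_name stands in for the "Training operation name: ..." value printed during training; this glue code is illustrative, under those assumptions):

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()
    # op_name, e.g.
    # "projects/[PROJECT]/locations/us-central1/operations/[OPERATION_ID]"
    op = client.transport._operations_client.get_operation(op_name)
    if op.done:
        print("Operation finished: {}".format(op.name))
    else:
        # Same call the create_model tests use in their teardown fixtures.
        client.transport._operations_client.cancel_operation(op_name)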
+ +import os + +from google.cloud import automl_v1beta1 as automl +import pytest + +import get_operation_status + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] + + +@pytest.fixture(scope="function") +def operation_id(): + client = automl.AutoMlClient() + project_location = client.location_path(PROJECT_ID, "us-central1") + generator = client.transport._operations_client.list_operations( + project_location, filter_="" + ).pages + page = next(generator) + operation = page.next() + yield operation.name + + +def test_get_operation_status(capsys, operation_id): + get_operation_status.get_operation_status(operation_id) + out, _ = capsys.readouterr() + assert "Operation details" in out diff --git a/samples/beta/import_dataset.py b/samples/beta/import_dataset.py new file mode 100644 index 00000000..4a1f1d92 --- /dev/null +++ b/samples/beta/import_dataset.py @@ -0,0 +1,39 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START automl_import_data_beta] +from google.cloud import automl_v1beta1 as automl + + +def import_dataset( + project_id="YOUR_PROJECT_ID", + dataset_id="YOUR_DATASET_ID", + path="gs://YOUR_BUCKET_ID/path/to/data.csv", +): + """Import a dataset.""" + client = automl.AutoMlClient() + # Get the full path of the dataset. + dataset_full_id = client.dataset_path( + project_id, "us-central1", dataset_id + ) + # Get the multiple Google Cloud Storage URIs + input_uris = path.split(",") + gcs_source = automl.types.GcsSource(input_uris=input_uris) + input_config = automl.types.InputConfig(gcs_source=gcs_source) + # Import data from the input URI + response = client.import_data(dataset_full_id, input_config) + + print("Processing import...") + print("Data imported. {}".format(response.result())) +# [END automl_import_data_beta] diff --git a/samples/beta/import_dataset_test.py b/samples/beta/import_dataset_test.py new file mode 100644 index 00000000..35d23edc --- /dev/null +++ b/samples/beta/import_dataset_test.py @@ -0,0 +1,41 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +import import_dataset + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +BUCKET_ID = "{}-lcm".format(PROJECT_ID) +DATASET_ID = "TEN0000000000000000000" + + +def test_import_dataset(capsys): + # As importing a dataset can take a long time and only four operations can + # be run on a dataset at once. Try to import into a nonexistent dataset and + # confirm that the dataset was not found, but other elements of the request + # were valid. 
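+    # (A dataset id that does not exist makes the service reject the request
+    # immediately instead of kicking off a real, slow import.)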
+ try: + data = "gs://{}/sentiment-analysis/dataset.csv".format(BUCKET_ID) + import_dataset.import_dataset(PROJECT_ID, DATASET_ID, data) + out, _ = capsys.readouterr() + assert ( + "The Dataset doesn't exist or is inaccessible for use with AutoMl." + in out + ) + except Exception as e: + assert ( + "The Dataset doesn't exist or is inaccessible for use with AutoMl." + in e.message + ) diff --git a/samples/beta/list_datasets.py b/samples/beta/list_datasets.py new file mode 100644 index 00000000..1fe49040 --- /dev/null +++ b/samples/beta/list_datasets.py @@ -0,0 +1,52 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START automl_video_classification_list_datasets_beta] +# [START automl_video_object_tracking_list_datasets_beta] +from google.cloud import automl_v1beta1 as automl + + +def list_datasets(project_id="YOUR_PROJECT_ID"): + """List datasets.""" + client = automl.AutoMlClient() + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, "us-central1") + + # List all the datasets available in the region. + response = client.list_datasets(project_location, "") + + print("List of datasets:") + for dataset in response: + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + # [END automl_video_object_tracking_list_datasets_beta] + + print( + "Video classification dataset metadata: {}".format( + dataset.video_classification_dataset_metadata + ) + ) + # [END automl_video_classification_list_datasets_beta] + + # [START automl_video_object_tracking_list_datasets_beta] + print( + "Video object tracking dataset metadata: {}".format( + dataset.video_object_tracking_dataset_metadata + ) + ) + # [END automl_video_object_tracking_list_datasets_beta] diff --git a/samples/beta/list_datasets_test.py b/samples/beta/list_datasets_test.py new file mode 100644 index 00000000..7057af81 --- /dev/null +++ b/samples/beta/list_datasets_test.py @@ -0,0 +1,27 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
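+
+# Smoke test for list_datasets.py: lists the project's datasets and checks
+# that the id of a known dataset appears in the printed output.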
+ +import os + +import list_datasets + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +DATASET_ID = os.environ["ENTITY_EXTRACTION_DATASET_ID"] + + +def test_list_dataset(capsys): + # list datasets + list_datasets.list_datasets(PROJECT_ID) + out, _ = capsys.readouterr() + assert "Dataset id: {}".format(DATASET_ID) in out From 4b1c8da3fe182123c5f6f2e7ed69a407f2e258c0 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Tue, 17 Mar 2020 15:20:55 -0600 Subject: [PATCH 32/64] automl: video beta samples move from branch to master [(#2753)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2753) * automl: video beta samples move from branch to master * fix test for when create dataset doesn't return an LRO Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: gcf-merge-on-green[bot] <60162190+gcf-merge-on-green[bot]@users.noreply.github.com> --- samples/beta/delete_dataset.py | 33 +++++++++++++++++++++ samples/beta/delete_dataset_test.py | 46 +++++++++++++++++++++++++++++ samples/beta/import_dataset.py | 1 + samples/beta/list_datasets.py | 1 + 4 files changed, 81 insertions(+) create mode 100644 samples/beta/delete_dataset.py create mode 100644 samples/beta/delete_dataset_test.py diff --git a/samples/beta/delete_dataset.py b/samples/beta/delete_dataset.py new file mode 100644 index 00000000..f1935e2b --- /dev/null +++ b/samples/beta/delete_dataset.py @@ -0,0 +1,33 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def delete_dataset(project_id, dataset_id): + """Delete a dataset.""" + # [START automl_delete_dataset_beta] + from google.cloud import automl_v1beta1 as automl + + # TODO(developer): Uncomment and set the following variables + # project_id = "YOUR_PROJECT_ID" + # dataset_id = "YOUR_DATASET_ID" + + client = automl.AutoMlClient() + # Get the full path of the dataset + dataset_full_id = client.dataset_path( + project_id, "us-central1", dataset_id + ) + response = client.delete_dataset(dataset_full_id) + + print("Dataset deleted. {}".format(response.result())) + # [END automl_delete_dataset_beta] diff --git a/samples/beta/delete_dataset_test.py b/samples/beta/delete_dataset_test.py new file mode 100644 index 00000000..d736aa4d --- /dev/null +++ b/samples/beta/delete_dataset_test.py @@ -0,0 +1,46 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
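+
+# Creates a throwaway text-extraction dataset in a fixture, then checks that
+# delete_dataset.py deletes it and prints a confirmation.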
+ +import datetime +import os + +from google.cloud import automl_v1beta1 as automl +import pytest + +import delete_dataset + +PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] +BUCKET_ID = "{}-lcm".format(PROJECT_ID) + + +@pytest.fixture(scope="function") +def dataset_id(): + client = automl.AutoMlClient() + project_location = client.location_path(PROJECT_ID, "us-central1") + display_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + metadata = automl.types.TextExtractionDatasetMetadata() + dataset = automl.types.Dataset( + display_name=display_name, text_extraction_dataset_metadata=metadata + ) + dataset = client.create_dataset(project_location, dataset) + dataset_id = dataset.name.split("/")[-1] + + yield dataset_id + + +def test_delete_dataset(capsys, dataset_id): + # delete dataset + delete_dataset.delete_dataset(PROJECT_ID, dataset_id) + out, _ = capsys.readouterr() + assert "Dataset deleted." in out diff --git a/samples/beta/import_dataset.py b/samples/beta/import_dataset.py index 4a1f1d92..97f1c0b8 100644 --- a/samples/beta/import_dataset.py +++ b/samples/beta/import_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + # [START automl_import_data_beta] from google.cloud import automl_v1beta1 as automl diff --git a/samples/beta/list_datasets.py b/samples/beta/list_datasets.py index 1fe49040..5d5c83a3 100644 --- a/samples/beta/list_datasets.py +++ b/samples/beta/list_datasets.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + # [START automl_video_classification_list_datasets_beta] # [START automl_video_object_tracking_list_datasets_beta] from google.cloud import automl_v1beta1 as automl From 06e3ce6888697fa5688fcb8149c38d240cf51fc8 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Thu, 19 Mar 2020 17:32:24 -0600 Subject: [PATCH 33/64] automl: move samples into beta set [(#3044)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3044) --- samples/beta/batch_predict.py | 20 ++++++++++---------- samples/beta/delete_dataset.py | 13 +++++-------- samples/beta/delete_dataset_test.py | 8 ++++---- 3 files changed, 19 insertions(+), 22 deletions(-) diff --git a/samples/beta/batch_predict.py b/samples/beta/batch_predict.py index 7d634d2e..911eec73 100644 --- a/samples/beta/batch_predict.py +++ b/samples/beta/batch_predict.py @@ -13,17 +13,17 @@ # limitations under the License. -def batch_predict(project_id, model_id, input_uri, output_uri): - """Batch predict""" - # [START automl_batch_predict_beta] - from google.cloud import automl_v1beta1 as automl +# [START automl_batch_predict_beta] +from google.cloud import automl_v1beta1 as automl - # TODO(developer): Uncomment and set the following variables - # project_id = "YOUR_PROJECT_ID" - # model_id = "YOUR_MODEL_ID" - # input_uri = "gs://YOUR_BUCKET_ID/path/to/your/input/csv_or_jsonl" - # output_uri = "gs://YOUR_BUCKET_ID/path/to/save/results/" +def batch_predict( + project_id="YOUR_PROJECT_ID", + model_id="YOUR_MODEL_ID", + input_uri="gs://YOUR_BUCKET_ID/path/to/your/input/csv_or_jsonl", + output_uri="gs://YOUR_BUCKET_ID/path/to/save/results/", +): + """Batch predict""" prediction_client = automl.PredictionServiceClient() # Get the full path of the model. 
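The hunk above moves the sample's arguments into keyword defaults, so the module can be imported and called directly. A minimal sketch of such a call follows; the project, model, and bucket ids are placeholders, and a real run needs valid credentials:

import batch_predict  # the sample module patched above

# All ids here are hypothetical; replace them before running.
batch_predict.batch_predict(
    project_id="my-project",
    model_id="VCN0000000000000000000",
    input_uri="gs://my-bucket/input/rows.csv",
    output_uri="gs://my-bucket/results/",
)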
@@ -49,4 +49,4 @@ def batch_predict(project_id, model_id, input_uri, output_uri): response.result() ) ) - # [END automl_batch_predict_beta] +# [END automl_batch_predict_beta] diff --git a/samples/beta/delete_dataset.py b/samples/beta/delete_dataset.py index f1935e2b..51647758 100644 --- a/samples/beta/delete_dataset.py +++ b/samples/beta/delete_dataset.py @@ -13,15 +13,12 @@ # limitations under the License. -def delete_dataset(project_id, dataset_id): - """Delete a dataset.""" - # [START automl_delete_dataset_beta] - from google.cloud import automl_v1beta1 as automl +# [START automl_delete_dataset_beta] +from google.cloud import automl_v1beta1 as automl - # TODO(developer): Uncomment and set the following variables - # project_id = "YOUR_PROJECT_ID" - # dataset_id = "YOUR_DATASET_ID" +def delete_dataset(project_id="YOUR_PROJECT_ID", dataset_id="YOUR_DATASET_ID"): + """Delete a dataset.""" client = automl.AutoMlClient() # Get the full path of the dataset dataset_full_id = client.dataset_path( @@ -30,4 +27,4 @@ def delete_dataset(project_id, dataset_id): response = client.delete_dataset(dataset_full_id) print("Dataset deleted. {}".format(response.result())) - # [END automl_delete_dataset_beta] +# [END automl_delete_dataset_beta] diff --git a/samples/beta/delete_dataset_test.py b/samples/beta/delete_dataset_test.py index d736aa4d..9781ad26 100644 --- a/samples/beta/delete_dataset_test.py +++ b/samples/beta/delete_dataset_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime import os +import uuid from google.cloud import automl_v1beta1 as automl import pytest @@ -28,13 +28,13 @@ def dataset_id(): client = automl.AutoMlClient() project_location = client.location_path(PROJECT_ID, "us-central1") - display_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + display_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] metadata = automl.types.TextExtractionDatasetMetadata() dataset = automl.types.Dataset( display_name=display_name, text_extraction_dataset_metadata=metadata ) - dataset = client.create_dataset(project_location, dataset) - dataset_id = dataset.name.split("/")[-1] + response = client.create_dataset(project_location, dataset) + dataset_id = response.name.split("/")[-1] yield dataset_id From 5f2a5791025acb2213fb3a0b140c48365c676544 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 34/64] Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. 
* Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot --- samples/beta/requirements-test.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 samples/beta/requirements-test.txt diff --git a/samples/beta/requirements-test.txt b/samples/beta/requirements-test.txt new file mode 100644 index 00000000..781d4326 --- /dev/null +++ b/samples/beta/requirements-test.txt @@ -0,0 +1 @@ +pytest==5.3.2 From 3e31d7a99a072fa31ed6c7c3b1810ad8331e5486 Mon Sep 17 00:00:00 2001 From: Noah Negrey Date: Thu, 2 Apr 2020 15:32:53 -0600 Subject: [PATCH 35/64] automl: move video classification samples out of branch [(#3042)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3042) * automl: move video classification samples out of branch * fix uuid and create test * fix project * use global for testing * Update video_classification_create_model.py Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> --- .../video_classification_create_dataset.py | 17 ++++++++-------- ...ideo_classification_create_dataset_test.py | 13 ++++++------ .../beta/video_classification_create_model.py | 20 +++++++++---------- .../video_classification_create_model_test.py | 17 ++++++++-------- 4 files changed, 35 insertions(+), 32 deletions(-) diff --git a/samples/beta/video_classification_create_dataset.py b/samples/beta/video_classification_create_dataset.py index 19bb271b..086f98f0 100644 --- a/samples/beta/video_classification_create_dataset.py +++ b/samples/beta/video_classification_create_dataset.py @@ -13,14 +13,14 @@ # limitations under the License. -def create_dataset(project_id, display_name): - """Create a dataset.""" - # [START automl_video_classification_create_dataset_beta] - from google.cloud import automl_v1beta1 as automl +# [START automl_video_classification_create_dataset_beta] +from google.cloud import automl_v1beta1 as automl - # TODO(developer): Uncomment and set the following variables - # project_id = "YOUR_PROJECT_ID" - # display_name = "your_datasets_display_name" + +def create_dataset( + project_id="YOUR_PROJECT_ID", display_name="your_datasets_display_name" +): + """Create a automl video classification dataset.""" client = automl.AutoMlClient() @@ -37,9 +37,10 @@ def create_dataset(project_id, display_name): # Display the dataset information print("Dataset name: {}".format(created_dataset.name)) + # To get the dataset id, you have to parse it out of the `name` field. # As dataset Ids are required for other methods. # Name Form: # `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}` print("Dataset id: {}".format(created_dataset.name.split("/")[-1])) - # [END automl_video_classification_create_dataset_beta] +# [END automl_video_classification_create_dataset_beta] diff --git a/samples/beta/video_classification_create_dataset_test.py b/samples/beta/video_classification_create_dataset_test.py index 2851e42a..443f5042 100644 --- a/samples/beta/video_classification_create_dataset_test.py +++ b/samples/beta/video_classification_create_dataset_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
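+# The test now shares the created dataset id with the teardown fixture
+# through a module-level global instead of an attribute stashed on the
+# pytest module object.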
-import datetime import os +import uuid from google.cloud import automl_v1beta1 as automl import pytest @@ -22,7 +22,7 @@ PROJECT_ID = os.environ["AUTOML_PROJECT_ID"] -pytest.DATASET_ID = None +DATASET_ID = None @pytest.fixture(scope="function", autouse=True) @@ -32,7 +32,7 @@ def teardown(): # Delete the created dataset client = automl.AutoMlClient() dataset_full_id = client.dataset_path( - PROJECT_ID, "us-central1", pytest.DATASET_ID + PROJECT_ID, "us-central1", DATASET_ID ) response = client.delete_dataset(dataset_full_id) response.result() @@ -40,12 +40,13 @@ def teardown(): def test_video_classification_create_dataset(capsys): # create dataset - dataset_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + dataset_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] video_classification_create_dataset.create_dataset( PROJECT_ID, dataset_name ) out, _ = capsys.readouterr() assert "Dataset id: " in out - # Get the the created dataset id for deletion - pytest.DATASET_ID = out.splitlines()[1].split()[2] + # Get the dataset id for deletion + global DATASET_ID + DATASET_ID = out.splitlines()[1].split()[2] diff --git a/samples/beta/video_classification_create_model.py b/samples/beta/video_classification_create_model.py index 7fbdfe73..5bf19b4e 100644 --- a/samples/beta/video_classification_create_model.py +++ b/samples/beta/video_classification_create_model.py @@ -12,21 +12,21 @@ # See the License for the specific language governing permissions and # limitations under the License. +# [START automl_video_classification_create_model_beta] +from google.cloud import automl_v1beta1 as automl -def create_model(project_id, dataset_id, display_name): - """Create a model.""" - # [START automl_video_classification_create_model_beta] - from google.cloud import automl_v1beta1 as automl - - # TODO(developer): Uncomment and set the following variables - # project_id = "YOUR_PROJECT_ID" - # dataset_id = "YOUR_DATASET_ID" - # display_name = "your_models_display_name" +def create_model( + project_id="YOUR_PROJECT_ID", + dataset_id="YOUR_DATASET_ID", + display_name="your_models_display_name", +): + """Create a automl video classification model.""" client = automl.AutoMlClient() # A resource that represents Google Cloud Platform location. project_location = client.location_path(project_id, "us-central1") + # Leave model unset to use the default base model provided by Google metadata = automl.types.VideoClassificationModelMetadata() model = automl.types.Model( display_name=display_name, @@ -39,4 +39,4 @@ def create_model(project_id, dataset_id, display_name): print("Training operation name: {}".format(response.operation.name)) print("Training started...") - # [END automl_video_classification_create_model_beta] +# [END automl_video_classification_create_model_beta] diff --git a/samples/beta/video_classification_create_model_test.py b/samples/beta/video_classification_create_model_test.py index a9136300..25acb7d6 100644 --- a/samples/beta/video_classification_create_model_test.py +++ b/samples/beta/video_classification_create_model_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
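+# Model training is long-running, so the test below only asserts that
+# training starts; the teardown fixture cancels the operation afterwards.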
import os +import uuid from google.cloud import automl_v1beta1 as automl import pytest @@ -21,26 +22,26 @@ PROJECT_ID = os.environ["GCLOUD_PROJECT"] DATASET_ID = "VCN510437278078730240" -pytest.OPERATION_ID = None +OPERATION_ID = None @pytest.fixture(scope="function", autouse=True) def teardown(): yield - # Cancel the operation + # Cancel the training operation client = automl.AutoMlClient() - client.transport._operations_client.cancel_operation(pytest.OPERATION_ID) + client.transport._operations_client.cancel_operation(OPERATION_ID) def test_video_classification_create_model(capsys): + model_name = "test_{}".format(uuid.uuid4()).replace("-", "")[:32] video_classification_create_model.create_model( - PROJECT_ID, DATASET_ID, "classification_test_create_model" + PROJECT_ID, DATASET_ID, model_name ) out, _ = capsys.readouterr() assert "Training started" in out - # Get the the operation id for cancellation - pytest.OPERATION_ID = out.split("Training operation name: ")[1].split( - "\n" - )[0] + # Cancel the operation + global OPERATION_ID + OPERATION_ID = out.split("Training operation name: ")[1].split("\n")[0] From ceb7e4b5756aa89864daed712fd6190b5a907608 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 12 May 2020 11:02:39 -0700 Subject: [PATCH 36/64] chore: some lint fixes [(#3737)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3737) --- samples/beta/set_endpoint_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samples/beta/set_endpoint_test.py b/samples/beta/set_endpoint_test.py index 88a0164c..e1518fe9 100644 --- a/samples/beta/set_endpoint_test.py +++ b/samples/beta/set_endpoint_test.py @@ -13,8 +13,10 @@ # limitations under the License. import os + import set_endpoint + PROJECT_ID = os.environ['GCLOUD_PROJECT'] From e87f5d14f625073a6ef44e01f6a19d3516472c55 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Tue, 9 Jun 2020 14:34:27 -0700 Subject: [PATCH 37/64] Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. 
[(#4022)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4022) --- samples/beta/set_endpoint_test.py | 2 +- samples/beta/video_classification_create_model_test.py | 2 +- samples/beta/video_object_tracking_create_model_test.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/beta/set_endpoint_test.py b/samples/beta/set_endpoint_test.py index e1518fe9..02339311 100644 --- a/samples/beta/set_endpoint_test.py +++ b/samples/beta/set_endpoint_test.py @@ -17,7 +17,7 @@ import set_endpoint -PROJECT_ID = os.environ['GCLOUD_PROJECT'] +PROJECT_ID = os.environ['GOOGLE_CLOUD_PROJECT'] def test_set_endpoint(capsys): diff --git a/samples/beta/video_classification_create_model_test.py b/samples/beta/video_classification_create_model_test.py index 25acb7d6..593166cb 100644 --- a/samples/beta/video_classification_create_model_test.py +++ b/samples/beta/video_classification_create_model_test.py @@ -20,7 +20,7 @@ import video_classification_create_model -PROJECT_ID = os.environ["GCLOUD_PROJECT"] +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] DATASET_ID = "VCN510437278078730240" OPERATION_ID = None diff --git a/samples/beta/video_object_tracking_create_model_test.py b/samples/beta/video_object_tracking_create_model_test.py index edfea3e4..a06d65bf 100644 --- a/samples/beta/video_object_tracking_create_model_test.py +++ b/samples/beta/video_object_tracking_create_model_test.py @@ -20,7 +20,7 @@ import video_object_tracking_create_model -PROJECT_ID = os.environ["GCLOUD_PROJECT"] +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] DATASET_ID = "VOT2823376535338090496" OPERATION_ID = None From 70ef881137009d8dc382e4c5e27a169f77cae457 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 20 Jun 2020 01:16:04 +0200 Subject: [PATCH 38/64] chore(deps): update dependency google-cloud-automl to v1 [(#4127)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4127) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-automl](https://togithub.com/googleapis/python-automl) | major | `==0.10.0` -> `==1.0.1` | --- ### Release Notes
googleapis/python-automl: [`v1.0.1`](https://togithub.com/googleapis/python-automl/blob/master/CHANGELOG.md#101-httpswwwgithubcomgoogleapispython-automlcomparev100v101-2020-06-18) ([Compare Source](https://togithub.com/googleapis/python-automl/compare/v0.10.0...v1.0.1))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- samples/beta/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beta/requirements.txt b/samples/beta/requirements.txt index eb3be761..867dfc61 100644 --- a/samples/beta/requirements.txt +++ b/samples/beta/requirements.txt @@ -1 +1 @@ -google-cloud-automl==0.10.0 +google-cloud-automl==1.0.1 From 5ddd96b40c2860e479078830b37e1653fa01ce0a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 13 Jul 2020 00:46:30 +0200 Subject: [PATCH 39/64] chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole --- samples/beta/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beta/requirements-test.txt b/samples/beta/requirements-test.txt index 781d4326..79738af5 100644 --- a/samples/beta/requirements-test.txt +++ b/samples/beta/requirements-test.txt @@ -1 +1 @@ -pytest==5.3.2 +pytest==5.4.3 From 641b3d19880606e951a5170562f4627bc4b5c904 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 1 Aug 2020 21:51:00 +0200 Subject: [PATCH 40/64] Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) --- samples/beta/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/beta/requirements-test.txt b/samples/beta/requirements-test.txt index 79738af5..7e460c8c 100644 --- a/samples/beta/requirements-test.txt +++ b/samples/beta/requirements-test.txt @@ -1 +1 @@ -pytest==5.4.3 +pytest==6.0.1 From e34f49a92aaacb7f630cf4a6cc0e6fb4b3a87985 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Fri, 7 Aug 2020 18:00:26 +0000 Subject: [PATCH 41/64] chore: update templates --- noxfile.py | 34 ++++++ samples/beta/noxfile.py | 224 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 258 insertions(+) create mode 100644 samples/beta/noxfile.py diff --git a/noxfile.py b/noxfile.py index 60759b9d..df117d09 100644 --- a/noxfile.py +++ b/noxfile.py @@ -154,6 +154,7 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", + "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", @@ -196,3 +197,36 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".[pandas,storage]") + session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + 
"extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/samples/beta/noxfile.py b/samples/beta/noxfile.py new file mode 100644 index 00000000..ba55d7ce --- /dev/null +++ b/samples/beta/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
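+    # Walk upward from the current directory until a .git folder is found.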
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) From 28697ed088b910daf2bc4f7014fae234252f596e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 12 Aug 2020 21:48:31 -0700 Subject: [PATCH 42/64] chore: ignore warnings in sphinx --- noxfile.py | 1 - 1 file changed, 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index df117d09..9a2db338 100644 --- a/noxfile.py +++ b/noxfile.py @@ -154,7 +154,6 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", From f49335ada2572a3897b3aa021d9b8486b037cb2e Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Thu, 13 Aug 2020 05:37:10 +0000 Subject: [PATCH 43/64] chore: fix batch predict test --- samples/beta/batch_predict.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/beta/batch_predict.py b/samples/beta/batch_predict.py index 911eec73..8dd5acef 100644 --- a/samples/beta/batch_predict.py +++ b/samples/beta/batch_predict.py @@ -38,9 +38,10 @@ def batch_predict( output_config = automl.types.BatchPredictOutputConfig( gcs_destination=gcs_destination ) + params = {} response = prediction_client.batch_predict( - model_full_id, input_config, output_config + model_full_id, input_config, output_config, params=params ) print("Waiting for operation to complete...") From d225a5f97c2823218b91a79e77d3383132875231 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 13 Aug 2020 16:15:14 -0700 Subject: [PATCH 44/64] docs: add samples from tables/automl (#54) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Tables Notebooks [(#2090)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2090) * initial commit * update census * update notebooks * remove the reference to a bug [(#2100)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2100) as the bug has been fixed in the public client lib * delete this file. [(#2102)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2102) * rename file name [(#2103)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2103) * trying to fix images [(#2101)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2101) * remove typo in installation [(#2110)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2110) * Rename census_income_prediction.ipynb to getting_started_notebook.ipynb [(#2115)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2115) renaming the notebooks as Getting Started (will be in sync with the doc). 
It will be great if the folder could be renamed too * added back missing file package import [(#2150)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2150) * added back missing file import [(#2145)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2145) * remove incorrect reference to Iris dataset [(#2203)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2203) * conversion to jupyter/colab [(#2340)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2340) plus bug fixes * updated for the Jupyter support [(#2337)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2337) * updated readme for support Jupyter [(#2336)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2336) to approve with the updated notebook supporting jupyter * conversion to jupyer/colab [(#2339)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2339) plus bug fixes * conversion of notebook for jupyter/Colab [(#2338)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2338) conversion of the notebook to support both Jupyter and Colab + bug fixes * [BLOCKED] AutoML Tables: Docs samples updated to use new (pending) client [(#2276)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2276) * AutoML Tables: Docs samples updated to use new (pending) client * Linter warnings * add product recommendation for automl tables notebook [(#2257)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2257) * added colab filtering notebook * update to tables client * update readme * tell user to restart kernel for automl * AutoML Tables: Notebook samples updated to use new tables client [(#2424)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2424) * fix users bug and emphasize kernal restart [(#2407)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2407) * fix problems with automl docs [(#2501)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2501) Today when we try to use the function `batch_predict` follow the docs we receive and error saying: `the paramaters should be a pandas.Dataframe` it’s happens because the first parameter of the function `batch_predict` is a pandas.Dataframe. To solve this problem we need to use de named parameters of python. * Fix typo in GCS URI parameter [(#2459)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2459) * fix: fix tables notebook links and bugs [(#2601)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2601) * feat(tables): update samples to show explainability [(#2523)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2523) * show xai * local feature importance * use updated client * use fixed library * use new model * Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh * Update dependency google-cloud-automl to v0.10.0 [(#3033)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3033) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> * Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot * chore: some lint fixes [(#3750)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3750) * automl: tables code sample clean-up [(#3571)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3571) * delete unused tables_dataset samples * delete args code associated with unused automl_tables samples * delete tests associated with unused automl_tables samples * restore get_dataset method/yargs without region tagging * Restore update_dataset methodsa without region tagging Co-authored-by: Takashi Matsuo Co-authored-by: Leah E. Cole <6719667+leahecole@users.noreply.github.com> * add example of creating AutoML Tables client with non-default endpoint ('new' sdk) [(#3929)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3929) * add example of creating client with non-default endpoint * more test file cleanup * move connectivity print stmt out of test fn Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Co-authored-by: Torry Yang * Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. [(#4022)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4022) * chore(deps): update dependency google-cloud-automl to v1 [(#4127)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4127) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-automl](https://togithub.com/googleapis/python-automl) | major | `==0.10.0` -> `==1.0.1` | --- ### Release Notes
googleapis/python-automl: [`v1.0.1`](https://togithub.com/googleapis/python-automl/blob/master/CHANGELOG.md#101-httpswwwgithubcomgoogleapispython-automlcomparev100v101-2020-06-18) ([Compare Source](https://togithub.com/googleapis/python-automl/compare/v0.10.0...v1.0.1))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). * [tables/automl] fix: update the csv file and the dataset name [(#4188)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4188) fixes #4177 fixes #4178 * samples: Automl table batch test [(#4267)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4267) * added rtest req.txt * samples: added automl batch predict test * added missing package * Update tables/automl/batch_predict_test.py Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> * samples: fixed wrong format on GCS input Uri [(#4270)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4270) ## Description Current predict sample indicates that it can multiples GCS URI inputs but it should be singular. ## Checklist - [X] Please **merge** this PR for me once it is approved. * chore(deps): update dependency pytest to v5.4.3 [(#4279)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4279) * chore(deps): update dependency pytest to v5.4.3 * specify pytest for python 2 in appengine Co-authored-by: Leah Cole * Update automl_tables_predict.py with batch_predict_bq sample [(#4142)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4142) Added a new method `batch_predict_bq` demonstrating running batch_prediction using BigQuery. Added notes in comments about asynchronicity for `batch_predict` method. The region `automl_tables_batch_predict_bq` will be used on cloud.google.com (currently both sections for GCS and BigQuery use the same sample code which is incorrect). Fixes #4141 Note: It's a good idea to open an issue first for discussion. - [x] Please **merge** this PR for me once it is approved. * Update dependency pytest to v6 [(#4390)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4390) * chore: exclude notebooks * chore: update templates * chore: add codeowners and fix tests * chore: ignore warnings from sphinx * chore: fix tables client * test: fix unit tests Co-authored-by: Torry Yang Co-authored-by: florencep Co-authored-by: Mike Burton Co-authored-by: Lars Wander Co-authored-by: Michael Hu Co-authored-by: Michael Hu Co-authored-by: Alefh Sousa Co-authored-by: DPEBot Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh Co-authored-by: WhiteSource Renovate Co-authored-by: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Co-authored-by: Takashi Matsuo Co-authored-by: Anthony Co-authored-by: Amy Co-authored-by: Mike <45373284+munkhuushmgl@users.noreply.github.com> Co-authored-by: Leah Cole Co-authored-by: Sergei Dorogin --- .github/CODEOWNERS | 8 + .../automl_v1beta1/tables/tables_client.py | 6 +- samples/tables/automl_tables_dataset.py | 306 +++++++++++ samples/tables/automl_tables_model.py | 514 ++++++++++++++++++ samples/tables/automl_tables_predict.py | 209 +++++++ samples/tables/automl_tables_set_endpoint.py | 33 ++ samples/tables/batch_predict_test.py | 64 +++ samples/tables/dataset_test.py | 112 ++++ samples/tables/endpoint_test.py | 26 + samples/tables/model_test.py | 94 ++++ samples/tables/noxfile.py | 224 ++++++++ samples/tables/predict_test.py | 64 +++ samples/tables/requirements-test.txt | 1 + samples/tables/requirements.txt | 1 + .../v1beta1/test_tables_client_v1beta1.py | 5 + 15 files changed, 1666 insertions(+), 1 deletion(-) create mode 100644 .github/CODEOWNERS create mode 100644 samples/tables/automl_tables_dataset.py create mode 100644 samples/tables/automl_tables_model.py create mode 100644 samples/tables/automl_tables_predict.py create mode 100644 samples/tables/automl_tables_set_endpoint.py create mode 100644 samples/tables/batch_predict_test.py create mode 100644 samples/tables/dataset_test.py create mode 100644 samples/tables/endpoint_test.py create mode 100644 samples/tables/model_test.py create mode 100644 samples/tables/noxfile.py create mode 100644 samples/tables/predict_test.py create mode 100644 samples/tables/requirements-test.txt create mode 100644 samples/tables/requirements.txt diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..a5567cb9 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,8 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +/samples/**/*.py @telpirion @sirtorry @googleapis/python-samples-owners diff --git a/google/cloud/automl_v1beta1/tables/tables_client.py b/google/cloud/automl_v1beta1/tables/tables_client.py index 378eca61..f84961c5 100644 --- a/google/cloud/automl_v1beta1/tables/tables_client.py +++ b/google/cloud/automl_v1beta1/tables/tables_client.py @@ -2762,6 +2762,7 @@ def batch_predict( region=None, credentials=None, inputs=None, + params={}, **kwargs ): """Makes a batch prediction on a model. This does _not_ require the @@ -2828,6 +2829,9 @@ def batch_predict( The `model` instance you want to predict with . This must be supplied if `model_display_name` or `model_name` are not supplied. + params (Optional[dict]): + Additional domain-specific parameters for the predictions, + any string must be up to 25000 characters long. 
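+                For example, Tables models can take
+                {'feature_importance': 'true'} to request local feature
+                importance with the batch predictions.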
Returns: google.api_core.operation.Operation: @@ -2886,7 +2890,7 @@ def batch_predict( ) op = self.prediction_client.batch_predict( - model_name, input_request, output_request, **kwargs + model_name, input_request, output_request, params, **kwargs ) self.__log_operation_info("Batch predict", op) return op diff --git a/samples/tables/automl_tables_dataset.py b/samples/tables/automl_tables_dataset.py new file mode 100644 index 00000000..144f2ee6 --- /dev/null +++ b/samples/tables/automl_tables_dataset.py @@ -0,0 +1,306 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on dataset +with the Google AutoML Tables API. + +For more information, the documentation at +https://cloud.google.com/automl-tables/docs. +""" + +import argparse +import os + + +def create_dataset(project_id, compute_region, dataset_display_name): + """Create a dataset.""" + # [START automl_tables_create_dataset] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Create a dataset with the given display name + dataset = client.create_dataset(dataset_display_name) + + # Display the dataset information. + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Dataset metadata:") + print("\t{}".format(dataset.tables_dataset_metadata)) + print("Dataset example count: {}".format(dataset.example_count)) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + + # [END automl_tables_create_dataset] + + return dataset + + +def list_datasets(project_id, compute_region, filter_=None): + """List all datasets.""" + result = [] + # [START automl_tables_list_datasets] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # filter_ = 'filter expression here' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # List all the datasets available in the region by applying filter. + response = client.list_datasets(filter_=filter_) + + print("List of datasets:") + for dataset in response: + # Display the dataset information. 
+ print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + metadata = dataset.tables_dataset_metadata + print( + "Dataset primary table spec id: {}".format( + metadata.primary_table_spec_id + ) + ) + print( + "Dataset target column spec id: {}".format( + metadata.target_column_spec_id + ) + ) + print( + "Dataset target column spec id: {}".format( + metadata.target_column_spec_id + ) + ) + print( + "Dataset weight column spec id: {}".format( + metadata.weight_column_spec_id + ) + ) + print( + "Dataset ml use column spec id: {}".format( + metadata.ml_use_column_spec_id + ) + ) + print("Dataset example count: {}".format(dataset.example_count)) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + print("\n") + + # [END automl_tables_list_datasets] + result.append(dataset) + + return result + + +def get_dataset(project_id, compute_region, dataset_display_name): + """Get the dataset.""" + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Get complete detail of the dataset. + dataset = client.get_dataset(dataset_display_name=dataset_display_name) + + # Display the dataset information. + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Dataset metadata:") + print("\t{}".format(dataset.tables_dataset_metadata)) + print("Dataset example count: {}".format(dataset.example_count)) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + + return dataset + + +def import_data(project_id, compute_region, dataset_display_name, path): + """Import structured data.""" + # [START automl_tables_import_data] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME' + # path = 'gs://path/to/file.csv' or 'bq://project_id.dataset.table_id' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + response = None + if path.startswith("bq"): + response = client.import_data( + dataset_display_name=dataset_display_name, bigquery_input_uri=path + ) + else: + # Get the multiple Google Cloud Storage URIs. + input_uris = path.split(",") + response = client.import_data( + dataset_display_name=dataset_display_name, + gcs_input_uris=input_uris, + ) + + print("Processing import...") + # synchronous check of operation status. + print("Data imported. 
{}".format(response.result())) + + # [END automl_tables_import_data] + + +def update_dataset( + project_id, + compute_region, + dataset_display_name, + target_column_spec_name=None, + weight_column_spec_name=None, + test_train_column_spec_name=None, +): + """Update dataset.""" + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME_HERE' + # target_column_spec_name = 'TARGET_COLUMN_SPEC_NAME_HERE' or None + # weight_column_spec_name = 'WEIGHT_COLUMN_SPEC_NAME_HERE' or None + # test_train_column_spec_name = 'TEST_TRAIN_COLUMN_SPEC_NAME_HERE' or None + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + if target_column_spec_name is not None: + response = client.set_target_column( + dataset_display_name=dataset_display_name, + column_spec_display_name=target_column_spec_name, + ) + print("Target column updated. {}".format(response)) + if weight_column_spec_name is not None: + response = client.set_weight_column( + dataset_display_name=dataset_display_name, + column_spec_display_name=weight_column_spec_name, + ) + print("Weight column updated. {}".format(response)) + if test_train_column_spec_name is not None: + response = client.set_test_train_column( + dataset_display_name=dataset_display_name, + column_spec_display_name=test_train_column_spec_name, + ) + print("Test/train column updated. {}".format(response)) + + +def delete_dataset(project_id, compute_region, dataset_display_name): + """Delete a dataset""" + # [START automl_tables_delete_dataset] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME_HERE + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Delete a dataset. + response = client.delete_dataset(dataset_display_name=dataset_display_name) + + # synchronous check of operation status. + print("Dataset deleted. 
{}".format(response.result())) + # [END automl_tables_delete_dataset] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + create_dataset_parser = subparsers.add_parser( + "create_dataset", help=create_dataset.__doc__ + ) + create_dataset_parser.add_argument("--dataset_name") + + list_datasets_parser = subparsers.add_parser( + "list_datasets", help=list_datasets.__doc__ + ) + list_datasets_parser.add_argument("--filter_") + + get_dataset_parser = subparsers.add_parser( + "get_dataset", help=get_dataset.__doc__ + ) + get_dataset_parser.add_argument("--dataset_display_name") + + import_data_parser = subparsers.add_parser( + "import_data", help=import_data.__doc__ + ) + import_data_parser.add_argument("--dataset_display_name") + import_data_parser.add_argument("--path") + + update_dataset_parser = subparsers.add_parser( + "update_dataset", help=update_dataset.__doc__ + ) + update_dataset_parser.add_argument("--dataset_display_name") + update_dataset_parser.add_argument("--target_column_spec_name") + update_dataset_parser.add_argument("--weight_column_spec_name") + update_dataset_parser.add_argument("--ml_use_column_spec_name") + + delete_dataset_parser = subparsers.add_parser( + "delete_dataset", help=delete_dataset.__doc__ + ) + delete_dataset_parser.add_argument("--dataset_display_name") + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + if args.command == "create_dataset": + create_dataset(project_id, compute_region, args.dataset_name) + if args.command == "list_datasets": + list_datasets(project_id, compute_region, args.filter_) + if args.command == "get_dataset": + get_dataset(project_id, compute_region, args.dataset_display_name) + if args.command == "import_data": + import_data( + project_id, compute_region, args.dataset_display_name, args.path + ) + if args.command == "update_dataset": + update_dataset( + project_id, + compute_region, + args.dataset_display_name, + args.target_column_spec_name, + args.weight_column_spec_name, + args.ml_use_column_spec_name, + ) + if args.command == "delete_dataset": + delete_dataset(project_id, compute_region, args.dataset_display_name) diff --git a/samples/tables/automl_tables_model.py b/samples/tables/automl_tables_model.py new file mode 100644 index 00000000..a77dfe62 --- /dev/null +++ b/samples/tables/automl_tables_model.py @@ -0,0 +1,514 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on model +with the Google AutoML Tables API. + +For more information, the documentation at +https://cloud.google.com/automl-tables/docs. 
+""" + +import argparse +import os + + +def create_model( + project_id, + compute_region, + dataset_display_name, + model_display_name, + train_budget_milli_node_hours, + include_column_spec_names=None, + exclude_column_spec_names=None, +): + """Create a model.""" + # [START automl_tables_create_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_display_name = 'DATASET_DISPLAY_NAME_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # train_budget_milli_node_hours = 'TRAIN_BUDGET_MILLI_NODE_HOURS_HERE' + # include_column_spec_names = 'INCLUDE_COLUMN_SPEC_NAMES_HERE' + # or None if unspecified + # exclude_column_spec_names = 'EXCLUDE_COLUMN_SPEC_NAMES_HERE' + # or None if unspecified + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Create a model with the model metadata in the region. + response = client.create_model( + model_display_name, + train_budget_milli_node_hours=train_budget_milli_node_hours, + dataset_display_name=dataset_display_name, + include_column_spec_names=include_column_spec_names, + exclude_column_spec_names=exclude_column_spec_names, + ) + + print("Training model...") + print("Training operation name: {}".format(response.operation.name)) + print("Training completed: {}".format(response.result())) + + # [END automl_tables_create_model] + + +def get_operation_status(operation_full_id): + """Get operation status.""" + # [START automl_tables_get_operation_status] + # TODO(developer): Uncomment and set the following variables + # operation_full_id = + # 'projects//locations//operations/' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient() + + # Get the latest state of a long-running operation. + op = client.auto_ml_client.transport._operations_client.get_operation( + operation_full_id + ) + + print("Operation status: {}".format(op)) + + # [END automl_tables_get_operation_status] + + +def list_models(project_id, compute_region, filter_=None): + """List all models.""" + result = [] + # [START automl_tables_list_models] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # filter_ = 'DATASET_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + from google.cloud.automl_v1beta1 import enums + + client = automl.TablesClient(project=project_id, region=compute_region) + + # List all the models available in the region by applying filter. + response = client.list_models(filter_=filter_) + + print("List of models:") + for model in response: + # Retrieve deployment state. + if model.deployment_state == enums.Model.DeploymentState.DEPLOYED: + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + # Display the model information. 
+ print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + metadata = model.tables_model_metadata + print( + "Target column display name: {}".format( + metadata.target_column_spec.display_name + ) + ) + print( + "Training budget in node milli hours: {}".format( + metadata.train_budget_milli_node_hours + ) + ) + print( + "Training cost in node milli hours: {}".format( + metadata.train_cost_milli_node_hours + ) + ) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + print("\n") + + # [END automl_tables_list_models] + result.append(model) + + return result + + +def get_model(project_id, compute_region, model_display_name): + """Get model details.""" + # [START automl_tables_get_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + from google.cloud.automl_v1beta1 import enums + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Get complete detail of the model. + model = client.get_model(model_display_name=model_display_name) + + # Retrieve deployment state. + if model.deployment_state == enums.Model.DeploymentState.DEPLOYED: + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + # get features of top importance + feat_list = [ + (column.feature_importance, column.column_display_name) + for column in model.tables_model_metadata.tables_model_column_info + ] + feat_list.sort(reverse=True) + if len(feat_list) < 10: + feat_to_show = len(feat_list) + else: + feat_to_show = 10 + + # Display the model information. + print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + print("Features of top importance:") + for feat in feat_list[:feat_to_show]: + print(feat) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + + # [END automl_tables_get_model] + + return model + + +def list_model_evaluations( + project_id, compute_region, model_display_name, filter_=None +): + + """List model evaluations.""" + result = [] + # [START automl_tables_list_model_evaluations] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # filter_ = 'filter expression here' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # List all the model evaluations in the model by applying filter. 
+ response = client.list_model_evaluations( + model_display_name=model_display_name, filter_=filter_ + ) + + print("List of model evaluations:") + for evaluation in response: + print("Model evaluation name: {}".format(evaluation.name)) + print("Model evaluation id: {}".format(evaluation.name.split("/")[-1])) + print( + "Model evaluation example count: {}".format( + evaluation.evaluated_example_count + ) + ) + print("Model evaluation time:") + print("\tseconds: {}".format(evaluation.create_time.seconds)) + print("\tnanos: {}".format(evaluation.create_time.nanos)) + print("\n") + # [END automl_tables_list_model_evaluations] + result.append(evaluation) + + return result + + +def get_model_evaluation( + project_id, compute_region, model_id, model_evaluation_id +): + """Get model evaluation.""" + # [START automl_tables_get_model_evaluation] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + # model_evaluation_id = 'MODEL_EVALUATION_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient() + + # Get the full path of the model evaluation. + model_evaluation_full_id = client.auto_ml_client.model_evaluation_path( + project_id, compute_region, model_id, model_evaluation_id + ) + + # Get complete detail of the model evaluation. + response = client.get_model_evaluation( + model_evaluation_name=model_evaluation_full_id + ) + + print(response) + # [END automl_tables_get_model_evaluation] + return response + + +def display_evaluation( + project_id, compute_region, model_display_name, filter_=None +): + """Display evaluation.""" + # [START automl_tables_display_evaluation] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # filter_ = 'filter expression here' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # List all the model evaluations in the model by applying filter. + response = client.list_model_evaluations( + model_display_name=model_display_name, filter_=filter_ + ) + + # Iterate through the results. + for evaluation in response: + # There is an evaluation for each class in a model, plus one for the + # overall model. Get only the overall model evaluation. + if not evaluation.annotation_spec_id: + model_evaluation_name = evaluation.name + break + + # Get a model evaluation.
+ model_evaluation = client.get_model_evaluation( + model_evaluation_name=model_evaluation_name + ) + + # An unset proto message stringifies to "", so this checks which + # metrics type is actually populated for this model. + classification_metrics = model_evaluation.classification_evaluation_metrics + if str(classification_metrics): + confidence_metrics = classification_metrics.confidence_metrics_entry + + # Showing model score based on threshold of 0.5 + print("Model classification metrics (threshold at 0.5):") + for confidence_metrics_entry in confidence_metrics: + if confidence_metrics_entry.confidence_threshold == 0.5: + print( + "Model Precision: {}%".format( + round(confidence_metrics_entry.precision * 100, 2) + ) + ) + print( + "Model Recall: {}%".format( + round(confidence_metrics_entry.recall * 100, 2) + ) + ) + print( + "Model F1 score: {}%".format( + round(confidence_metrics_entry.f1_score * 100, 2) + ) + ) + print("Model AUPRC: {}".format(classification_metrics.au_prc)) + print("Model AUROC: {}".format(classification_metrics.au_roc)) + print("Model log loss: {}".format(classification_metrics.log_loss)) + + regression_metrics = model_evaluation.regression_evaluation_metrics + if str(regression_metrics): + print("Model regression metrics:") + print( + "Model RMSE: {}".format(regression_metrics.root_mean_squared_error) + ) + print("Model MAE: {}".format(regression_metrics.mean_absolute_error)) + print( + "Model MAPE: {}".format( + regression_metrics.mean_absolute_percentage_error + ) + ) + print("Model R^2: {}".format(regression_metrics.r_squared)) + + # [END automl_tables_display_evaluation] + + +def deploy_model(project_id, compute_region, model_display_name): + """Deploy model.""" + # [START automl_tables_deploy_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Deploy model + response = client.deploy_model(model_display_name=model_display_name) + + # synchronous check of operation status. + print("Model deployed. {}".format(response.result())) + + # [END automl_tables_deploy_model] + + +def undeploy_model(project_id, compute_region, model_display_name): + """Undeploy model.""" + # [START automl_tables_undeploy_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Undeploy model + response = client.undeploy_model(model_display_name=model_display_name) + + # synchronous check of operation status. + print("Model undeployed. {}".format(response.result())) + + # [END automl_tables_undeploy_model] + + +def delete_model(project_id, compute_region, model_display_name): + """Delete a model.""" + # [START automl_tables_delete_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Delete the model + response = client.delete_model(model_display_name=model_display_name) + + # synchronous check of operation status. + print("Model deleted.
{}".format(response.result())) + + # [END automl_tables_delete_model] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + create_model_parser = subparsers.add_parser( + "create_model", help=create_model.__doc__ + ) + create_model_parser.add_argument("--dataset_display_name") + create_model_parser.add_argument("--model_display_name") + create_model_parser.add_argument( + "--train_budget_milli_node_hours", type=int + ) + + get_operation_status_parser = subparsers.add_parser( + "get_operation_status", help=get_operation_status.__doc__ + ) + get_operation_status_parser.add_argument("--operation_full_id") + + list_models_parser = subparsers.add_parser( + "list_models", help=list_models.__doc__ + ) + list_models_parser.add_argument("--filter_") + + get_model_parser = subparsers.add_parser( + "get_model", help=get_model.__doc__ + ) + get_model_parser.add_argument("--model_display_name") + + list_model_evaluations_parser = subparsers.add_parser( + "list_model_evaluations", help=list_model_evaluations.__doc__ + ) + list_model_evaluations_parser.add_argument("--model_display_name") + list_model_evaluations_parser.add_argument("--filter_") + + get_model_evaluation_parser = subparsers.add_parser( + "get_model_evaluation", help=get_model_evaluation.__doc__ + ) + get_model_evaluation_parser.add_argument("--model_id") + get_model_evaluation_parser.add_argument("--model_evaluation_id") + + display_evaluation_parser = subparsers.add_parser( + "display_evaluation", help=display_evaluation.__doc__ + ) + display_evaluation_parser.add_argument("--model_display_name") + display_evaluation_parser.add_argument("--filter_") + + deploy_model_parser = subparsers.add_parser( + "deploy_model", help=deploy_model.__doc__ + ) + deploy_model_parser.add_argument("--model_display_name") + + undeploy_model_parser = subparsers.add_parser( + "undeploy_model", help=undeploy_model.__doc__ + ) + undeploy_model_parser.add_argument("--model_display_name") + + delete_model_parser = subparsers.add_parser( + "delete_model", help=delete_model.__doc__ + ) + delete_model_parser.add_argument("--model_display_name") + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + + if args.command == "create_model": + create_model( + project_id, + compute_region, + args.dataset_display_name, + args.model_display_name, + args.train_budget_milli_node_hours, + # Input columns are omitted here as argparse does not support + # column spec objects, but it is still included in function def. 
+ ) + if args.command == "get_operation_status": + get_operation_status(args.operation_full_id) + if args.command == "list_models": + list_models(project_id, compute_region, args.filter_) + if args.command == "get_model": + get_model(project_id, compute_region, args.model_display_name) + if args.command == "list_model_evaluations": + list_model_evaluations( + project_id, compute_region, args.model_display_name, args.filter_ + ) + if args.command == "get_model_evaluation": + get_model_evaluation( + project_id, + compute_region, + args.model_id, + args.model_evaluation_id, + ) + if args.command == "display_evaluation": + display_evaluation( + project_id, compute_region, args.model_display_name, args.filter_ + ) + if args.command == "deploy_model": + deploy_model(project_id, compute_region, args.model_display_name) + if args.command == "undeploy_model": + undeploy_model(project_id, compute_region, args.model_display_name) + if args.command == "delete_model": + delete_model(project_id, compute_region, args.model_display_name) diff --git a/samples/tables/automl_tables_predict.py b/samples/tables/automl_tables_predict.py new file mode 100644 index 00000000..9787e1b9 --- /dev/null +++ b/samples/tables/automl_tables_predict.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on predictions +with the Google AutoML Tables API. + +For more information, see the documentation at +https://cloud.google.com/automl-tables/docs.
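+ +A hypothetical command-line invocation (placeholder values; the __main__ +block below reads PROJECT_ID and REGION_NAME from the environment): + + python automl_tables_predict.py batch_predict --model_display_name=my_model --input_path=gs://my-bucket/input.csv --output_path=gs://my-bucket/output/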
+""" + +import argparse +import os + + +def predict( + project_id, + compute_region, + model_display_name, + inputs, + feature_importance=None, +): + """Make a prediction.""" + # [START automl_tables_predict] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # inputs = {'value': 3, ...} + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + if feature_importance: + response = client.predict( + model_display_name=model_display_name, + inputs=inputs, + feature_importance=True, + ) + else: + response = client.predict( + model_display_name=model_display_name, inputs=inputs + ) + + print("Prediction results:") + for result in response.payload: + print( + "Predicted class name: {}".format(result.tables.value.string_value) + ) + print("Predicted class score: {}".format(result.tables.score)) + + if feature_importance: + # get features of top importance + feat_list = [ + (column.feature_importance, column.column_display_name) + for column in result.tables.tables_model_column_info + ] + feat_list.sort(reverse=True) + if len(feat_list) < 10: + feat_to_show = len(feat_list) + else: + feat_to_show = 10 + + print("Features of top importance:") + for feat in feat_list[:feat_to_show]: + print(feat) + + # [END automl_tables_predict] + + +def batch_predict_bq( + project_id, + compute_region, + model_display_name, + bq_input_uri, + bq_output_uri, + params +): + """Make a batch of predictions.""" + # [START automl_tables_batch_predict_bq] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # bq_input_uri = 'bq://my-project.my-dataset.my-table' + # bq_output_uri = 'bq://my-project' + # params = {} + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Query model + response = client.batch_predict(bigquery_input_uri=bq_input_uri, + bigquery_output_uri=bq_output_uri, + model_display_name=model_display_name, + params=params) + print("Making batch prediction... 
") + # `response` is a async operation descriptor, + # you can register a callback for the operation to complete via `add_done_callback`: + # def callback(operation_future): + # result = operation_future.result() + # response.add_done_callback(callback) + # + # or block the thread polling for the operation's results: + response.result() + # AutoML puts predictions in a newly generated dataset with a name by a mask "prediction_" + model_id + "_" + timestamp + # here's how to get the dataset name: + dataset_name = response.metadata.batch_predict_details.output_info.bigquery_output_dataset + + print("Batch prediction complete.\nResults are in '{}' dataset.\n{}".format( + dataset_name, response.metadata)) + + # [END automl_tables_batch_predict_bq] + + +def batch_predict( + project_id, + compute_region, + model_display_name, + gcs_input_uri, + gcs_output_uri, + params, +): + """Make a batch of predictions.""" + # [START automl_tables_batch_predict] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_display_name = 'MODEL_DISPLAY_NAME_HERE' + # gcs_input_uri = 'gs://YOUR_BUCKET_ID/path_to_your_input_csv' + # gcs_output_uri = 'gs://YOUR_BUCKET_ID/path_to_save_results/' + # params = {} + + from google.cloud import automl_v1beta1 as automl + + client = automl.TablesClient(project=project_id, region=compute_region) + + # Query model + response = client.batch_predict( + gcs_input_uris=gcs_input_uri, + gcs_output_uri_prefix=gcs_output_uri, + model_display_name=model_display_name, + params=params + ) + print("Making batch prediction... ") + # `response` is a async operation descriptor, + # you can register a callback for the operation to complete via `add_done_callback`: + # def callback(operation_future): + # result = operation_future.result() + # response.add_done_callback(callback) + # + # or block the thread polling for the operation's results: + response.result() + + print("Batch prediction complete.\n{}".format(response.metadata)) + + # [END automl_tables_batch_predict] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + predict_parser = subparsers.add_parser("predict", help=predict.__doc__) + predict_parser.add_argument("--model_display_name") + predict_parser.add_argument("--file_path") + + batch_predict_parser = subparsers.add_parser( + "batch_predict", help=predict.__doc__ + ) + batch_predict_parser.add_argument("--model_display_name") + batch_predict_parser.add_argument("--input_path") + batch_predict_parser.add_argument("--output_path") + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + + if args.command == "predict": + predict( + project_id, compute_region, args.model_display_name, args.file_path + ) + + if args.command == "batch_predict": + batch_predict( + project_id, + compute_region, + args.model_display_name, + args.input_path, + args.output_path, + ) diff --git a/samples/tables/automl_tables_set_endpoint.py b/samples/tables/automl_tables_set_endpoint.py new file mode 100644 index 00000000..d6ab898b --- /dev/null +++ b/samples/tables/automl_tables_set_endpoint.py @@ -0,0 +1,33 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_client_with_endpoint(gcp_project_id): + """Create a Tables client with a non-default endpoint.""" + # [START automl_set_endpoint] + from google.cloud import automl_v1beta1 as automl + from google.api_core.client_options import ClientOptions + + # Set the endpoint you want to use via the ClientOptions. + # gcp_project_id = 'YOUR_PROJECT_ID' + client_options = ClientOptions(api_endpoint="eu-automl.googleapis.com:443") + client = automl.TablesClient( + project=gcp_project_id, region="eu", client_options=client_options + ) + # [END automl_set_endpoint] + + # do simple test to check client connectivity + print(client.list_datasets()) + + return client diff --git a/samples/tables/batch_predict_test.py b/samples/tables/batch_predict_test.py new file mode 100644 index 00000000..203f4c8d --- /dev/null +++ b/samples/tables/batch_predict_test.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
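+ +# Note: these tests assume pre-provisioned resources: a trained model +# (model_test.STATIC_MODEL) and the bank-marketing inputs in GCS and BigQuery +# referenced below, with GOOGLE_CLOUD_PROJECT set in the environment.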
+ +import os + +from google.cloud.automl_v1beta1.gapic import enums + +import pytest + +import automl_tables_model +import automl_tables_predict +import model_test + + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +STATIC_MODEL = model_test.STATIC_MODEL +GCS_INPUT = "gs://{}-automl-tables-test/bank-marketing.csv".format(PROJECT) +GCS_OUTPUT = "gs://{}-automl-tables-test/TABLE_TEST_OUTPUT/".format(PROJECT) +BQ_INPUT = "bq://{}.automl_test.bank_marketing".format(PROJECT) +BQ_OUTPUT = "bq://{}".format(PROJECT) +PARAMS = {} + + +@pytest.mark.slow +def test_batch_predict(capsys): + ensure_model_online() + + automl_tables_predict.batch_predict( + PROJECT, REGION, STATIC_MODEL, GCS_INPUT, GCS_OUTPUT, PARAMS + ) + out, _ = capsys.readouterr() + assert "Batch prediction complete" in out + + +@pytest.mark.slow +def test_batch_predict_bq(capsys): + ensure_model_online() + automl_tables_predict.batch_predict_bq( + PROJECT, REGION, STATIC_MODEL, BQ_INPUT, BQ_OUTPUT, PARAMS + ) + out, _ = capsys.readouterr() + assert "Batch prediction complete" in out + + +def ensure_model_online(): + model = model_test.ensure_model_ready() + if model.deployment_state != enums.Model.DeploymentState.DEPLOYED: + automl_tables_model.deploy_model(PROJECT, REGION, model.display_name) + + return automl_tables_model.get_model(PROJECT, REGION, model.display_name) diff --git a/samples/tables/dataset_test.py b/samples/tables/dataset_test.py new file mode 100644 index 00000000..27570f0b --- /dev/null +++ b/samples/tables/dataset_test.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import random +import string +import time + +from google.api_core import exceptions +import pytest + +import automl_tables_dataset + + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +STATIC_DATASET = "do_not_delete_this_table_python" +GCS_DATASET = ("gs://python-docs-samples-tests-automl-tables-test" + "/bank-marketing.csv") + +ID = "{rand}_{time}".format( + rand="".join( + [random.choice(string.ascii_letters + string.digits) for n in range(4)] + ), + time=int(time.time()), +) + + +def _id(name): + return "{}_{}".format(name, ID) + + +def ensure_dataset_ready(): + dataset = None + name = STATIC_DATASET + try: + dataset = automl_tables_dataset.get_dataset(PROJECT, REGION, name) + except exceptions.NotFound: + dataset = automl_tables_dataset.create_dataset(PROJECT, REGION, name) + + if dataset.example_count is None or dataset.example_count == 0: + automl_tables_dataset.import_data(PROJECT, REGION, name, GCS_DATASET) + dataset = automl_tables_dataset.get_dataset(PROJECT, REGION, name) + + automl_tables_dataset.update_dataset( + PROJECT, + REGION, + dataset.display_name, + target_column_spec_name="Deposit", + ) + + return dataset + + +@pytest.mark.slow +def test_dataset_create_import_delete(capsys): + name = _id("d_cr_dl") + dataset = automl_tables_dataset.create_dataset(PROJECT, REGION, name) + assert dataset is not None + assert dataset.display_name == name + + automl_tables_dataset.import_data(PROJECT, REGION, name, GCS_DATASET) + + out, _ = capsys.readouterr() + assert "Data imported." in out + + automl_tables_dataset.delete_dataset(PROJECT, REGION, name) + + with pytest.raises(exceptions.NotFound): + automl_tables_dataset.get_dataset(PROJECT, REGION, name) + + +def test_dataset_update(capsys): + dataset = ensure_dataset_ready() + automl_tables_dataset.update_dataset( + PROJECT, + REGION, + dataset.display_name, + target_column_spec_name="Deposit", + weight_column_spec_name="Balance", + ) + + out, _ = capsys.readouterr() + assert "Target column updated." in out + assert "Weight column updated." in out + + +def test_list_datasets(): + ensure_dataset_ready() + assert ( + next( + ( + d + for d in automl_tables_dataset.list_datasets(PROJECT, REGION) + if d.display_name == STATIC_DATASET + ), + None, + ) + is not None + ) diff --git a/samples/tables/endpoint_test.py b/samples/tables/endpoint_test.py new file mode 100644 index 00000000..5a20aba5 --- /dev/null +++ b/samples/tables/endpoint_test.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
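+ +# Note: the assertion below looks for "GRPCIterator" in captured stdout +# because create_client_with_endpoint prints client.list_datasets(); the +# pager's printed representation is assumed to include that type name.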
+import os + +import automl_tables_set_endpoint + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] + + +def test_client_creation(capsys): + automl_tables_set_endpoint.create_client_with_endpoint(PROJECT) + out, _ = capsys.readouterr() + assert "GRPCIterator" in out diff --git a/samples/tables/model_test.py b/samples/tables/model_test.py new file mode 100644 index 00000000..484eaf82 --- /dev/null +++ b/samples/tables/model_test.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import os +import random +import string +import time + +from google.api_core import exceptions + +import automl_tables_model +import dataset_test + + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +STATIC_MODEL = "do_not_delete_this_model_0" +GCS_DATASET = "gs://cloud-ml-tables-data/bank-marketing.csv" + +ID = "{rand}_{time}".format( + rand="".join( + [random.choice(string.ascii_letters + string.digits) for n in range(4)] + ), + time=int(time.time()), +) + + +def _id(name): + return "{}_{}".format(name, ID) + + +def test_list_models(): + ensure_model_ready() + assert ( + next( + ( + m + for m in automl_tables_model.list_models(PROJECT, REGION) + if m.display_name == STATIC_MODEL + ), + None, + ) + is not None + ) + + +def test_list_model_evaluations(): + model = ensure_model_ready() + mes = automl_tables_model.list_model_evaluations( + PROJECT, REGION, model.display_name + ) + assert len(mes) > 0 + for me in mes: + assert me.name.startswith(model.name) + + +def test_get_model_evaluations(): + model = ensure_model_ready() + me = automl_tables_model.list_model_evaluations( + PROJECT, REGION, model.display_name + )[0] + mep = automl_tables_model.get_model_evaluation( + PROJECT, + REGION, + model.name.rpartition("/")[2], + me.name.rpartition("/")[2], + ) + + assert mep.name == me.name + + +def ensure_model_ready(): + name = STATIC_MODEL + try: + return automl_tables_model.get_model(PROJECT, REGION, name) + except exceptions.NotFound: + pass + + dataset = dataset_test.ensure_dataset_ready() + return automl_tables_model.create_model( + PROJECT, REGION, dataset.display_name, name, 1000 + ) diff --git a/samples/tables/noxfile.py b/samples/tables/noxfile.py new file mode 100644 index 00000000..ba55d7ce --- /dev/null +++ b/samples/tables/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to ensure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__",) + ] + + +# Linting with flake8.
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/samples/tables/predict_test.py b/samples/tables/predict_test.py new file mode 100644 index 00000000..d608e182 --- /dev/null +++ b/samples/tables/predict_test.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from google.cloud.automl_v1beta1.gapic import enums + +import automl_tables_model +import automl_tables_predict +import model_test + + +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +REGION = "us-central1" +STATIC_MODEL = model_test.STATIC_MODEL + + +def test_predict(capsys): + inputs = { + "Age": 31, + "Balance": 200, + "Campaign": 2, + "Contact": "cellular", + "Day": "4", + "Default": "no", + "Duration": 12, + "Education": "primary", + "Housing": "yes", + "Job": "blue-collar", + "Loan": "no", + "MaritalStatus": "divorced", + "Month": "jul", + "PDays": 4, + "POutcome": "0", + "Previous": 12, + } + + ensure_model_online() + automl_tables_predict.predict(PROJECT, REGION, STATIC_MODEL, inputs, True) + out, _ = capsys.readouterr() + assert "Predicted class name:" in out + assert "Predicted class score:" in out + assert "Features of top importance:" in out + + +def ensure_model_online(): + model = model_test.ensure_model_ready() + if model.deployment_state != enums.Model.DeploymentState.DEPLOYED: + automl_tables_model.deploy_model(PROJECT, REGION, model.display_name) + + return automl_tables_model.get_model(PROJECT, REGION, model.display_name) diff --git a/samples/tables/requirements-test.txt b/samples/tables/requirements-test.txt new file mode 100644 index 00000000..7e460c8c --- /dev/null +++ b/samples/tables/requirements-test.txt @@ -0,0 +1 @@ +pytest==6.0.1 diff --git a/samples/tables/requirements.txt b/samples/tables/requirements.txt new file mode 100644 index 00000000..867dfc61 --- /dev/null +++ b/samples/tables/requirements.txt @@ -0,0 +1 @@ +google-cloud-automl==1.0.1 diff --git a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py index 3566846d..61f39a98 100644 --- a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py +++ b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py @@ -1299,6 +1299,7 @@ def test_batch_predict_pandas_dataframe(self): "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, + {}, ) def test_batch_predict_pandas_dataframe_init_gcs(self): @@ -1333,6 +1334,7 @@ def test_batch_predict_pandas_dataframe_init_gcs(self): "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, + {}, ) def test_batch_predict_gcs(self): @@ -1346,6 +1348,7 @@ def test_batch_predict_gcs(self): "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"gcs_destination": {"output_uri_prefix": "gs://output"}}, + {}, ) def test_batch_predict_bigquery(self): @@ -1359,6 +1362,7 @@ def test_batch_predict_bigquery(self): "my_model", {"bigquery_source": {"input_uri": "bq://input"}}, {"bigquery_destination": {"output_uri": "bq://output"}}, + {}, ) def test_batch_predict_mixed(self): @@ -1372,6 +1376,7 @@ def test_batch_predict_mixed(self): "my_model", {"gcs_source": {"input_uris": ["gs://input"]}}, {"bigquery_destination": {"output_uri": "bq://output"}}, + {}, ) def test_batch_predict_missing_input_gcs_uri(self): From 9feb4cc5e04a01a4199da43400457cca6c0bfa05 Mon Sep 17 00:00:00 2001 
From: Naofumi Yamada Date: Thu, 10 Sep 2020 05:02:04 +0900 Subject: [PATCH 45/64] fix: `update_column_spec` typo in TablesClient docstring (#18) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The docstring contains the non-existent function `update_column_specs`; this fixes it to `update_column_spec`, which I think is more appropriate. The old code caused an AttributeError: 'TablesClient' object has no attribute 'update_column_specs'. Fixes #17 🦕 --- google/cloud/automl_v1beta1/tables/tables_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/automl_v1beta1/tables/tables_client.py b/google/cloud/automl_v1beta1/tables/tables_client.py index f84961c5..f0a1678e 100644 --- a/google/cloud/automl_v1beta1/tables/tables_client.py +++ b/google/cloud/automl_v1beta1/tables/tables_client.py @@ -1205,7 +1205,7 @@ def update_column_spec( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'), ... project='my-project', region='us-central1') ... - >>> client.update_column_specs(dataset_display_name='my_dataset', + >>> client.update_column_spec(dataset_display_name='my_dataset', ... column_spec_display_name='Outcome', type_code='CATEGORY') ... From 2c98635bfa3e7db88963590120e54744ef9320c5 Mon Sep 17 00:00:00 2001 From: Torry Yang Date: Fri, 20 Jul 2018 16:24:34 -0700 Subject: [PATCH 46/64] automl beta [(#1575)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1575) * automl initial commit * lint * fix import groupings * add requirements.txt * address review comments --- .../snippets/automl_translation_dataset.py | 278 ++++++++++++++++ samples/snippets/automl_translation_model.py | 300 ++++++++++++++++++ .../snippets/automl_translation_predict.py | 109 +++++++ samples/snippets/dataset_test.py | 69 ++++ samples/snippets/model_test.py | 78 +++++ samples/snippets/predict_test.py | 31 ++ 6 files changed, 865 insertions(+) create mode 100755 samples/snippets/automl_translation_dataset.py create mode 100755 samples/snippets/automl_translation_model.py create mode 100644 samples/snippets/automl_translation_predict.py create mode 100644 samples/snippets/dataset_test.py create mode 100644 samples/snippets/model_test.py create mode 100644 samples/snippets/predict_test.py diff --git a/samples/snippets/automl_translation_dataset.py b/samples/snippets/automl_translation_dataset.py new file mode 100755 index 00000000..e579ac35 --- /dev/null +++ b/samples/snippets/automl_translation_dataset.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on datasets +with the Google AutoML Translation API.
+ +For more information, see the documentation at +https://cloud.google.com/translate/automl/docs +""" + +import argparse +import os + + +def create_dataset(project_id, compute_region, dataset_name, source, target): + """Create a dataset.""" + # [START automl_translation_create_dataset] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_name = 'DATASET_NAME_HERE' + # source = 'LANGUAGE_CODE_OF_SOURCE_LANGUAGE' + # target = 'LANGUAGE_CODE_OF_TARGET_LANGUAGE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, compute_region) + + # Specify the source and target language. + dataset_metadata = { + "source_language_code": source, + "target_language_code": target, + } + # Set dataset name and dataset metadata + my_dataset = { + "display_name": dataset_name, + "translation_dataset_metadata": dataset_metadata, + } + + # Create a dataset with the dataset metadata in the region. + dataset = client.create_dataset(project_location, my_dataset) + + # Display the dataset information + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Translation dataset Metadata:") + print( + "\tsource_language_code: {}".format( + dataset.translation_dataset_metadata.source_language_code + ) + ) + print( + "\ttarget_language_code: {}".format( + dataset.translation_dataset_metadata.target_language_code + ) + ) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + + # [END automl_translation_create_dataset] + + +def list_datasets(project_id, compute_region, filter_): + """List Datasets.""" + # [START automl_translation_list_datasets] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # filter_ = 'filter expression here' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, compute_region) + + # List all the datasets available in the region by applying filter. 
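+    # An empty filter string lists everything; a field filter such as + # 'translation_dataset_metadata:*' is assumed to be accepted here (see the + # AutoML filtering docs for the exact syntax).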
+ response = client.list_datasets(project_location, filter_) + + print("List of datasets:") + for dataset in response: + # Display the dataset information + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Translation dataset metadata:") + print( + "\tsource_language_code: {}".format( + dataset.translation_dataset_metadata.source_language_code + ) + ) + print( + "\ttarget_language_code: {}".format( + dataset.translation_dataset_metadata.target_language_code + ) + ) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + + # [END automl_translation_list_datasets] + + +def get_dataset(project_id, compute_region, dataset_id): + """Get the dataset.""" + # [START automl_translation_get_dataset] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_id = 'DATASET_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the dataset + dataset_full_id = client.dataset_path( + project_id, compute_region, dataset_id + ) + + # Get complete detail of the dataset. + dataset = client.get_dataset(dataset_full_id) + + # Display the dataset information + print("Dataset name: {}".format(dataset.name)) + print("Dataset id: {}".format(dataset.name.split("/")[-1])) + print("Dataset display name: {}".format(dataset.display_name)) + print("Translation dataset metadata:") + print( + "\tsource_language_code: {}".format( + dataset.translation_dataset_metadata.source_language_code + ) + ) + print( + "\ttarget_language_code: {}".format( + dataset.translation_dataset_metadata.target_language_code + ) + ) + print("Dataset create time:") + print("\tseconds: {}".format(dataset.create_time.seconds)) + print("\tnanos: {}".format(dataset.create_time.nanos)) + + # [END automl_translation_get_dataset] + + +def import_data(project_id, compute_region, dataset_id, path): + """Import sentence pairs to the dataset.""" + # [START automl_translation_import_data] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_id = 'DATASET_ID_HERE' + # path = 'gs://path/to/file.csv' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the dataset. + dataset_full_id = client.dataset_path( + project_id, compute_region, dataset_id + ) + + # Get the multiple Google Cloud Storage URIs + input_uris = path.split(",") + input_config = {"gcs_source": {"input_uris": input_uris}} + + # Import data from the input URI + response = client.import_data(dataset_full_id, input_config) + + print("Processing import...") + # synchronous check of operation status + print("Data imported. {}".format(response.result())) + + # [END automl_translation_import_data] + + +def delete_dataset(project_id, compute_region, dataset_id): + """Delete a dataset.""" + # [START automl_translation_delete_dataset]] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_id = 'DATASET_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the dataset. 
+ dataset_full_id = client.dataset_path( + project_id, compute_region, dataset_id + ) + + # Delete a dataset. + response = client.delete_dataset(dataset_full_id) + + # synchronous check of operation status + print("Dataset deleted. {}".format(response.result())) + + # [END automl_translation_delete_dataset] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + create_dataset_parser = subparsers.add_parser( + "create_dataset", help=create_dataset.__doc__ + ) + create_dataset_parser.add_argument("dataset_name") + create_dataset_parser.add_argument("source") + create_dataset_parser.add_argument("target") + + list_datasets_parser = subparsers.add_parser( + "list_datasets", help=list_datasets.__doc__ + ) + list_datasets_parser.add_argument("filter", nargs="?", default="") + + import_data_parser = subparsers.add_parser( + "import_data", help=import_data.__doc__ + ) + import_data_parser.add_argument("dataset_id") + import_data_parser.add_argument("path") + + delete_dataset_parser = subparsers.add_parser( + "delete_dataset", help=delete_dataset.__doc__ + ) + delete_dataset_parser.add_argument("dataset_id") + + get_dataset_parser = subparsers.add_parser( + "get_dataset", help=get_dataset.__doc__ + ) + get_dataset_parser.add_argument("dataset_id") + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + + if args.command == "create_dataset": + create_dataset( + project_id, + compute_region, + args.dataset_name, + args.source, + args.target, + ) + if args.command == "list_datasets": + list_datasets(project_id, compute_region, args.filter) + if args.command == "get_dataset": + get_dataset(project_id, compute_region, args.dataset_id) + if args.command == "import_data": + import_data(project_id, compute_region, args.dataset_id, args.path) + if args.command == "delete_dataset": + delete_dataset(project_id, compute_region, args.dataset_id) diff --git a/samples/snippets/automl_translation_model.py b/samples/snippets/automl_translation_model.py new file mode 100755 index 00000000..0b9b6f53 --- /dev/null +++ b/samples/snippets/automl_translation_model.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on model +with the Google AutoML Translation API. 
+ +For more information, see the documentation at +https://cloud.google.com/translate/automl/docs +""" + +import argparse +import os + + +def create_model(project_id, compute_region, dataset_id, model_name): + """Create a model.""" + # [START automl_translation_create_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # dataset_id = 'DATASET_ID_HERE' + # model_name = 'MODEL_NAME_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, compute_region) + + # Set model name and dataset. + my_model = { + "display_name": model_name, + "dataset_id": dataset_id, + "translation_model_metadata": {"base_model": ""}, + } + + # Create a model with the model metadata in the region. + response = client.create_model(project_location, my_model) + + print("Training operation name: {}".format(response.operation.name)) + print("Training started...") + + # [END automl_translation_create_model] + + +def list_models(project_id, compute_region, filter_): + """List all models.""" + # [START automl_translation_list_models] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # filter_ = 'DATASET_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + from google.cloud.automl_v1beta1 import enums + + client = automl.AutoMlClient() + + # A resource that represents Google Cloud Platform location. + project_location = client.location_path(project_id, compute_region) + + # List all the models available in the region by applying filter. + response = client.list_models(project_location, filter_) + + print("List of models:") + for model in response: + # Display the model information. + if model.deployment_state == enums.Model.DeploymentState.DEPLOYED: + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + + # [END automl_translation_list_models] + + +def get_model(project_id, compute_region, model_id): + """Get model details.""" + # [START automl_translation_get_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + from google.cloud.automl_v1beta1 import enums + + client = automl.AutoMlClient() + + # Get the full path of the model. + model_full_id = client.model_path(project_id, compute_region, model_id) + + # Get complete detail of the model. + model = client.get_model(model_full_id) + + # Retrieve deployment state. + if model.deployment_state == enums.Model.DeploymentState.DEPLOYED: + deployment_state = "deployed" + else: + deployment_state = "undeployed" + + # Display the model information. 
+ print("Model name: {}".format(model.name)) + print("Model id: {}".format(model.name.split("/")[-1])) + print("Model display name: {}".format(model.display_name)) + print("Model create time:") + print("\tseconds: {}".format(model.create_time.seconds)) + print("\tnanos: {}".format(model.create_time.nanos)) + print("Model deployment state: {}".format(deployment_state)) + + # [END automl_translation_get_model] + + +def list_model_evaluations(project_id, compute_region, model_id, filter_): + """List model evaluations.""" + # [START automl_translation_list_model_evaluations] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + # filter_ = 'filter expression here' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the model. + model_full_id = client.model_path(project_id, compute_region, model_id) + + print("List of model evaluations:") + for element in client.list_model_evaluations(model_full_id, filter_): + print(element) + + # [END automl_translation_list_model_evaluations] + + +def get_model_evaluation( + project_id, compute_region, model_id, model_evaluation_id +): + """Get model evaluation.""" + # [START automl_translation_get_model_evaluation] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + # model_evaluation_id = 'MODEL_EVALUATION_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the model evaluation. + model_evaluation_full_id = client.model_evaluation_path( + project_id, compute_region, model_id, model_evaluation_id + ) + + # Get complete detail of the model evaluation. + response = client.get_model_evaluation(model_evaluation_full_id) + + print(response) + + # [END automl_translation_get_model_evaluation] + + +def delete_model(project_id, compute_region, model_id): + """Delete a model.""" + # [START automl_translation_delete_model] + # TODO(developer): Uncomment and set the following variables + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the full path of the model. + model_full_id = client.model_path(project_id, compute_region, model_id) + + # Delete a model. + response = client.delete_model(model_full_id) + + # synchronous check of operation status. + print("Model deleted. {}".format(response.result())) + + # [END automl_translation_delete_model] + + +def get_operation_status(operation_full_id): + """Get operation status.""" + # [START automl_translation_get_operation_status] + # TODO(developer): Uncomment and set the following variables + # operation_full_id = + # 'projects//locations//operations/' + + from google.cloud import automl_v1beta1 as automl + + client = automl.AutoMlClient() + + # Get the latest state of a long-running operation. 
+ response = client.transport._operations_client.get_operation( + operation_full_id + ) + + print("Operation status: {}".format(response)) + + # [END automl_translation_get_operation_status] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + create_model_parser = subparsers.add_parser( + "create_model", help=create_model.__doc__ + ) + create_model_parser.add_argument("dataset_id") + create_model_parser.add_argument("model_name") + + list_model_evaluations_parser = subparsers.add_parser( + "list_model_evaluations", help=list_model_evaluations.__doc__ + ) + list_model_evaluations_parser.add_argument("model_id") + list_model_evaluations_parser.add_argument("filter", nargs="?", default="") + + get_model_evaluation_parser = subparsers.add_parser( + "get_model_evaluation", help=get_model_evaluation.__doc__ + ) + get_model_evaluation_parser.add_argument("model_id") + get_model_evaluation_parser.add_argument("model_evaluation_id") + + get_model_parser = subparsers.add_parser( + "get_model", help=get_model.__doc__ + ) + get_model_parser.add_argument("model_id") + + get_operation_status_parser = subparsers.add_parser( + "get_operation_status", help=get_operation_status.__doc__ + ) + get_operation_status_parser.add_argument("operation_full_id") + + list_models_parser = subparsers.add_parser( + "list_models", help=list_models.__doc__ + ) + list_models_parser.add_argument("filter", nargs="?", default="") + + delete_model_parser = subparsers.add_parser( + "delete_model", help=delete_model.__doc__ + ) + delete_model_parser.add_argument("model_id") + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + + if args.command == "create_model": + create_model( + project_id, compute_region, args.dataset_id, args.model_name + ) + if args.command == "list_models": + list_models(project_id, compute_region, args.filter) + if args.command == "get_model": + get_model(project_id, compute_region, args.model_id) + if args.command == "list_model_evaluations": + list_model_evaluations( + project_id, compute_region, args.model_id, args.filter + ) + if args.command == "get_model_evaluation": + get_model_evaluation( + project_id, compute_region, args.model_id, args.model_evaluation_id + ) + if args.command == "delete_model": + delete_model(project_id, compute_region, args.model_id) + if args.command == "get_operation_status": + get_operation_status(args.operation_full_id) diff --git a/samples/snippets/automl_translation_predict.py b/samples/snippets/automl_translation_predict.py new file mode 100644 index 00000000..1dac70b7 --- /dev/null +++ b/samples/snippets/automl_translation_predict.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This application demonstrates how to perform basic operations on prediction +with the Google AutoML Translation API. + +For more information, see the documentation at +https://cloud.google.com/translate/automl/docs +""" + +import argparse +import os + + +def predict( + project_id, + compute_region, + model_id, + file_path, + translation_allow_fallback=False, +): + """Translate the content.""" + # [START automl_translation_predict] + # project_id = 'PROJECT_ID_HERE' + # compute_region = 'COMPUTE_REGION_HERE' + # model_id = 'MODEL_ID_HERE' + # file_path = '/local/path/to/file' + # translation_allow_fallback = True allows fallback to Google Translate + + from google.cloud import automl_v1beta1 as automl + + automl_client = automl.AutoMlClient() + + # Create client for prediction service. + prediction_client = automl.PredictionServiceClient() + + # Get the full path of the model. + model_full_id = automl_client.model_path( + project_id, compute_region, model_id + ) + + # Read the file content for translation. + with open(file_path, "rb") as content_file: + content = content_file.read() + content.decode("utf-8") + + # Set the payload by giving the content of the file. + payload = {"text_snippet": {"content": content}} + + # params is additional domain-specific parameters. + # translation_allow_fallback allows to use Google translation model. + params = {} + if translation_allow_fallback: + params = {"translation_allow_fallback": "True"} + + response = prediction_client.predict(model_full_id, payload, params) + translated_content = response.payload[0].translation.translated_content + + print(u"Translated content: {}".format(translated_content.content)) + + # [END automl_translation_predict] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + subparsers = parser.add_subparsers(dest="command") + + predict_parser = subparsers.add_parser("predict", help=predict.__doc__) + predict_parser.add_argument("model_id") + predict_parser.add_argument("file_path") + predict_parser.add_argument( + "translation_allow_fallback", + nargs="?", + choices=["False", "True"], + default="False", + ) + + project_id = os.environ["PROJECT_ID"] + compute_region = os.environ["REGION_NAME"] + + args = parser.parse_args() + + if args.command == "predict": + translation_allow_fallback = ( + True if args.translation_allow_fallback == "True" else False + ) + predict( + project_id, + compute_region, + args.model_id, + args.file_path, + translation_allow_fallback, + ) diff --git a/samples/snippets/dataset_test.py b/samples/snippets/dataset_test.py new file mode 100644 index 00000000..29e3e5c9 --- /dev/null +++ b/samples/snippets/dataset_test.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import os + +import pytest + +import automl_translation_dataset + +project_id = os.environ["GCLOUD_PROJECT"] +compute_region = "us-central1" + + +@pytest.mark.slow +def test_dataset_create_import_delete(capsys): + # create dataset + dataset_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + automl_translation_dataset.create_dataset( + project_id, compute_region, dataset_name, "en", "ja" + ) + out, _ = capsys.readouterr() + create_dataset_output = out.splitlines() + assert "Dataset id: " in create_dataset_output[1] + + # import data + dataset_id = create_dataset_output[1].split()[2] + data = "gs://{}-vcm/en-ja.csv".format(project_id) + automl_translation_dataset.import_data( + project_id, compute_region, dataset_id, data + ) + out, _ = capsys.readouterr() + assert "Data imported." in out + + # delete dataset + automl_translation_dataset.delete_dataset( + project_id, compute_region, dataset_id + ) + out, _ = capsys.readouterr() + assert "Dataset deleted." in out + + +def test_dataset_list_get(capsys): + # list datasets + automl_translation_dataset.list_datasets(project_id, compute_region, "") + out, _ = capsys.readouterr() + list_dataset_output = out.splitlines() + assert "Dataset id: " in list_dataset_output[2] + + # get dataset + dataset_id = list_dataset_output[2].split()[2] + automl_translation_dataset.get_dataset( + project_id, compute_region, dataset_id + ) + out, _ = capsys.readouterr() + assert "Dataset name: " in out diff --git a/samples/snippets/model_test.py b/samples/snippets/model_test.py new file mode 100644 index 00000000..7f915c5d --- /dev/null +++ b/samples/snippets/model_test.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import os + +from google.cloud import automl_v1beta1 as automl + +import automl_translation_model + +project_id = os.environ["GCLOUD_PROJECT"] +compute_region = "us-central1" + + +def test_model_create_status_delete(capsys): + # create model + client = automl.AutoMlClient() + model_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + project_location = client.location_path(project_id, compute_region) + my_model = { + "display_name": model_name, + "dataset_id": "3876092572857648864", + "translation_model_metadata": {"base_model": ""}, + } + response = client.create_model(project_location, my_model) + operation_name = response.operation.name + assert operation_name + + # get operation status + automl_translation_model.get_operation_status(operation_name) + out, _ = capsys.readouterr() + assert "Operation status: " in out + + # cancel operation + response.cancel() + + +def test_model_list_get_evaluate(capsys): + # list models + automl_translation_model.list_models(project_id, compute_region, "") + out, _ = capsys.readouterr() + list_models_output = out.splitlines() + assert "Model id: " in list_models_output[2] + + # get model + model_id = list_models_output[2].split()[2] + automl_translation_model.get_model(project_id, compute_region, model_id) + out, _ = capsys.readouterr() + assert "Model name: " in out + + # list model evaluations + automl_translation_model.list_model_evaluations( + project_id, compute_region, model_id, "" + ) + out, _ = capsys.readouterr() + list_evals_output = out.splitlines() + assert "name: " in list_evals_output[1] + + # get model evaluation + model_evaluation_id = list_evals_output[1].split("/")[-1][:-1] + automl_translation_model.get_model_evaluation( + project_id, compute_region, model_id, model_evaluation_id + ) + out, _ = capsys.readouterr() + assert "evaluation_metric" in out diff --git a/samples/snippets/predict_test.py b/samples/snippets/predict_test.py new file mode 100644 index 00000000..87aea8fa --- /dev/null +++ b/samples/snippets/predict_test.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python + +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +import automl_translation_predict + +project_id = os.environ["GCLOUD_PROJECT"] +compute_region = "us-central1" + + +def test_predict(capsys): + model_id = "3128559826197068699" + automl_translation_predict.predict( + project_id, compute_region, model_id, "resources/input.txt", False + ) + out, _ = capsys.readouterr() + assert "Translated content: " in out From 31f06c5c72e4874d32099b76c9d03b06acc38a3c Mon Sep 17 00:00:00 2001 From: Torry Yang Date: Tue, 24 Jul 2018 09:19:56 -0700 Subject: [PATCH 47/64] remove translate prediction fallback [(#1598)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1598) --- .../snippets/automl_translation_predict.py | 29 ++----------------- samples/snippets/predict_test.py | 2 +- 2 files changed, 3 insertions(+), 28 deletions(-) diff --git a/samples/snippets/automl_translation_predict.py b/samples/snippets/automl_translation_predict.py index 1dac70b7..653cf388 100644 --- a/samples/snippets/automl_translation_predict.py +++ b/samples/snippets/automl_translation_predict.py @@ -25,20 +25,13 @@ import os -def predict( - project_id, - compute_region, - model_id, - file_path, - translation_allow_fallback=False, -): +def predict(project_id, compute_region, model_id, file_path): """Translate the content.""" # [START automl_translation_predict] # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' # model_id = 'MODEL_ID_HERE' # file_path = '/local/path/to/file' - # translation_allow_fallback = True allows fallback to Google Translate from google.cloud import automl_v1beta1 as automl @@ -61,10 +54,7 @@ def predict( payload = {"text_snippet": {"content": content}} # params is additional domain-specific parameters. - # translation_allow_fallback allows to use Google translation model. 
params = {} - if translation_allow_fallback: - params = {"translation_allow_fallback": "True"} response = prediction_client.predict(model_full_id, payload, params) translated_content = response.payload[0].translation.translated_content @@ -84,12 +74,6 @@ def predict( predict_parser = subparsers.add_parser("predict", help=predict.__doc__) predict_parser.add_argument("model_id") predict_parser.add_argument("file_path") - predict_parser.add_argument( - "translation_allow_fallback", - nargs="?", - choices=["False", "True"], - default="False", - ) project_id = os.environ["PROJECT_ID"] compute_region = os.environ["REGION_NAME"] @@ -97,13 +81,4 @@ args = parser.parse_args() if args.command == "predict": - translation_allow_fallback = ( - True if args.translation_allow_fallback == "True" else False - ) - predict( - project_id, - compute_region, - args.model_id, - args.file_path, - translation_allow_fallback, - ) + predict(project_id, compute_region, args.model_id, args.file_path) diff --git a/samples/snippets/predict_test.py b/samples/snippets/predict_test.py index 87aea8fa..c9fb7e04 100644 --- a/samples/snippets/predict_test.py +++ b/samples/snippets/predict_test.py @@ -25,7 +25,7 @@ def test_predict(capsys): model_id = "3128559826197068699" automl_translation_predict.predict( - project_id, compute_region, model_id, "resources/input.txt", False + project_id, compute_region, model_id, "resources/input.txt" ) out, _ = capsys.readouterr() assert "Translated content: " in out From 5e7986c2e6898eb8e791bda6c3a2ec821f7f623b Mon Sep 17 00:00:00 2001 From: Torry Yang Date: Thu, 2 Aug 2018 17:40:16 -0700 Subject: [PATCH 48/64] skip automl model create/delete test [(#1608)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1608) * skip model create/delete test * add skip reason --- samples/snippets/model_test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/samples/snippets/model_test.py b/samples/snippets/model_test.py index 7f915c5d..0d37a85c 100644 --- a/samples/snippets/model_test.py +++ b/samples/snippets/model_test.py @@ -18,6 +18,7 @@ import os from google.cloud import automl_v1beta1 as automl +import pytest import automl_translation_model @@ -25,6 +26,7 @@ compute_region = "us-central1" +@pytest.mark.skip(reason="creates too many models") def test_model_create_status_delete(capsys): # create model client = automl.AutoMlClient() From f27bfb82583c0a02fb709924ff771d967fdd79dd Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 28 Aug 2018 11:17:45 -0700 Subject: [PATCH 49/64] Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1658) * Auto-update dependencies. * Rollback appengine/standard/bigquery/. * Rollback appengine/standard/iap/. * Rollback bigtable/metricscaler. * Rollback appengine/flexible/datastore. * Rollback dataproc/ * Rollback jobs/api_client * Rollback vision/cloud-client. * Rollback functions/ocr/app. * Rollback iot/api-client/end_to_end_example. * Rollback storage/cloud-client. * Rollback kms/api-client. * Rollback dlp/ * Rollback bigquery/cloud-client. * Rollback iot/api-client/manager. * Rollback appengine/flexible/cloudsql_postgresql.
--- samples/snippets/requirements.txt | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index f6a42ea2..8d8575ea 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,2 +1,6 @@ +<<<<<<< HEAD google-cloud-automl==1.0.1 google-cloud-storage==1.29.0 +======= +google-cloud-automl==0.1.1 +>>>>>>> d89df5c (Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1658)) From 1b054f4892f9ad8ce75d15676b2157710a895994 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Wed, 29 Aug 2018 12:37:06 -0700 Subject: [PATCH 50/64] Update AutoML region tags to use standard product prefixes [(#1669)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1669) --- .../snippets/automl_translation_dataset.py | 20 ++++++------- samples/snippets/automl_translation_model.py | 28 +++++++++---------- .../snippets/automl_translation_predict.py | 4 +-- 3 files changed, 26 insertions(+), 26 deletions(-) diff --git a/samples/snippets/automl_translation_dataset.py b/samples/snippets/automl_translation_dataset.py index e579ac35..c60ef544 100755 --- a/samples/snippets/automl_translation_dataset.py +++ b/samples/snippets/automl_translation_dataset.py @@ -27,7 +27,7 @@ def create_dataset(project_id, compute_region, dataset_name, source, target): """Create a dataset.""" - # [START automl_translation_create_dataset] + # [START automl_translate_create_dataset] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -75,12 +75,12 @@ def create_dataset(project_id, compute_region, dataset_name, source, target): print("\tseconds: {}".format(dataset.create_time.seconds)) print("\tnanos: {}".format(dataset.create_time.nanos)) - # [END automl_translation_create_dataset] + # [END automl_translate_create_dataset] def list_datasets(project_id, compute_region, filter_): """List Datasets.""" - # [START automl_translation_list_datasets] + # [START automl_translate_list_datasets] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -117,12 +117,12 @@ def list_datasets(project_id, compute_region, filter_): print("\tseconds: {}".format(dataset.create_time.seconds)) print("\tnanos: {}".format(dataset.create_time.nanos)) - # [END automl_translation_list_datasets] + # [END automl_translate_list_datasets] def get_dataset(project_id, compute_region, dataset_id): """Get the dataset.""" - # [START automl_translation_get_dataset] + # [START automl_translate_get_dataset] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -159,12 +159,12 @@ def get_dataset(project_id, compute_region, dataset_id): print("\tseconds: {}".format(dataset.create_time.seconds)) print("\tnanos: {}".format(dataset.create_time.nanos)) - # [END automl_translation_get_dataset] + # [END automl_translate_get_dataset] def import_data(project_id, compute_region, dataset_id, path): """Import sentence pairs to the dataset.""" - # [START automl_translation_import_data] + # [START automl_translate_import_data] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -191,12 +191,12 @@ def import_data(project_id, compute_region, dataset_id, path): # synchronous check of operation status 
print("Data imported. {}".format(response.result())) - # [END automl_translation_import_data] + # [END automl_translate_import_data] def delete_dataset(project_id, compute_region, dataset_id): """Delete a dataset.""" - # [START automl_translation_delete_dataset]] + # [START automl_translate_delete_dataset]] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -217,7 +217,7 @@ def delete_dataset(project_id, compute_region, dataset_id): # synchronous check of operation status print("Dataset deleted. {}".format(response.result())) - # [END automl_translation_delete_dataset] + # [END automl_translate_delete_dataset] if __name__ == "__main__": diff --git a/samples/snippets/automl_translation_model.py b/samples/snippets/automl_translation_model.py index 0b9b6f53..77a4ed73 100755 --- a/samples/snippets/automl_translation_model.py +++ b/samples/snippets/automl_translation_model.py @@ -27,7 +27,7 @@ def create_model(project_id, compute_region, dataset_id, model_name): """Create a model.""" - # [START automl_translation_create_model] + # [START automl_translate_create_model] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -54,12 +54,12 @@ def create_model(project_id, compute_region, dataset_id, model_name): print("Training operation name: {}".format(response.operation.name)) print("Training started...") - # [END automl_translation_create_model] + # [END automl_translate_create_model] def list_models(project_id, compute_region, filter_): """List all models.""" - # [START automl_translation_list_models] + # [START automl_translate_list_models] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -92,12 +92,12 @@ def list_models(project_id, compute_region, filter_): print("\tnanos: {}".format(model.create_time.nanos)) print("Model deployment state: {}".format(deployment_state)) - # [END automl_translation_list_models] + # [END automl_translate_list_models] def get_model(project_id, compute_region, model_id): """Get model details.""" - # [START automl_translation_get_model] + # [START automl_translate_get_model] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -129,12 +129,12 @@ def get_model(project_id, compute_region, model_id): print("\tnanos: {}".format(model.create_time.nanos)) print("Model deployment state: {}".format(deployment_state)) - # [END automl_translation_get_model] + # [END automl_translate_get_model] def list_model_evaluations(project_id, compute_region, model_id, filter_): """List model evaluations.""" - # [START automl_translation_list_model_evaluations] + # [START automl_translate_list_model_evaluations] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -152,14 +152,14 @@ def list_model_evaluations(project_id, compute_region, model_id, filter_): for element in client.list_model_evaluations(model_full_id, filter_): print(element) - # [END automl_translation_list_model_evaluations] + # [END automl_translate_list_model_evaluations] def get_model_evaluation( project_id, compute_region, model_id, model_evaluation_id ): """Get model evaluation.""" - # [START automl_translation_get_model_evaluation] + # [START automl_translate_get_model_evaluation] # 
TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -180,12 +180,12 @@ def get_model_evaluation( print(response) - # [END automl_translation_get_model_evaluation] + # [END automl_translate_get_model_evaluation] def delete_model(project_id, compute_region, model_id): """Delete a model.""" - # [START automl_translation_delete_model] + # [START automl_translate_delete_model] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' @@ -204,12 +204,12 @@ def delete_model(project_id, compute_region, model_id): # synchronous check of operation status. print("Model deleted. {}".format(response.result())) - # [END automl_translation_delete_model] + # [END automl_translate_delete_model] def get_operation_status(operation_full_id): """Get operation status.""" - # [START automl_translation_get_operation_status] + # [START automl_translate_get_operation_status] # TODO(developer): Uncomment and set the following variables # operation_full_id = # 'projects//locations//operations/' @@ -225,7 +225,7 @@ def get_operation_status(operation_full_id): print("Operation status: {}".format(response)) - # [END automl_translation_get_operation_status] + # [END automl_translate_get_operation_status] if __name__ == "__main__": diff --git a/samples/snippets/automl_translation_predict.py b/samples/snippets/automl_translation_predict.py index 653cf388..b15e0e30 100644 --- a/samples/snippets/automl_translation_predict.py +++ b/samples/snippets/automl_translation_predict.py @@ -27,7 +27,7 @@ def predict(project_id, compute_region, model_id, file_path): """Translate the content.""" - # [START automl_translation_predict] + # [START automl_translate_predict] # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' # model_id = 'MODEL_ID_HERE' @@ -61,7 +61,7 @@ def predict(project_id, compute_region, model_id, file_path): print(u"Translated content: {}".format(translated_content.content)) - # [END automl_translation_predict] + # [END automl_translate_predict] if __name__ == "__main__": From e34ea26279cb3ef67f0144e8995904678dcc2c06 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Thu, 6 Sep 2018 10:54:34 -0700 Subject: [PATCH 51/64] Fix AutoML region tag typos [(#1687)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1687) * fixes vision delete dataset region tag * removes extra bracket --- samples/snippets/automl_translation_dataset.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/automl_translation_dataset.py b/samples/snippets/automl_translation_dataset.py index c60ef544..cf3e50ae 100755 --- a/samples/snippets/automl_translation_dataset.py +++ b/samples/snippets/automl_translation_dataset.py @@ -196,7 +196,7 @@ def import_data(project_id, compute_region, dataset_id, path): def delete_dataset(project_id, compute_region, dataset_id): """Delete a dataset.""" - # [START automl_translate_delete_dataset]] + # [START automl_translate_delete_dataset] # TODO(developer): Uncomment and set the following variables # project_id = 'PROJECT_ID_HERE' # compute_region = 'COMPUTE_REGION_HERE' From f226974b423a10112bbb92f80c8478a6728b6eb7 Mon Sep 17 00:00:00 2001 From: Charles Engelke Date: Fri, 19 Oct 2018 15:21:41 -0700 Subject: [PATCH 52/64] Fixed name of model [(#1779)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1779) * Fixed name of model * update model ids --- 
samples/snippets/predict_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/predict_test.py b/samples/snippets/predict_test.py index c9fb7e04..f9d98dfb 100644 --- a/samples/snippets/predict_test.py +++ b/samples/snippets/predict_test.py @@ -23,7 +23,7 @@ def test_predict(capsys): - model_id = "3128559826197068699" + model_id = "TRL3128559826197068699" automl_translation_predict.predict( project_id, compute_region, model_id, "resources/input.txt" ) From a19a80cf3eb7eb8087fc37bb871abf1cb21cb27a Mon Sep 17 00:00:00 2001 From: DPEBot Date: Wed, 6 Feb 2019 12:06:35 -0800 Subject: [PATCH 53/64] Auto-update dependencies. [(#1980)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1980) * Auto-update dependencies. * Update requirements.txt * Update requirements.txt --- samples/snippets/requirements.txt | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 8d8575ea..4b8bd4dd 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,6 +1,2 @@ -<<<<<<< HEAD google-cloud-automl==1.0.1 -google-cloud-storage==1.29.0 -======= -google-cloud-automl==0.1.1 ->>>>>>> d89df5c (Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1658)) +google-cloud-automl==1.0.1 -google-cloud-storage==1.29.0 \ No newline at end of file From c2e32249da8ad316183368ca04dc1c66414130d1 Mon Sep 17 00:00:00 2001 From: Charles Engelke Date: Fri, 26 Apr 2019 14:44:38 -0700 Subject: [PATCH 54/64] Updated beta version of automl [(#2124)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2124) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 4b8bd4dd..f6a42ea2 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-automl==1.0.1 -google-cloud-storage==1.29.0 \ No newline at end of file +google-cloud-storage==1.29.0 From 76a6dc8744e368b665a7226cbfac09984fa6f475 Mon Sep 17 00:00:00 2001 From: Mike <45373284+munkhuushmgl@users.noreply.github.com> Date: Mon, 2 Mar 2020 15:29:23 -0800 Subject: [PATCH 55/64] Translate: migrate published v3 translate batch samples [(#2914)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2914) * Translate: migrate published v3 batch samples * added missing requirements * extended wait time * inlined some vals and specified input and output * added link to supported file types & modified default values of input uri * fixed small nit --- samples/snippets/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index f6a42ea2..8225b703 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,2 +1,3 @@ +google-cloud-translate==2.0.0 google-cloud-automl==1.0.1 google-cloud-storage==1.29.0 From e5a9effd125abbb42dea953ca806b11ba64dbbdd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 19 May 2020 04:18:01 +0200 Subject: [PATCH 56/64] chore(deps): update dependency google-cloud-storage to v1.28.1 [(#3785)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3785) * chore(deps): update dependency google-cloud-storage to v1.28.1 * [asset] testing: use uuid instead of time Co-authored-by: Takashi Matsuo --- samples/snippets/requirements.txt | 2 +- 1 file
changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 8225b703..e20862e9 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-translate==2.0.0 +google-cloud-translate==2.0.1 google-cloud-automl==1.0.1 google-cloud-storage==1.29.0 From 6be3319632248c82d862aa1cd01cd4478673e598 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Tue, 9 Jun 2020 14:34:27 -0700 Subject: [PATCH 57/64] Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. [(#4022)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4022) --- samples/snippets/dataset_test.py | 2 +- samples/snippets/model_test.py | 2 +- samples/snippets/predict_test.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/dataset_test.py b/samples/snippets/dataset_test.py index 29e3e5c9..4430ec54 100644 --- a/samples/snippets/dataset_test.py +++ b/samples/snippets/dataset_test.py @@ -21,7 +21,7 @@ import automl_translation_dataset -project_id = os.environ["GCLOUD_PROJECT"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] compute_region = "us-central1" diff --git a/samples/snippets/model_test.py b/samples/snippets/model_test.py index 0d37a85c..e19a50ea 100644 --- a/samples/snippets/model_test.py +++ b/samples/snippets/model_test.py @@ -22,7 +22,7 @@ import automl_translation_model -project_id = os.environ["GCLOUD_PROJECT"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] compute_region = "us-central1" diff --git a/samples/snippets/predict_test.py b/samples/snippets/predict_test.py index f9d98dfb..d00a4658 100644 --- a/samples/snippets/predict_test.py +++ b/samples/snippets/predict_test.py @@ -18,7 +18,7 @@ import automl_translation_predict -project_id = os.environ["GCLOUD_PROJECT"] +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] compute_region = "us-central1" From 5f7d141afe732acf7458a9ac98618e93baa93d38 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Thu, 23 Jul 2020 11:23:31 -0700 Subject: [PATCH 58/64] fix(translate): fix a broken test [(#4360)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4360) * fix(translate): fix a broken test fixes #4353 * use uuid * fix builds --- samples/snippets/dataset_test.py | 4 ++-- samples/snippets/model_test.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/dataset_test.py b/samples/snippets/dataset_test.py index 4430ec54..eb5796d5 100644 --- a/samples/snippets/dataset_test.py +++ b/samples/snippets/dataset_test.py @@ -14,8 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import datetime import os +import uuid import pytest @@ -28,7 +28,7 @@ @pytest.mark.slow def test_dataset_create_import_delete(capsys): # create dataset - dataset_name = "test_" + datetime.datetime.now().strftime("%Y%m%d%H%M%S") + dataset_name = f"test_{uuid.uuid4().hex[:27]}" automl_translation_dataset.create_dataset( project_id, compute_region, dataset_name, "en", "ja" ) diff --git a/samples/snippets/model_test.py b/samples/snippets/model_test.py index e19a50ea..fd2fabc3 100644 --- a/samples/snippets/model_test.py +++ b/samples/snippets/model_test.py @@ -77,4 +77,4 @@ def test_model_list_get_evaluate(capsys): project_id, compute_region, model_id, model_evaluation_id ) out, _ = capsys.readouterr() - assert "evaluation_metric" in out + assert model_evaluation_id in out From ea8887dab216a0a2b8c8c875da6445df7fe7cd5e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 5 Aug 2020 01:36:03 +0200 Subject: [PATCH 59/64] chore(deps): update dependency google-cloud-translate to v2.0.2 [(#4426)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4426) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-translate](https://togithub.com/googleapis/python-translate) | patch | `==2.0.1` -> `==2.0.2` | ---
Release Notes: googleapis/python-translate [Compare Source](https://togithub.com/googleapis/python-translate/compare/v2.0.1...v2.0.2)
--- :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index e20862e9..f60c9855 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-cloud-translate==2.0.1 -google-cloud-automl==1.0.1 +google-cloud-translate==2.0.2 google-cloud-storage==1.29.0 +google-cloud-automl==1.0.1 From 7a2694ea86dd740ef7a1d1dc1aea48f8d1692357 Mon Sep 17 00:00:00 2001 From: Renovate Bot Date: Wed, 29 Jul 2020 18:11:30 +0000 Subject: [PATCH 60/64] Update dependency google-cloud-storage to v1.30.0 --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index f60c9855..185fa19f 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ google-cloud-translate==2.0.2 -google-cloud-storage==1.29.0 +google-cloud-storage==1.30.0 google-cloud-automl==1.0.1 From abc507005d5255ed5adf2c4b8e0b23042a0bdf47 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 18 Aug 2020 00:33:29 +0000 Subject: [PATCH 61/64] docs: add cancel operation sample --- samples/beta/cancel_operation.py | 63 ++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 samples/beta/cancel_operation.py diff --git a/samples/beta/cancel_operation.py b/samples/beta/cancel_operation.py new file mode 100644 index 00000000..9240a468 --- /dev/null +++ b/samples/beta/cancel_operation.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys + +# [START automl_cancel_operation] + +from google.cloud import automl_v1beta1 + + +def sample_cancel_operation(project, operation_id): + """ + Cancel Long-Running Operation + + Args: + project Required. Your Google Cloud Project ID. + operation_id Required. The ID of the Operation. 
+ """ + + client = automl_v1beta1.AutoMlClient() + + operations_client = client.transport._operations_client + + # project = '[Google Cloud Project ID]' + # operation_id = '[Operation ID]' + name = "projects/{}/locations/us-central1/operations/{}".format( + project, operation_id + ) + + operations_client.cancel_operation(name) + + print(u"Cancelled operation: {}".format(name)) + + +# [END automl_cancel_operation] + + +def main(): + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--project", type=str, default="[Google Cloud Project ID]") + parser.add_argument("--operation_id", type=str, default="[Operation ID]") + args = parser.parse_args() + + sample_cancel_operation(args.project, args.operation_id) + + +if __name__ == "__main__": + main() \ No newline at end of file From e4ead08398e2b029ca718662dfb420275629c2b8 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim Date: Tue, 18 Aug 2020 01:22:32 +0000 Subject: [PATCH 62/64] chore: lint --- samples/beta/cancel_operation.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/samples/beta/cancel_operation.py b/samples/beta/cancel_operation.py index 9240a468..a30fe2a4 100644 --- a/samples/beta/cancel_operation.py +++ b/samples/beta/cancel_operation.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys # [START automl_cancel_operation] @@ -60,4 +59,4 @@ def main(): if __name__ == "__main__": - main() \ No newline at end of file + main() From 009085e0a82d1d7729349746c2c8954d5d60e0a9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 16 Sep 2020 14:36:40 -0600 Subject: [PATCH 63/64] feat!: move to microgen (#61) See UPGRADING.md --- README.rst | 45 +- UPGRADING.md | 269 + docs/UPGRADING.md | 1 + docs/automl_v1/services.rst | 9 + docs/automl_v1/types.rst | 5 + docs/automl_v1beta1/services.rst | 12 + docs/automl_v1beta1/types.rst | 5 + docs/gapic/v1/api.rst | 6 - docs/gapic/v1/types.rst | 5 - docs/gapic/v1beta1/api.rst | 6 - docs/gapic/v1beta1/tables.rst | 5 - docs/gapic/v1beta1/types.rst | 5 - docs/index.rst | 22 +- google/cloud/automl/__init__.py | 200 + google/cloud/automl/py.typed | 2 + google/cloud/automl_v1/__init__.py | 201 +- .../cloud/automl_v1/gapic/auto_ml_client.py | 1911 ----- .../automl_v1/gapic/auto_ml_client_config.py | 132 - google/cloud/automl_v1/gapic/enums.py | 109 - .../gapic/prediction_service_client.py | 534 -- .../gapic/prediction_service_client_config.py | 39 - .../automl_v1/gapic/transports/__init__.py | 0 .../transports/auto_ml_grpc_transport.py | 386 - .../prediction_service_grpc_transport.py | 192 - google/cloud/automl_v1/proto/__init__.py | 0 .../automl_v1/proto/annotation_payload_pb2.py | 316 - .../proto/annotation_payload_pb2_grpc.py | 2 - .../automl_v1/proto/annotation_spec_pb2.py | 143 - .../proto/annotation_spec_pb2_grpc.py | 2 - .../automl_v1/proto/classification_pb2.py | 864 --- .../proto/classification_pb2_grpc.py | 2 - .../cloud/automl_v1/proto/data_items_pb2.py | 937 --- .../automl_v1/proto/data_items_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/dataset_pb2.py | 529 -- .../cloud/automl_v1/proto/dataset_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/detection_pb2.py | 475 -- .../automl_v1/proto/detection_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/geometry_pb2.py | 172 - .../automl_v1/proto/geometry_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/image_pb2.py | 736 -- .../cloud/automl_v1/proto/image_pb2_grpc.py | 2 - 
google/cloud/automl_v1/proto/io_pb2.py | 1597 ---- google/cloud/automl_v1/proto/io_pb2_grpc.py | 2 - .../automl_v1/proto/model_evaluation_pb2.py | 404 - .../proto/model_evaluation_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/model_pb2.py | 598 -- .../cloud/automl_v1/proto/model_pb2_grpc.py | 2 - .../cloud/automl_v1/proto/operations_pb2.py | 1131 --- .../automl_v1/proto/operations_pb2_grpc.py | 2 - .../automl_v1/proto/prediction_service_pb2.py | 907 --- .../proto/prediction_service_pb2_grpc.py | 124 - google/cloud/automl_v1/proto/service_pb2.py | 2136 ------ .../cloud/automl_v1/proto/service_pb2_grpc.py | 424 -- .../automl_v1/proto/text_extraction_pb2.py | 354 - .../proto/text_extraction_pb2_grpc.py | 2 - google/cloud/automl_v1/proto/text_pb2.py | 345 - google/cloud/automl_v1/proto/text_pb2_grpc.py | 2 - .../cloud/automl_v1/proto/text_segment_pb2.py | 139 - .../automl_v1/proto/text_segment_pb2_grpc.py | 2 - .../automl_v1/proto/text_sentiment_pb2.py | 333 - .../proto/text_sentiment_pb2_grpc.py | 2 - .../cloud/automl_v1/proto/translation_pb2.py | 376 - .../automl_v1/proto/translation_pb2_grpc.py | 2 - google/cloud/automl_v1/py.typed | 2 + google/cloud/automl_v1/services/__init__.py | 16 + .../services/auto_ml}/__init__.py | 20 +- .../services/auto_ml/async_client.py | 1853 +++++ .../automl_v1/services/auto_ml/client.py | 1972 +++++ .../automl_v1/services/auto_ml/pagers.py | 407 + .../services/auto_ml/transports/__init__.py | 36 + .../services/auto_ml/transports/base.py | 442 ++ .../services/auto_ml/transports/grpc.py | 771 ++ .../auto_ml/transports/grpc_asyncio.py | 773 ++ .../services/prediction_service/__init__.py} | 19 +- .../prediction_service/async_client.py | 486 ++ .../services/prediction_service/client.py | 621 ++ .../prediction_service/transports/__init__.py | 36 + .../prediction_service/transports/base.py | 144 + .../prediction_service/transports/grpc.py | 345 + .../transports/grpc_asyncio.py | 341 + google/cloud/automl_v1/types.py | 92 - google/cloud/automl_v1/types/__init__.py | 209 + .../automl_v1/types/annotation_payload.py | 105 + .../cloud/automl_v1/types/annotation_spec.py | 50 + .../cloud/automl_v1/types/classification.py | 249 + google/cloud/automl_v1/types/data_items.py | 221 + google/cloud/automl_v1/types/dataset.py | 144 + google/cloud/automl_v1/types/detection.py | 137 + google/cloud/automl_v1/types/geometry.py | 62 + google/cloud/automl_v1/types/image.py | 282 + google/cloud/automl_v1/types/io.py | 1556 ++++ google/cloud/automl_v1/types/model.py | 151 + .../cloud/automl_v1/types/model_evaluation.py | 143 + google/cloud/automl_v1/types/operations.py | 265 + .../automl_v1/types/prediction_service.py | 275 + google/cloud/automl_v1/types/service.py | 504 ++ google/cloud/automl_v1/types/text.py | 93 + .../cloud/automl_v1/types/text_extraction.py | 102 + google/cloud/automl_v1/types/text_segment.py | 51 + .../cloud/automl_v1/types/text_sentiment.py | 111 + google/cloud/automl_v1/types/translation.py | 105 + google/cloud/automl_v1beta1/__init__.py | 295 +- google/cloud/automl_v1beta1/gapic/__init__.py | 0 .../automl_v1beta1/gapic/auto_ml_client.py | 2470 ------ .../gapic/auto_ml_client_config.py | 162 - google/cloud/automl_v1beta1/gapic/enums.py | 158 - .../gapic/prediction_service_client.py | 514 -- .../gapic/prediction_service_client_config.py | 39 - .../gapic/transports/__init__.py | 0 .../transports/auto_ml_grpc_transport.py | 477 -- .../prediction_service_grpc_transport.py | 175 - google/cloud/automl_v1beta1/proto/__init__.py | 0 .../proto/annotation_payload.proto | 77 
- .../proto/annotation_payload_pb2.py | 417 -- .../proto/annotation_payload_pb2_grpc.py | 2 - .../proto/annotation_spec.proto | 48 - .../proto/annotation_spec_pb2.py | 143 - .../proto/annotation_spec_pb2_grpc.py | 2 - .../automl_v1beta1/proto/classification.proto | 216 - .../proto/classification_pb2.py | 1027 --- .../proto/classification_pb2_grpc.py | 2 - .../automl_v1beta1/proto/column_spec.proto | 78 - .../automl_v1beta1/proto/column_spec_pb2.py | 319 - .../proto/column_spec_pb2_grpc.py | 2 - .../automl_v1beta1/proto/data_items.proto | 221 - .../automl_v1beta1/proto/data_items_pb2.py | 1089 --- .../proto/data_items_pb2_grpc.py | 2 - .../automl_v1beta1/proto/data_stats.proto | 166 - .../automl_v1beta1/proto/data_stats_pb2.py | 1361 ---- .../proto/data_stats_pb2_grpc.py | 2 - .../automl_v1beta1/proto/data_types.proto | 105 - .../automl_v1beta1/proto/data_types_pb2.py | 441 -- .../proto/data_types_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/dataset.proto | 96 - .../cloud/automl_v1beta1/proto/dataset_pb2.py | 533 -- .../automl_v1beta1/proto/dataset_pb2_grpc.py | 2 - .../automl_v1beta1/proto/detection.proto | 135 - .../automl_v1beta1/proto/detection_pb2.py | 757 -- .../proto/detection_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/geometry.proto | 46 - .../automl_v1beta1/proto/geometry_pb2.py | 172 - .../automl_v1beta1/proto/geometry_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/image.proto | 193 - .../cloud/automl_v1beta1/proto/image_pb2.py | 726 -- .../automl_v1beta1/proto/image_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/io.proto | 1132 --- google/cloud/automl_v1beta1/proto/io_pb2.py | 1873 ----- .../cloud/automl_v1beta1/proto/io_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/model.proto | 108 - .../proto/model_evaluation.proto | 116 - .../proto/model_evaluation_pb2.py | 479 -- .../proto/model_evaluation_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/model_pb2.py | 580 -- .../automl_v1beta1/proto/model_pb2_grpc.py | 2 - .../automl_v1beta1/proto/operations.proto | 189 - .../automl_v1beta1/proto/operations_pb2.py | 1306 ---- .../proto/operations_pb2_grpc.py | 2 - .../proto/prediction_service.proto | 268 - .../proto/prediction_service_pb2.py | 906 --- .../proto/prediction_service_pb2_grpc.py | 104 - .../cloud/automl_v1beta1/proto/ranges.proto | 35 - .../cloud/automl_v1beta1/proto/ranges_pb2.py | 112 - .../automl_v1beta1/proto/ranges_pb2_grpc.py | 2 - .../automl_v1beta1/proto/regression.proto | 44 - .../automl_v1beta1/proto/regression_pb2.py | 178 - .../proto/regression_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/service.proto | 800 -- .../cloud/automl_v1beta1/proto/service_pb2.py | 3050 -------- .../automl_v1beta1/proto/service_pb2_grpc.py | 546 -- .../automl_v1beta1/proto/table_spec.proto | 78 - .../automl_v1beta1/proto/table_spec_pb2.py | 248 - .../proto/table_spec_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/tables.proto | 292 - .../cloud/automl_v1beta1/proto/tables_pb2.py | 984 --- .../automl_v1beta1/proto/tables_pb2_grpc.py | 2 - .../cloud/automl_v1beta1/proto/temporal.proto | 37 - .../automl_v1beta1/proto/temporal_pb2.py | 125 - .../automl_v1beta1/proto/temporal_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/text.proto | 65 - .../proto/text_extraction.proto | 68 - .../proto/text_extraction_pb2.py | 354 - .../proto/text_extraction_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/text_pb2.py | 345 - .../automl_v1beta1/proto/text_pb2_grpc.py | 2 - .../automl_v1beta1/proto/text_segment.proto | 41 - .../automl_v1beta1/proto/text_segment_pb2.py | 
139 - .../proto/text_segment_pb2_grpc.py | 2 - .../automl_v1beta1/proto/text_sentiment.proto | 80 - .../proto/text_sentiment_pb2.py | 357 - .../proto/text_sentiment_pb2_grpc.py | 2 - .../automl_v1beta1/proto/translation.proto | 69 - .../automl_v1beta1/proto/translation_pb2.py | 376 - .../proto/translation_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/proto/video.proto | 48 - .../cloud/automl_v1beta1/proto/video_pb2.py | 183 - .../automl_v1beta1/proto/video_pb2_grpc.py | 2 - google/cloud/automl_v1beta1/py.typed | 2 + .../cloud/automl_v1beta1/services/__init__.py | 16 + .../services/auto_ml/__init__.py | 24 + .../services/auto_ml/async_client.py | 2358 ++++++ .../automl_v1beta1/services/auto_ml/client.py | 2503 +++++++ .../automl_v1beta1/services/auto_ml/pagers.py | 665 ++ .../services/auto_ml/transports/__init__.py | 36 + .../services/auto_ml/transports/base.py | 561 ++ .../services/auto_ml/transports/grpc.py | 947 +++ .../auto_ml/transports/grpc_asyncio.py | 957 +++ .../services/prediction_service/__init__.py | 24 + .../prediction_service/async_client.py | 458 ++ .../services/prediction_service/client.py | 593 ++ .../prediction_service/transports/__init__.py | 36 + .../prediction_service/transports/base.py | 144 + .../prediction_service/transports/grpc.py | 326 + .../transports/grpc_asyncio.py | 322 + .../services/tables}/__init__.py | 0 .../{ => services}/tables/gcs_client.py | 0 .../{ => services}/tables/tables_client.py | 436 +- .../cloud/automl_v1beta1/tables/__init__.py | 0 google/cloud/automl_v1beta1/types.py | 114 - google/cloud/automl_v1beta1/types/__init__.py | 295 + .../types/annotation_payload.py | 131 + .../automl_v1beta1/types/annotation_spec.py | 50 + .../automl_v1beta1/types/classification.py | 308 + .../cloud/automl_v1beta1/types/column_spec.py | 98 + .../cloud/automl_v1beta1/types/data_items.py | 261 + .../cloud/automl_v1beta1/types/data_stats.py | 275 + .../cloud/automl_v1beta1/types/data_types.py | 111 + google/cloud/automl_v1beta1/types/dataset.py | 164 + .../cloud/automl_v1beta1/types/detection.py | 217 + google/cloud/automl_v1beta1/types/geometry.py | 63 + google/cloud/automl_v1beta1/types/image.py | 266 + google/cloud/automl_v1beta1/types/io.py | 1115 +++ google/cloud/automl_v1beta1/types/model.py | 161 + .../automl_v1beta1/types/model_evaluation.py | 166 + .../cloud/automl_v1beta1/types/operations.py | 308 + .../types/prediction_service.py | 257 + google/cloud/automl_v1beta1/types/ranges.py | 41 + .../cloud/automl_v1beta1/types/regression.py | 55 + google/cloud/automl_v1beta1/types/service.py | 704 ++ .../cloud/automl_v1beta1/types/table_spec.py | 93 + google/cloud/automl_v1beta1/types/tables.py | 369 + google/cloud/automl_v1beta1/types/temporal.py | 49 + google/cloud/automl_v1beta1/types/text.py | 93 + .../automl_v1beta1/types/text_extraction.py | 102 + .../automl_v1beta1/types/text_segment.py | 51 + .../automl_v1beta1/types/text_sentiment.py | 116 + .../cloud/automl_v1beta1/types/translation.py | 105 + google/cloud/automl_v1beta1/types/video.py | 50 + mypy.ini | 3 + noxfile.py | 7 +- samples/beta/batch_predict.py | 18 +- samples/beta/cancel_operation.py | 2 +- samples/beta/delete_dataset.py | 2 +- samples/beta/delete_dataset_test.py | 8 +- samples/beta/delete_model.py | 2 +- samples/beta/get_model.py | 8 +- samples/beta/get_model_evaluation.py | 11 +- samples/beta/get_model_evaluation_test.py | 9 +- samples/beta/get_operation_status.py | 2 +- samples/beta/get_operation_status_test.py | 4 +- samples/beta/import_dataset.py | 6 +- samples/beta/list_datasets.py | 9 +- 
samples/beta/list_models.py | 11 +- samples/beta/set_endpoint.py | 9 +- .../video_classification_create_dataset.py | 8 +- ...ideo_classification_create_dataset_test.py | 2 +- .../beta/video_classification_create_model.py | 9 +- .../video_classification_create_model_test.py | 3 +- .../video_object_tracking_create_dataset.py | 8 +- ...deo_object_tracking_create_dataset_test.py | 2 +- .../video_object_tracking_create_model.py | 8 +- ...video_object_tracking_create_model_test.py | 2 +- .../snippets/automl_translation_dataset.py | 27 +- samples/snippets/automl_translation_model.py | 40 +- .../snippets/automl_translation_predict.py | 8 +- samples/snippets/batch_predict.py | 20 +- samples/snippets/delete_dataset.py | 2 +- samples/snippets/delete_dataset_test.py | 8 +- samples/snippets/delete_model.py | 2 +- samples/snippets/deploy_model.py | 4 +- samples/snippets/export_dataset.py | 8 +- samples/snippets/get_dataset.py | 6 +- samples/snippets/get_model.py | 8 +- samples/snippets/get_model_evaluation.py | 11 +- samples/snippets/get_model_evaluation_test.py | 2 +- samples/snippets/get_operation_status.py | 2 +- samples/snippets/get_operation_status_test.py | 5 +- samples/snippets/import_dataset.py | 6 +- ...nguage_entity_extraction_create_dataset.py | 8 +- ...e_entity_extraction_create_dataset_test.py | 2 +- ...language_entity_extraction_create_model.py | 8 +- .../language_entity_extraction_predict.py | 8 +- ...language_entity_extraction_predict_test.py | 6 +- ...guage_sentiment_analysis_create_dataset.py | 8 +- ..._sentiment_analysis_create_dataset_test.py | 2 +- ...anguage_sentiment_analysis_create_model.py | 9 +- ...ge_sentiment_analysis_create_model_test.py | 7 +- .../language_sentiment_analysis_predict.py | 8 +- ...anguage_sentiment_analysis_predict_test.py | 6 +- ...uage_text_classification_create_dataset.py | 10 +- ...text_classification_create_dataset_test.py | 2 +- ...nguage_text_classification_create_model.py | 8 +- ...e_text_classification_create_model_test.py | 2 +- .../language_text_classification_predict.py | 8 +- ...nguage_text_classification_predict_test.py | 6 +- samples/snippets/list_datasets.py | 9 +- samples/snippets/list_model_evaluations.py | 6 +- samples/snippets/list_models.py | 12 +- samples/snippets/list_operation_status.py | 6 +- samples/snippets/model_test.py | 4 +- samples/snippets/translate_create_dataset.py | 8 +- .../snippets/translate_create_dataset_test.py | 2 +- samples/snippets/translate_create_model.py | 8 +- .../snippets/translate_create_model_test.py | 2 +- samples/snippets/translate_predict.py | 8 +- samples/snippets/translate_predict_test.py | 6 +- samples/snippets/undeploy_model.py | 2 +- .../vision_classification_create_dataset.py | 10 +- ...sion_classification_create_dataset_test.py | 2 +- .../vision_classification_create_model.py | 9 +- ...vision_classification_create_model_test.py | 7 +- ..._classification_deploy_model_node_count.py | 9 +- .../snippets/vision_classification_predict.py | 14 +- .../vision_classification_predict_test.py | 6 +- .../vision_object_detection_create_dataset.py | 8 +- ...on_object_detection_create_dataset_test.py | 2 +- .../vision_object_detection_create_model.py | 9 +- ...sion_object_detection_create_model_test.py | 7 +- ...bject_detection_deploy_model_node_count.py | 8 +- .../vision_object_detection_predict.py | 14 +- .../vision_object_detection_predict_test.py | 6 +- samples/tables/automl_tables_dataset.py | 18 +- samples/tables/automl_tables_model.py | 43 +- samples/tables/automl_tables_predict.py | 2 +- 
samples/tables/batch_predict_test.py | 4 +- samples/tables/endpoint_test.py | 2 +- samples/tables/predict_test.py | 4 +- scripts/fixup_automl_v1_keywords.py | 197 + scripts/fixup_automl_v1beta1_keywords.py | 203 + setup.py | 19 +- synth.metadata | 12 +- synth.py | 104 +- .../v1beta1/test_system_tables_client_v1.py | 3 +- tests/unit/gapic/automl_v1/__init__.py | 1 + tests/unit/gapic/automl_v1/test_auto_ml.py | 5000 +++++++++++++ .../automl_v1/test_prediction_service.py | 1234 +++ tests/unit/gapic/automl_v1beta1/__init__.py | 1 + .../unit/gapic/automl_v1beta1/test_auto_ml.py | 6622 +++++++++++++++++ .../automl_v1beta1/test_prediction_service.py | 1237 +++ tests/unit/gapic/v1/test_auto_ml_client_v1.py | 991 --- .../v1/test_prediction_service_client_v1.py | 165 - .../v1beta1/test_auto_ml_client_v1beta1.py | 1255 ---- .../test_prediction_service_client_v1beta1.py | 170 - .../v1beta1 => }/test_gcs_client_v1beta1.py | 6 +- .../test_tables_client_v1beta1.py | 871 ++- 354 files changed, 45761 insertions(+), 48392 deletions(-) create mode 100644 UPGRADING.md create mode 120000 docs/UPGRADING.md create mode 100644 docs/automl_v1/services.rst create mode 100644 docs/automl_v1/types.rst create mode 100644 docs/automl_v1beta1/services.rst create mode 100644 docs/automl_v1beta1/types.rst delete mode 100644 docs/gapic/v1/api.rst delete mode 100644 docs/gapic/v1/types.rst delete mode 100644 docs/gapic/v1beta1/api.rst delete mode 100644 docs/gapic/v1beta1/tables.rst delete mode 100644 docs/gapic/v1beta1/types.rst create mode 100644 google/cloud/automl/__init__.py create mode 100644 google/cloud/automl/py.typed delete mode 100644 google/cloud/automl_v1/gapic/auto_ml_client.py delete mode 100644 google/cloud/automl_v1/gapic/auto_ml_client_config.py delete mode 100644 google/cloud/automl_v1/gapic/enums.py delete mode 100644 google/cloud/automl_v1/gapic/prediction_service_client.py delete mode 100644 google/cloud/automl_v1/gapic/prediction_service_client_config.py delete mode 100644 google/cloud/automl_v1/gapic/transports/__init__.py delete mode 100644 google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py delete mode 100644 google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py delete mode 100644 google/cloud/automl_v1/proto/__init__.py delete mode 100644 google/cloud/automl_v1/proto/annotation_payload_pb2.py delete mode 100644 google/cloud/automl_v1/proto/annotation_payload_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/annotation_spec_pb2.py delete mode 100644 google/cloud/automl_v1/proto/annotation_spec_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/classification_pb2.py delete mode 100644 google/cloud/automl_v1/proto/classification_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/data_items_pb2.py delete mode 100644 google/cloud/automl_v1/proto/data_items_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/dataset_pb2.py delete mode 100644 google/cloud/automl_v1/proto/dataset_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/detection_pb2.py delete mode 100644 google/cloud/automl_v1/proto/detection_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/geometry_pb2.py delete mode 100644 google/cloud/automl_v1/proto/geometry_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/image_pb2.py delete mode 100644 google/cloud/automl_v1/proto/image_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/io_pb2.py delete mode 100644 google/cloud/automl_v1/proto/io_pb2_grpc.py delete mode 100644 
google/cloud/automl_v1/proto/model_evaluation_pb2.py delete mode 100644 google/cloud/automl_v1/proto/model_evaluation_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/model_pb2.py delete mode 100644 google/cloud/automl_v1/proto/model_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/operations_pb2.py delete mode 100644 google/cloud/automl_v1/proto/operations_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/prediction_service_pb2.py delete mode 100644 google/cloud/automl_v1/proto/prediction_service_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/service_pb2.py delete mode 100644 google/cloud/automl_v1/proto/service_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/text_extraction_pb2.py delete mode 100644 google/cloud/automl_v1/proto/text_extraction_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/text_pb2.py delete mode 100644 google/cloud/automl_v1/proto/text_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/text_segment_pb2.py delete mode 100644 google/cloud/automl_v1/proto/text_segment_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/text_sentiment_pb2.py delete mode 100644 google/cloud/automl_v1/proto/text_sentiment_pb2_grpc.py delete mode 100644 google/cloud/automl_v1/proto/translation_pb2.py delete mode 100644 google/cloud/automl_v1/proto/translation_pb2_grpc.py create mode 100644 google/cloud/automl_v1/py.typed create mode 100644 google/cloud/automl_v1/services/__init__.py rename google/cloud/{ => automl_v1/services/auto_ml}/__init__.py (66%) create mode 100644 google/cloud/automl_v1/services/auto_ml/async_client.py create mode 100644 google/cloud/automl_v1/services/auto_ml/client.py create mode 100644 google/cloud/automl_v1/services/auto_ml/pagers.py create mode 100644 google/cloud/automl_v1/services/auto_ml/transports/__init__.py create mode 100644 google/cloud/automl_v1/services/auto_ml/transports/base.py create mode 100644 google/cloud/automl_v1/services/auto_ml/transports/grpc.py create mode 100644 google/cloud/automl_v1/services/auto_ml/transports/grpc_asyncio.py rename google/cloud/{automl.py => automl_v1/services/prediction_service/__init__.py} (64%) create mode 100644 google/cloud/automl_v1/services/prediction_service/async_client.py create mode 100644 google/cloud/automl_v1/services/prediction_service/client.py create mode 100644 google/cloud/automl_v1/services/prediction_service/transports/__init__.py create mode 100644 google/cloud/automl_v1/services/prediction_service/transports/base.py create mode 100644 google/cloud/automl_v1/services/prediction_service/transports/grpc.py create mode 100644 google/cloud/automl_v1/services/prediction_service/transports/grpc_asyncio.py delete mode 100644 google/cloud/automl_v1/types.py create mode 100644 google/cloud/automl_v1/types/__init__.py create mode 100644 google/cloud/automl_v1/types/annotation_payload.py create mode 100644 google/cloud/automl_v1/types/annotation_spec.py create mode 100644 google/cloud/automl_v1/types/classification.py create mode 100644 google/cloud/automl_v1/types/data_items.py create mode 100644 google/cloud/automl_v1/types/dataset.py create mode 100644 google/cloud/automl_v1/types/detection.py create mode 100644 google/cloud/automl_v1/types/geometry.py create mode 100644 google/cloud/automl_v1/types/image.py create mode 100644 google/cloud/automl_v1/types/io.py create mode 100644 google/cloud/automl_v1/types/model.py create mode 100644 google/cloud/automl_v1/types/model_evaluation.py create mode 100644 
google/cloud/automl_v1/types/operations.py create mode 100644 google/cloud/automl_v1/types/prediction_service.py create mode 100644 google/cloud/automl_v1/types/service.py create mode 100644 google/cloud/automl_v1/types/text.py create mode 100644 google/cloud/automl_v1/types/text_extraction.py create mode 100644 google/cloud/automl_v1/types/text_segment.py create mode 100644 google/cloud/automl_v1/types/text_sentiment.py create mode 100644 google/cloud/automl_v1/types/translation.py delete mode 100644 google/cloud/automl_v1beta1/gapic/__init__.py delete mode 100644 google/cloud/automl_v1beta1/gapic/auto_ml_client.py delete mode 100644 google/cloud/automl_v1beta1/gapic/auto_ml_client_config.py delete mode 100644 google/cloud/automl_v1beta1/gapic/enums.py delete mode 100644 google/cloud/automl_v1beta1/gapic/prediction_service_client.py delete mode 100644 google/cloud/automl_v1beta1/gapic/prediction_service_client_config.py delete mode 100644 google/cloud/automl_v1beta1/gapic/transports/__init__.py delete mode 100644 google/cloud/automl_v1beta1/gapic/transports/auto_ml_grpc_transport.py delete mode 100644 google/cloud/automl_v1beta1/gapic/transports/prediction_service_grpc_transport.py delete mode 100644 google/cloud/automl_v1beta1/proto/__init__.py delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_payload.proto delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_payload_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_payload_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_spec.proto delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_spec_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/annotation_spec_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/classification.proto delete mode 100644 google/cloud/automl_v1beta1/proto/classification_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/classification_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/column_spec.proto delete mode 100644 google/cloud/automl_v1beta1/proto/column_spec_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/column_spec_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_items.proto delete mode 100644 google/cloud/automl_v1beta1/proto/data_items_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_items_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_stats.proto delete mode 100644 google/cloud/automl_v1beta1/proto/data_stats_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_stats_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_types.proto delete mode 100644 google/cloud/automl_v1beta1/proto/data_types_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/data_types_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/dataset.proto delete mode 100644 google/cloud/automl_v1beta1/proto/dataset_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/dataset_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/detection.proto delete mode 100644 google/cloud/automl_v1beta1/proto/detection_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/detection_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/geometry.proto delete mode 100644 google/cloud/automl_v1beta1/proto/geometry_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/geometry_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/image.proto delete mode 
100644 google/cloud/automl_v1beta1/proto/image_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/image_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/io.proto delete mode 100644 google/cloud/automl_v1beta1/proto/io_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/io_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/model.proto delete mode 100644 google/cloud/automl_v1beta1/proto/model_evaluation.proto delete mode 100644 google/cloud/automl_v1beta1/proto/model_evaluation_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/model_evaluation_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/model_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/model_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/operations.proto delete mode 100644 google/cloud/automl_v1beta1/proto/operations_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/operations_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/prediction_service.proto delete mode 100644 google/cloud/automl_v1beta1/proto/prediction_service_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/prediction_service_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/ranges.proto delete mode 100644 google/cloud/automl_v1beta1/proto/ranges_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/ranges_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/regression.proto delete mode 100644 google/cloud/automl_v1beta1/proto/regression_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/regression_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/service.proto delete mode 100644 google/cloud/automl_v1beta1/proto/service_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/service_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/table_spec.proto delete mode 100644 google/cloud/automl_v1beta1/proto/table_spec_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/table_spec_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/tables.proto delete mode 100644 google/cloud/automl_v1beta1/proto/tables_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/tables_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/temporal.proto delete mode 100644 google/cloud/automl_v1beta1/proto/temporal_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/temporal_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/text.proto delete mode 100644 google/cloud/automl_v1beta1/proto/text_extraction.proto delete mode 100644 google/cloud/automl_v1beta1/proto/text_extraction_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_extraction_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_segment.proto delete mode 100644 google/cloud/automl_v1beta1/proto/text_segment_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_segment_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_sentiment.proto delete mode 100644 google/cloud/automl_v1beta1/proto/text_sentiment_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/text_sentiment_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/translation.proto delete mode 100644 google/cloud/automl_v1beta1/proto/translation_pb2.py delete mode 100644 
google/cloud/automl_v1beta1/proto/translation_pb2_grpc.py delete mode 100644 google/cloud/automl_v1beta1/proto/video.proto delete mode 100644 google/cloud/automl_v1beta1/proto/video_pb2.py delete mode 100644 google/cloud/automl_v1beta1/proto/video_pb2_grpc.py create mode 100644 google/cloud/automl_v1beta1/py.typed create mode 100644 google/cloud/automl_v1beta1/services/__init__.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/__init__.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/async_client.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/client.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/pagers.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/transports/__init__.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/transports/base.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/transports/grpc.py create mode 100644 google/cloud/automl_v1beta1/services/auto_ml/transports/grpc_asyncio.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/__init__.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/async_client.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/client.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/transports/__init__.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/transports/base.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/transports/grpc.py create mode 100644 google/cloud/automl_v1beta1/services/prediction_service/transports/grpc_asyncio.py rename google/cloud/{automl_v1/gapic => automl_v1beta1/services/tables}/__init__.py (100%) rename google/cloud/automl_v1beta1/{ => services}/tables/gcs_client.py (100%) rename google/cloud/automl_v1beta1/{ => services}/tables/tables_client.py (90%) delete mode 100644 google/cloud/automl_v1beta1/tables/__init__.py delete mode 100644 google/cloud/automl_v1beta1/types.py create mode 100644 google/cloud/automl_v1beta1/types/__init__.py create mode 100644 google/cloud/automl_v1beta1/types/annotation_payload.py create mode 100644 google/cloud/automl_v1beta1/types/annotation_spec.py create mode 100644 google/cloud/automl_v1beta1/types/classification.py create mode 100644 google/cloud/automl_v1beta1/types/column_spec.py create mode 100644 google/cloud/automl_v1beta1/types/data_items.py create mode 100644 google/cloud/automl_v1beta1/types/data_stats.py create mode 100644 google/cloud/automl_v1beta1/types/data_types.py create mode 100644 google/cloud/automl_v1beta1/types/dataset.py create mode 100644 google/cloud/automl_v1beta1/types/detection.py create mode 100644 google/cloud/automl_v1beta1/types/geometry.py create mode 100644 google/cloud/automl_v1beta1/types/image.py create mode 100644 google/cloud/automl_v1beta1/types/io.py create mode 100644 google/cloud/automl_v1beta1/types/model.py create mode 100644 google/cloud/automl_v1beta1/types/model_evaluation.py create mode 100644 google/cloud/automl_v1beta1/types/operations.py create mode 100644 google/cloud/automl_v1beta1/types/prediction_service.py create mode 100644 google/cloud/automl_v1beta1/types/ranges.py create mode 100644 google/cloud/automl_v1beta1/types/regression.py create mode 100644 google/cloud/automl_v1beta1/types/service.py create mode 100644 google/cloud/automl_v1beta1/types/table_spec.py create mode 100644 google/cloud/automl_v1beta1/types/tables.py create mode 100644 
google/cloud/automl_v1beta1/types/temporal.py create mode 100644 google/cloud/automl_v1beta1/types/text.py create mode 100644 google/cloud/automl_v1beta1/types/text_extraction.py create mode 100644 google/cloud/automl_v1beta1/types/text_segment.py create mode 100644 google/cloud/automl_v1beta1/types/text_sentiment.py create mode 100644 google/cloud/automl_v1beta1/types/translation.py create mode 100644 google/cloud/automl_v1beta1/types/video.py create mode 100644 mypy.ini create mode 100644 scripts/fixup_automl_v1_keywords.py create mode 100644 scripts/fixup_automl_v1beta1_keywords.py create mode 100644 tests/unit/gapic/automl_v1/__init__.py create mode 100644 tests/unit/gapic/automl_v1/test_auto_ml.py create mode 100644 tests/unit/gapic/automl_v1/test_prediction_service.py create mode 100644 tests/unit/gapic/automl_v1beta1/__init__.py create mode 100644 tests/unit/gapic/automl_v1beta1/test_auto_ml.py create mode 100644 tests/unit/gapic/automl_v1beta1/test_prediction_service.py delete mode 100644 tests/unit/gapic/v1/test_auto_ml_client_v1.py delete mode 100644 tests/unit/gapic/v1/test_prediction_service_client_v1.py delete mode 100644 tests/unit/gapic/v1beta1/test_auto_ml_client_v1beta1.py delete mode 100644 tests/unit/gapic/v1beta1/test_prediction_service_client_v1beta1.py rename tests/unit/{gapic/v1beta1 => }/test_gcs_client_v1beta1.py (97%) rename tests/unit/{gapic/v1beta1 => }/test_tables_client_v1beta1.py (68%) diff --git a/README.rst b/README.rst index bfab72eb..af1699ec 100644 --- a/README.rst +++ b/README.rst @@ -56,7 +56,9 @@ Python >= 3.5 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is google-cloud-automl==1.0.1. Mac/Linux @@ -80,18 +82,6 @@ Windows \Scripts\activate \Scripts\pip.exe install google-cloud-automl -Example Usage -~~~~~~~~~~~~~ - -.. code-block:: python - - from google.cloud.automl_v1beta1 import PredictionServiceClient - - client = PredictionServiceClient() - model_path = client.model_path('my-project-123', 'us-central', 'model-name') - payload = {...} - params = {'foo': 1} - response = client.predict(model_path, payload, params=params) Next Steps ~~~~~~~~~~ @@ -100,32 +90,3 @@ Next Steps API to see other available methods on the client. - Read the `Product documentation`_ to learn more about the product and see How-to Guides. - -Making & Testing Local Changes -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If you want to make changes to this library, here is how to set up your -development environment: - -1. Make sure you have `virtualenv`_ installed and activated as shown above. -2. Run the following one-time setup (it will be persisted in your virtualenv): - - .. code-block:: console - - pip install -r ../docs/requirements.txt - pip install -U nox mock pytest - -3. If you want to run all tests, you will need a billing-enabled - `GCP project`_, and a `service account`_ with access to the AutoML APIs. - Note: the first time the tests run in a new project it will take a _long_ - time, on the order of 2-3 hours. This is one-time setup that will be skipped - in future runs. - -.. _service account: https://cloud.google.com/iam/docs/creating-managing-service-accounts -.. _GCP project: https://cloud.google.com/resource-manager/docs/creating-managing-projects - -.. 
code-block:: console
-
-    export PROJECT_ID= GOOGLE_APPLICATION_CREDENTIALS=
-    nox
-
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 00000000..d7b3ec05
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,269 @@
+# 2.0.0 Migration Guide
+
+The 2.0 release of the `google-cloud-automl` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-automl/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+
+The 2.0.0 release requires Python 3.6+.
+
+
+## Method Calls
+
+> **WARNING**: Breaking change
+
+Methods expect request objects. We provide a script that will convert most common use cases.
+
+* Install the library:
+
+```sh
+python3 -m pip install google-cloud-automl
+```
+
+* The script `fixup_automl_{version}_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ fixup_automl_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+from google.cloud import automl
+
+project_id = "YOUR_PROJECT_ID"
+model_id = "YOUR_MODEL_ID"
+
+client = automl.AutoMlClient()
+# Get the full path of the model.
+model_full_id = client.model_path(project_id, "us-central1", model_id)
+response = client.deploy_model(model_full_id)
+```
+
+**After:**
+```py
+from google.cloud import automl
+
+project_id = "YOUR_PROJECT_ID"
+model_id = "YOUR_MODEL_ID"
+
+client = automl.AutoMlClient()
+# Get the full path of the model.
+model_full_id = client.model_path(project_id, "us-central1", model_id)
+response = client.deploy_model(name=model_full_id)
+```
+
+### More Details
+
+In `google-cloud-automl<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+    def batch_predict(
+        self,
+        name,
+        input_config,
+        output_config,
+        params=None,
+        retry=google.api_core.gapic_v1.method.DEFAULT,
+        timeout=google.api_core.gapic_v1.method.DEFAULT,
+        metadata=None,
+    ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the [`google.api.method_signature` annotation](https://github.com/googleapis/googleapis/blob/2db5725bf898b544a0cf951e1694d3b0fce5eda3/google/cloud/automl/v1/prediction_service.proto#L86) specified by the API producer.
+
+**After:**
+```py
+def batch_predict(
+    self,
+    request: prediction_service.BatchPredictRequest = None,
+    *,
+    name: str = None,
+    input_config: io.BatchPredictInputConfig = None,
+    output_config: io.BatchPredictOutputConfig = None,
+    params: Sequence[prediction_service.BatchPredictRequest.ParamsEntry] = None,
+    retry: retries.Retry = gapic_v1.method.DEFAULT,
+    timeout: float = None,
+    metadata: Sequence[Tuple[str, str]] = (),
+) -> operation.Operation:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+
+Both of these calls are valid:
+
+```py
+response = client.batch_predict(
+    request={
+        "name": name,
+        "input_config": input_config,
+        "output_config": output_config,
+        "params": params,
+    }
+)
+```
+
+```py
+response = client.batch_predict(
+    name=name,
+    input_config=input_config,
+    output_config=output_config,
+    params=params,
+)
+```
+
+This call is invalid because it mixes `request` with the keyword argument `params`. Executing this code
+will result in an error.
+
+```py
+response = client.batch_predict(
+    request={
+        "name": name,
+        "input_config": input_config,
+        "output_config": output_config,
+    },
+    params=params,
+)
+```
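+
+As a minimal illustrative sketch (assuming `client`, `name`, `input_config`, `output_config`, and `params` are defined as in the snippets above), you can avoid mixing the two styles by constructing the request message explicitly and passing only `request`:
+
+```py
+from google.cloud import automl
+
+# Build the request message first; no flattened keyword arguments
+# accompany it, so the mutual-exclusivity error above cannot occur.
+request = automl.BatchPredictRequest(
+    name=name,
+    input_config=input_config,
+    output_config=output_config,
+    params=params,
+)
+response = client.batch_predict(request=request)
+```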
+
+The method `list_datasets` takes an argument `filter` instead of `filter_`.
+
+**Before**
+```py
+from google.cloud import automl
+
+project_id = "PROJECT_ID"
+
+client = automl.AutoMlClient()
+project_location = client.location_path(project_id, "us-central1")
+
+# List all the datasets available in the region.
+response = client.list_datasets(project_location, filter_="")
+```
+
+**After**
+```py
+from google.cloud import automl
+
+project_id = "PROJECT_ID"
+client = automl.AutoMlClient()
+# A resource that represents a Google Cloud Platform location.
+project_location = f"projects/{project_id}/locations/us-central1"
+
+# List all the datasets available in the region.
+response = client.list_datasets(parent=project_location, filter="")
+```
+
+### Changes to v1beta1 Tables Client
+
+Optional arguments are now keyword-only arguments and *must* be passed by name.
+See [PEP 3102](https://www.python.org/dev/peps/pep-3102/).
+
+**Before**
+```py
+    def predict(
+        self,
+        inputs,
+        model=None,
+        model_name=None,
+        model_display_name=None,
+        feature_importance=False,
+        project=None,
+        region=None,
+        **kwargs
+    ):
+```
+
+**After**
+```py
+    def predict(
+        self,
+        inputs,
+        *,
+        model=None,
+        model_name=None,
+        model_display_name=None,
+        feature_importance=False,
+        project=None,
+        region=None,
+        **kwargs,
+    ):
+```
+
+`**kwargs` passed to methods must be either (1) kwargs on the underlying method (`retry`, `timeout`, or `metadata`) or (2) attributes of the request object.
+
+The following call is valid because `filter` is an attribute of `automl_v1beta1.ListDatasetsRequest`:
+
+```py
+from google.cloud import automl_v1beta1 as automl
+
+client = automl.TablesClient(project=project_id, region=compute_region)
+
+# List all the datasets available in the region by applying a filter.
+response = client.list_datasets(filter=filter)
+```
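+
+A minimal sketch of case (1), assuming `project_id` and `compute_region` are defined as above: `retry`, `timeout`, and `metadata` are keyword arguments on the underlying API method, so they pass straight through `**kwargs` rather than becoming request attributes. The retry deadline and timeout values here are illustrative only.
+
+```py
+from google.api_core.retry import Retry
+from google.cloud import automl_v1beta1 as automl
+
+client = automl.TablesClient(project=project_id, region=compute_region)
+
+# `retry` and `timeout` are forwarded to the underlying RPC instead of
+# being treated as fields on the ListDatasetsRequest.
+response = client.list_datasets(
+    retry=Retry(deadline=120.0),
+    timeout=30.0,
+)
+```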
+
+
+## Enums and types
+
+> **WARNING**: Breaking change
+
+The submodules `enums` and `types` have been removed.
+
+**Before:**
+```py
+from google.cloud import automl
+
+gcs_source = automl.types.GcsSource(input_uris=["gs://YOUR_BUCKET_ID/path/to/your/input/csv_or_jsonl"])
+deployment_state = automl.enums.Model.DeploymentState.DEPLOYED
+```
+
+**After:**
+```py
+from google.cloud import automl
+
+gcs_source = automl.GcsSource(input_uris=["gs://YOUR_BUCKET_ID/path/to/your/input/csv_or_jsonl"])
+deployment_state = automl.Model.DeploymentState.DEPLOYED
+```
+
+## Resource Path Helper Methods
+
+The following resource name helpers have been removed. Please construct the strings manually.
+
+```py
+from google.cloud import automl
+
+project = "my-project"
+location = "us-central1"
+dataset = "my-dataset"
+model = "my-model"
+annotation_spec = "test-annotation"
+model_evaluation = "test-evaluation"
+
+# AutoMlClient
+annotation_spec_path = f"projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}"
+location_path = f"projects/{project}/locations/{location}"
+model_evaluation_path = f"projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}"
+
+# PredictionServiceClient
+model_path = f"projects/{project}/locations/{location}/models/{model}"
+# Alternatively, you can use `model_path` from AutoMlClient.
+model_path = automl.AutoMlClient.model_path(project, location, model)
+```
\ No newline at end of file
diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md
new file mode 120000
index 00000000..01097c8c
--- /dev/null
+++ b/docs/UPGRADING.md
@@ -0,0 +1 @@
+../UPGRADING.md
\ No newline at end of file
diff --git a/docs/automl_v1/services.rst b/docs/automl_v1/services.rst
new file mode 100644
index 00000000..b57ca45e
--- /dev/null
+++ b/docs/automl_v1/services.rst
@@ -0,0 +1,9 @@
+Services for Google Cloud Automl v1 API
+=======================================
+
+.. automodule:: google.cloud.automl_v1.services.auto_ml
+    :members:
+    :inherited-members:
+.. automodule:: google.cloud.automl_v1.services.prediction_service
+    :members:
+    :inherited-members:
diff --git a/docs/automl_v1/types.rst b/docs/automl_v1/types.rst
new file mode 100644
index 00000000..47a76a80
--- /dev/null
+++ b/docs/automl_v1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Automl v1 API
+====================================
+
+.. automodule:: google.cloud.automl_v1.types
+    :members:
diff --git a/docs/automl_v1beta1/services.rst b/docs/automl_v1beta1/services.rst
new file mode 100644
index 00000000..787e8566
--- /dev/null
+++ b/docs/automl_v1beta1/services.rst
@@ -0,0 +1,12 @@
+Services for Google Cloud Automl v1beta1 API
+============================================
+
+.. automodule:: google.cloud.automl_v1beta1.services.auto_ml
+    :members:
+    :inherited-members:
+.. automodule:: google.cloud.automl_v1beta1.services.prediction_service
+    :members:
+    :inherited-members:
+.. automodule:: google.cloud.automl_v1beta1.services.tables
+    :members:
+    :inherited-members:
diff --git a/docs/automl_v1beta1/types.rst b/docs/automl_v1beta1/types.rst
new file mode 100644
index 00000000..bf190b5b
--- /dev/null
+++ b/docs/automl_v1beta1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Cloud Automl v1beta1 API
+=========================================
+
+.. automodule:: google.cloud.automl_v1beta1.types
+    :members:
diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst
deleted file mode 100644
index 757fc1a0..00000000
--- a/docs/gapic/v1/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Client for Cloud AutoML API
-===========================
-
-.. automodule:: google.cloud.automl_v1
-    :members:
-    :inherited-members:
\ No newline at end of file
diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst
deleted file mode 100644
index 5fd25134..00000000
--- a/docs/gapic/v1/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Types for Cloud AutoML API Client
-=================================
-
-.. 
automodule:: google.cloud.automl_v1.types - :members: \ No newline at end of file diff --git a/docs/gapic/v1beta1/api.rst b/docs/gapic/v1beta1/api.rst deleted file mode 100644 index 268baa5b..00000000 --- a/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Cloud AutoML API -=========================== - -.. automodule:: google.cloud.automl_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v1beta1/tables.rst b/docs/gapic/v1beta1/tables.rst deleted file mode 100644 index 54ed6a20..00000000 --- a/docs/gapic/v1beta1/tables.rst +++ /dev/null @@ -1,5 +0,0 @@ -A tables-specific client for AutoML -=================================== - -.. automodule:: google.cloud.automl_v1beta1.tables.tables_client - :members: diff --git a/docs/gapic/v1beta1/types.rst b/docs/gapic/v1beta1/types.rst deleted file mode 100644 index 27ce6644..00000000 --- a/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Cloud AutoML API Client -================================= - -.. automodule:: google.cloud.automl_v1beta1.types - :members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 5473e0d7..05219311 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -10,8 +10,8 @@ v1 API Reference .. toctree:: :maxdepth: 2 - gapic/v1/api - gapic/v1/types + automl_v1/services + automl_v1/types Previous beta release v1beta1 is provided as well. @@ -22,9 +22,19 @@ v1beta1 API Reference .. toctree:: :maxdepth: 2 - gapic/v1beta1/api - gapic/v1beta1/types - gapic/v1beta1/tables + automl_v1beta1/services + automl_v1beta1/types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. toctree:: + :maxdepth: 2 + + UPGRADING @@ -37,3 +47,5 @@ For a list of all ``google-cloud-automl`` releases: :maxdepth: 2 changelog + + diff --git a/google/cloud/automl/__init__.py b/google/cloud/automl/__init__.py new file mode 100644 index 00000000..e6e6a762 --- /dev/null +++ b/google/cloud/automl/__init__.py @@ -0,0 +1,200 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.automl_v1.services.auto_ml.async_client import AutoMlAsyncClient +from google.cloud.automl_v1.services.auto_ml.client import AutoMlClient +from google.cloud.automl_v1.services.prediction_service.async_client import ( + PredictionServiceAsyncClient, +) +from google.cloud.automl_v1.services.prediction_service.client import ( + PredictionServiceClient, +) +from google.cloud.automl_v1.types.annotation_payload import AnnotationPayload +from google.cloud.automl_v1.types.annotation_spec import AnnotationSpec +from google.cloud.automl_v1.types.classification import ClassificationAnnotation +from google.cloud.automl_v1.types.classification import ClassificationEvaluationMetrics +from google.cloud.automl_v1.types.classification import ClassificationType +from google.cloud.automl_v1.types.data_items import Document +from google.cloud.automl_v1.types.data_items import DocumentDimensions +from google.cloud.automl_v1.types.data_items import ExamplePayload +from google.cloud.automl_v1.types.data_items import Image +from google.cloud.automl_v1.types.data_items import TextSnippet +from google.cloud.automl_v1.types.dataset import Dataset +from google.cloud.automl_v1.types.detection import BoundingBoxMetricsEntry +from google.cloud.automl_v1.types.detection import ImageObjectDetectionAnnotation +from google.cloud.automl_v1.types.detection import ImageObjectDetectionEvaluationMetrics +from google.cloud.automl_v1.types.geometry import BoundingPoly +from google.cloud.automl_v1.types.geometry import NormalizedVertex +from google.cloud.automl_v1.types.image import ImageClassificationDatasetMetadata +from google.cloud.automl_v1.types.image import ( + ImageClassificationModelDeploymentMetadata, +) +from google.cloud.automl_v1.types.image import ImageClassificationModelMetadata +from google.cloud.automl_v1.types.image import ImageObjectDetectionDatasetMetadata +from google.cloud.automl_v1.types.image import ( + ImageObjectDetectionModelDeploymentMetadata, +) +from google.cloud.automl_v1.types.image import ImageObjectDetectionModelMetadata +from google.cloud.automl_v1.types.io import BatchPredictInputConfig +from google.cloud.automl_v1.types.io import BatchPredictOutputConfig +from google.cloud.automl_v1.types.io import DocumentInputConfig +from google.cloud.automl_v1.types.io import GcsDestination +from google.cloud.automl_v1.types.io import GcsSource +from google.cloud.automl_v1.types.io import InputConfig +from google.cloud.automl_v1.types.io import ModelExportOutputConfig +from google.cloud.automl_v1.types.io import OutputConfig +from google.cloud.automl_v1.types.model import Model +from google.cloud.automl_v1.types.model_evaluation import ModelEvaluation +from google.cloud.automl_v1.types.operations import BatchPredictOperationMetadata +from google.cloud.automl_v1.types.operations import CreateDatasetOperationMetadata +from google.cloud.automl_v1.types.operations import CreateModelOperationMetadata +from google.cloud.automl_v1.types.operations import DeleteOperationMetadata +from google.cloud.automl_v1.types.operations import DeployModelOperationMetadata +from google.cloud.automl_v1.types.operations import ExportDataOperationMetadata +from google.cloud.automl_v1.types.operations import ExportModelOperationMetadata +from google.cloud.automl_v1.types.operations import ImportDataOperationMetadata +from google.cloud.automl_v1.types.operations import OperationMetadata +from google.cloud.automl_v1.types.operations import UndeployModelOperationMetadata +from 
google.cloud.automl_v1.types.prediction_service import BatchPredictRequest +from google.cloud.automl_v1.types.prediction_service import BatchPredictResult +from google.cloud.automl_v1.types.prediction_service import PredictRequest +from google.cloud.automl_v1.types.prediction_service import PredictResponse +from google.cloud.automl_v1.types.service import CreateDatasetRequest +from google.cloud.automl_v1.types.service import CreateModelRequest +from google.cloud.automl_v1.types.service import DeleteDatasetRequest +from google.cloud.automl_v1.types.service import DeleteModelRequest +from google.cloud.automl_v1.types.service import DeployModelRequest +from google.cloud.automl_v1.types.service import ExportDataRequest +from google.cloud.automl_v1.types.service import ExportModelRequest +from google.cloud.automl_v1.types.service import GetAnnotationSpecRequest +from google.cloud.automl_v1.types.service import GetDatasetRequest +from google.cloud.automl_v1.types.service import GetModelEvaluationRequest +from google.cloud.automl_v1.types.service import GetModelRequest +from google.cloud.automl_v1.types.service import ImportDataRequest +from google.cloud.automl_v1.types.service import ListDatasetsRequest +from google.cloud.automl_v1.types.service import ListDatasetsResponse +from google.cloud.automl_v1.types.service import ListModelEvaluationsRequest +from google.cloud.automl_v1.types.service import ListModelEvaluationsResponse +from google.cloud.automl_v1.types.service import ListModelsRequest +from google.cloud.automl_v1.types.service import ListModelsResponse +from google.cloud.automl_v1.types.service import UndeployModelRequest +from google.cloud.automl_v1.types.service import UpdateDatasetRequest +from google.cloud.automl_v1.types.service import UpdateModelRequest +from google.cloud.automl_v1.types.text import TextClassificationDatasetMetadata +from google.cloud.automl_v1.types.text import TextClassificationModelMetadata +from google.cloud.automl_v1.types.text import TextExtractionDatasetMetadata +from google.cloud.automl_v1.types.text import TextExtractionModelMetadata +from google.cloud.automl_v1.types.text import TextSentimentDatasetMetadata +from google.cloud.automl_v1.types.text import TextSentimentModelMetadata +from google.cloud.automl_v1.types.text_extraction import TextExtractionAnnotation +from google.cloud.automl_v1.types.text_extraction import TextExtractionEvaluationMetrics +from google.cloud.automl_v1.types.text_segment import TextSegment +from google.cloud.automl_v1.types.text_sentiment import TextSentimentAnnotation +from google.cloud.automl_v1.types.text_sentiment import TextSentimentEvaluationMetrics +from google.cloud.automl_v1.types.translation import TranslationAnnotation +from google.cloud.automl_v1.types.translation import TranslationDatasetMetadata +from google.cloud.automl_v1.types.translation import TranslationEvaluationMetrics +from google.cloud.automl_v1.types.translation import TranslationModelMetadata + +__all__ = ( + "AnnotationPayload", + "AnnotationSpec", + "AutoMlAsyncClient", + "AutoMlClient", + "BatchPredictInputConfig", + "BatchPredictOperationMetadata", + "BatchPredictOutputConfig", + "BatchPredictRequest", + "BatchPredictResult", + "BoundingBoxMetricsEntry", + "BoundingPoly", + "ClassificationAnnotation", + "ClassificationEvaluationMetrics", + "ClassificationType", + "CreateDatasetOperationMetadata", + "CreateDatasetRequest", + "CreateModelOperationMetadata", + "CreateModelRequest", + "Dataset", + "DeleteDatasetRequest", + "DeleteModelRequest", + 
"DeleteOperationMetadata", + "DeployModelOperationMetadata", + "DeployModelRequest", + "Document", + "DocumentDimensions", + "DocumentInputConfig", + "ExamplePayload", + "ExportDataOperationMetadata", + "ExportDataRequest", + "ExportModelOperationMetadata", + "ExportModelRequest", + "GcsDestination", + "GcsSource", + "GetAnnotationSpecRequest", + "GetDatasetRequest", + "GetModelEvaluationRequest", + "GetModelRequest", + "Image", + "ImageClassificationDatasetMetadata", + "ImageClassificationModelDeploymentMetadata", + "ImageClassificationModelMetadata", + "ImageObjectDetectionAnnotation", + "ImageObjectDetectionDatasetMetadata", + "ImageObjectDetectionEvaluationMetrics", + "ImageObjectDetectionModelDeploymentMetadata", + "ImageObjectDetectionModelMetadata", + "ImportDataOperationMetadata", + "ImportDataRequest", + "InputConfig", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListModelEvaluationsRequest", + "ListModelEvaluationsResponse", + "ListModelsRequest", + "ListModelsResponse", + "Model", + "ModelEvaluation", + "ModelExportOutputConfig", + "NormalizedVertex", + "OperationMetadata", + "OutputConfig", + "PredictRequest", + "PredictResponse", + "PredictionServiceAsyncClient", + "PredictionServiceClient", + "TextClassificationDatasetMetadata", + "TextClassificationModelMetadata", + "TextExtractionAnnotation", + "TextExtractionDatasetMetadata", + "TextExtractionEvaluationMetrics", + "TextExtractionModelMetadata", + "TextSegment", + "TextSentimentAnnotation", + "TextSentimentDatasetMetadata", + "TextSentimentEvaluationMetrics", + "TextSentimentModelMetadata", + "TextSnippet", + "TranslationAnnotation", + "TranslationDatasetMetadata", + "TranslationEvaluationMetrics", + "TranslationModelMetadata", + "UndeployModelOperationMetadata", + "UndeployModelRequest", + "UpdateDatasetRequest", + "UpdateModelRequest", +) diff --git a/google/cloud/automl/py.typed b/google/cloud/automl/py.typed new file mode 100644 index 00000000..0560ba18 --- /dev/null +++ b/google/cloud/automl/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-automl package uses inline types. diff --git a/google/cloud/automl_v1/__init__.py b/google/cloud/automl_v1/__init__.py index 3c9ade66..b5f76f81 100644 --- a/google/cloud/automl_v1/__init__.py +++ b/google/cloud/automl_v1/__init__.py @@ -1,52 +1,189 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.automl_v1 import types -from google.cloud.automl_v1.gapic import auto_ml_client -from google.cloud.automl_v1.gapic import enums -from google.cloud.automl_v1.gapic import prediction_service_client - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. 
" - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class PredictionServiceClient(prediction_service_client.PredictionServiceClient): - __doc__ = prediction_service_client.PredictionServiceClient.__doc__ - enums = enums - - -class AutoMlClient(auto_ml_client.AutoMlClient): - __doc__ = auto_ml_client.AutoMlClient.__doc__ - enums = enums +from .services.auto_ml import AutoMlClient +from .services.prediction_service import PredictionServiceClient +from .types.annotation_payload import AnnotationPayload +from .types.annotation_spec import AnnotationSpec +from .types.classification import ClassificationAnnotation +from .types.classification import ClassificationEvaluationMetrics +from .types.classification import ClassificationType +from .types.data_items import Document +from .types.data_items import DocumentDimensions +from .types.data_items import ExamplePayload +from .types.data_items import Image +from .types.data_items import TextSnippet +from .types.dataset import Dataset +from .types.detection import BoundingBoxMetricsEntry +from .types.detection import ImageObjectDetectionAnnotation +from .types.detection import ImageObjectDetectionEvaluationMetrics +from .types.geometry import BoundingPoly +from .types.geometry import NormalizedVertex +from .types.image import ImageClassificationDatasetMetadata +from .types.image import ImageClassificationModelDeploymentMetadata +from .types.image import ImageClassificationModelMetadata +from .types.image import ImageObjectDetectionDatasetMetadata +from .types.image import ImageObjectDetectionModelDeploymentMetadata +from .types.image import ImageObjectDetectionModelMetadata +from .types.io import BatchPredictInputConfig +from .types.io import BatchPredictOutputConfig +from .types.io import DocumentInputConfig +from .types.io import GcsDestination +from .types.io import GcsSource +from .types.io import InputConfig +from .types.io import ModelExportOutputConfig +from .types.io import OutputConfig +from .types.model import Model +from .types.model_evaluation import ModelEvaluation +from .types.operations import BatchPredictOperationMetadata +from .types.operations import CreateDatasetOperationMetadata +from .types.operations import CreateModelOperationMetadata +from .types.operations import DeleteOperationMetadata +from .types.operations import DeployModelOperationMetadata +from .types.operations import ExportDataOperationMetadata +from .types.operations import ExportModelOperationMetadata +from .types.operations import ImportDataOperationMetadata +from .types.operations import OperationMetadata +from .types.operations import UndeployModelOperationMetadata +from .types.prediction_service import BatchPredictRequest +from .types.prediction_service import BatchPredictResult +from .types.prediction_service import PredictRequest +from .types.prediction_service import PredictResponse +from .types.service import CreateDatasetRequest +from .types.service import CreateModelRequest +from .types.service import DeleteDatasetRequest +from .types.service import DeleteModelRequest +from .types.service import DeployModelRequest +from .types.service import ExportDataRequest +from .types.service import ExportModelRequest +from .types.service import GetAnnotationSpecRequest +from .types.service import GetDatasetRequest +from .types.service import GetModelEvaluationRequest +from .types.service import GetModelRequest +from 
.types.service import ImportDataRequest +from .types.service import ListDatasetsRequest +from .types.service import ListDatasetsResponse +from .types.service import ListModelEvaluationsRequest +from .types.service import ListModelEvaluationsResponse +from .types.service import ListModelsRequest +from .types.service import ListModelsResponse +from .types.service import UndeployModelRequest +from .types.service import UpdateDatasetRequest +from .types.service import UpdateModelRequest +from .types.text import TextClassificationDatasetMetadata +from .types.text import TextClassificationModelMetadata +from .types.text import TextExtractionDatasetMetadata +from .types.text import TextExtractionModelMetadata +from .types.text import TextSentimentDatasetMetadata +from .types.text import TextSentimentModelMetadata +from .types.text_extraction import TextExtractionAnnotation +from .types.text_extraction import TextExtractionEvaluationMetrics +from .types.text_segment import TextSegment +from .types.text_sentiment import TextSentimentAnnotation +from .types.text_sentiment import TextSentimentEvaluationMetrics +from .types.translation import TranslationAnnotation +from .types.translation import TranslationDatasetMetadata +from .types.translation import TranslationEvaluationMetrics +from .types.translation import TranslationModelMetadata __all__ = ( - "enums", - "types", + "AnnotationPayload", + "AnnotationSpec", + "BatchPredictInputConfig", + "BatchPredictOperationMetadata", + "BatchPredictOutputConfig", + "BatchPredictRequest", + "BatchPredictResult", + "BoundingBoxMetricsEntry", + "BoundingPoly", + "ClassificationAnnotation", + "ClassificationEvaluationMetrics", + "ClassificationType", + "CreateDatasetOperationMetadata", + "CreateDatasetRequest", + "CreateModelOperationMetadata", + "CreateModelRequest", + "Dataset", + "DeleteDatasetRequest", + "DeleteModelRequest", + "DeleteOperationMetadata", + "DeployModelOperationMetadata", + "DeployModelRequest", + "Document", + "DocumentDimensions", + "DocumentInputConfig", + "ExamplePayload", + "ExportDataOperationMetadata", + "ExportDataRequest", + "ExportModelOperationMetadata", + "ExportModelRequest", + "GcsDestination", + "GcsSource", + "GetAnnotationSpecRequest", + "GetDatasetRequest", + "GetModelEvaluationRequest", + "GetModelRequest", + "Image", + "ImageClassificationDatasetMetadata", + "ImageClassificationModelDeploymentMetadata", + "ImageClassificationModelMetadata", + "ImageObjectDetectionAnnotation", + "ImageObjectDetectionDatasetMetadata", + "ImageObjectDetectionEvaluationMetrics", + "ImageObjectDetectionModelDeploymentMetadata", + "ImageObjectDetectionModelMetadata", + "ImportDataOperationMetadata", + "ImportDataRequest", + "InputConfig", + "ListDatasetsRequest", + "ListDatasetsResponse", + "ListModelEvaluationsRequest", + "ListModelEvaluationsResponse", + "ListModelsRequest", + "ListModelsResponse", + "Model", + "ModelEvaluation", + "ModelExportOutputConfig", + "NormalizedVertex", + "OperationMetadata", + "OutputConfig", + "PredictRequest", + "PredictResponse", "PredictionServiceClient", + "TextClassificationDatasetMetadata", + "TextClassificationModelMetadata", + "TextExtractionAnnotation", + "TextExtractionDatasetMetadata", + "TextExtractionEvaluationMetrics", + "TextExtractionModelMetadata", + "TextSegment", + "TextSentimentAnnotation", + "TextSentimentDatasetMetadata", + "TextSentimentEvaluationMetrics", + "TextSentimentModelMetadata", + "TextSnippet", + "TranslationAnnotation", + "TranslationDatasetMetadata", + 
"TranslationEvaluationMetrics", + "TranslationModelMetadata", + "UndeployModelOperationMetadata", + "UndeployModelRequest", + "UpdateDatasetRequest", + "UpdateModelRequest", "AutoMlClient", ) diff --git a/google/cloud/automl_v1/gapic/auto_ml_client.py b/google/cloud/automl_v1/gapic/auto_ml_client.py deleted file mode 100644 index a870b6bf..00000000 --- a/google/cloud/automl_v1/gapic/auto_ml_client.py +++ /dev/null @@ -1,1911 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Accesses the google.cloud.automl.v1 AutoMl API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.automl_v1.gapic import auto_ml_client_config -from google.cloud.automl_v1.gapic import enums -from google.cloud.automl_v1.gapic.transports import auto_ml_grpc_transport -from google.cloud.automl_v1.proto import annotation_spec_pb2 -from google.cloud.automl_v1.proto import data_items_pb2 -from google.cloud.automl_v1.proto import dataset_pb2 -from google.cloud.automl_v1.proto import image_pb2 -from google.cloud.automl_v1.proto import io_pb2 -from google.cloud.automl_v1.proto import model_evaluation_pb2 -from google.cloud.automl_v1.proto import model_pb2 -from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 -from google.cloud.automl_v1.proto import prediction_service_pb2 -from google.cloud.automl_v1.proto import prediction_service_pb2_grpc -from google.cloud.automl_v1.proto import service_pb2 -from google.cloud.automl_v1.proto import service_pb2_grpc -from google.longrunning import operations_pb2 as longrunning_operations_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl",).version - - -class AutoMlClient(object): - """ - AutoML Server API. - - The resource names are assigned by the server. The server never reuses - names that it has created after the resources with those names are - deleted. - - An ID of a resource is the last element of the item's resource name. For - ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, - then the id for the item is ``{dataset_id}``. - - Currently the only supported ``location_id`` is "us-central1". - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. 
- """ - - SERVICE_ADDRESS = "automl.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.automl.v1.AutoMl" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AutoMlClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def annotation_spec_path(cls, project, location, dataset, annotation_spec): - """Return a fully-qualified annotation_spec string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/datasets/{dataset}/annotationSpecs/{annotation_spec}", - project=project, - location=location, - dataset=dataset, - annotation_spec=annotation_spec, - ) - - @classmethod - def dataset_path(cls, project, location, dataset): - """Return a fully-qualified dataset string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/datasets/{dataset}", - project=project, - location=location, - dataset=dataset, - ) - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def model_path(cls, project, location, model): - """Return a fully-qualified model string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/models/{model}", - project=project, - location=location, - model=model, - ) - - @classmethod - def model_evaluation_path(cls, project, location, model, model_evaluation): - """Return a fully-qualified model_evaluation string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}", - project=project, - location=location, - model=model, - model_evaluation=model_evaluation, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.AutoMlGrpcTransport, - Callable[[~.Credentials, type], ~.AutoMlGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = auto_ml_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=auto_ml_grpc_transport.AutoMlGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = auto_ml_grpc_transport.AutoMlGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_dataset( - self, - parent, - dataset, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a dataset. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # TODO: Initialize `dataset`: - >>> dataset = {} - >>> - >>> response = client.create_dataset(parent, dataset) - >>> - >>> def callback(operation_future): - ... # Handle result. 
- ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. The resource name of the project to create the dataset for. - dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): Required. The dataset to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.Dataset` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "create_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_dataset, - default_retry=self._method_configs["CreateDataset"].retry, - default_timeout=self._method_configs["CreateDataset"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["create_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - dataset_pb2.Dataset, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def get_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a dataset. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') - >>> - >>> response = client.get_dataset(name) - - Args: - name (str): Required. The resource name of the dataset to retrieve. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.Dataset` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "get_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_dataset, - default_retry=self._method_configs["GetDataset"].retry, - default_timeout=self._method_configs["GetDataset"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.GetDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_datasets( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists datasets in a project. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_datasets(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_datasets(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The resource name of the project from which to list datasets. - filter_ (str): An expression for filtering the results of the request. - - - ``dataset_metadata`` - for existence of the case (e.g. - ``image_classification_dataset_metadata``). Some examples of using the - filter are: - - - ``translation_dataset_metadata:*`` --> The dataset has - translation_dataset_metadata. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.automl_v1.types.Dataset` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic.
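(Editor's aside, not part of the patch: a minimal sketch of the filter grammar documented above, written against the legacy client that this hunk deletes; the project and location values are placeholders and default application credentials are assumed.)

from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
parent = client.location_path("my-project", "us-central1")
# Keep only datasets that carry translation metadata.
for dataset in client.list_datasets(parent, filter_="translation_dataset_metadata:*"):
    print(dataset.name)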
- if "list_datasets" not in self._inner_api_calls: - self._inner_api_calls[ - "list_datasets" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_datasets, - default_retry=self._method_configs["ListDatasets"].retry, - default_timeout=self._method_configs["ListDatasets"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ListDatasetsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_datasets"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="datasets", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def update_dataset( - self, - dataset, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a dataset. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> # TODO: Initialize `dataset`: - >>> dataset = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_dataset(dataset, update_mask) - - Args: - dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): Required. The dataset which replaces the resource on the server. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.Dataset` - update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.Dataset` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "update_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_dataset, - default_retry=self._method_configs["UpdateDataset"].retry, - default_timeout=self._method_configs["UpdateDataset"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.UpdateDatasetRequest( - dataset=dataset, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("dataset.name", dataset.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_dataset( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a dataset and all of its contents. Returns empty response in - the ``response`` field when it completes, and ``delete_details`` in the - ``metadata`` field. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') - >>> - >>> response = client.delete_dataset(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. The resource name of the dataset to delete. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_dataset" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_dataset" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_dataset, - default_retry=self._method_configs["DeleteDataset"].retry, - default_timeout=self._method_configs["DeleteDataset"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.DeleteDatasetRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["delete_dataset"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def import_data( - self, - name, - input_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A ``schema_inference_version`` parameter must be explicitly set. - Returns an empty response in the ``response`` field when it - completes. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') - >>> - >>> # TODO: Initialize `input_config`: - >>> input_config = {} - >>> - >>> response = client.import_data(name, input_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Dataset name. Dataset must already exist. All imported - annotations and examples will be added. - input_config (Union[dict, ~google.cloud.automl_v1.types.InputConfig]): Required. The desired input location and its domain specific semantics, - if any. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.InputConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "import_data" not in self._inner_api_calls: - self._inner_api_calls[ - "import_data" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.import_data, - default_retry=self._method_configs["ImportData"].retry, - default_timeout=self._method_configs["ImportData"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ImportDataRequest(name=name, input_config=input_config,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["import_data"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def export_data( - self, - name, - output_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Exports dataset's data to the provided output location. Returns an - empty response in the ``response`` field when it completes. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') - >>> - >>> # TODO: Initialize `output_config`: - >>> output_config = {} - >>> - >>> response = client.export_data(name, output_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. The resource name of the dataset. - output_config (Union[dict, ~google.cloud.automl_v1.types.OutputConfig]): Required. The desired output location. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.OutputConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "export_data" not in self._inner_api_calls: - self._inner_api_calls[ - "export_data" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.export_data, - default_retry=self._method_configs["ExportData"].retry, - default_timeout=self._method_configs["ExportData"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ExportDataRequest(name=name, output_config=output_config,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["export_data"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def get_annotation_spec( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an annotation spec. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.annotation_spec_path('[PROJECT]', '[LOCATION]', '[DATASET]', '[ANNOTATION_SPEC]') - >>> - >>> response = client.get_annotation_spec(name) - - Args: - name (str): Required. The resource name of the annotation spec to retrieve. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.AnnotationSpec` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_annotation_spec" not in self._inner_api_calls: - self._inner_api_calls[ - "get_annotation_spec" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_annotation_spec, - default_retry=self._method_configs["GetAnnotationSpec"].retry, - default_timeout=self._method_configs["GetAnnotationSpec"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.GetAnnotationSpecRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_annotation_spec"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_model( - self, - parent, - model, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a model. Returns a Model in the ``response`` field when it - completes. 
When you create a model, several model evaluations are - created for it: a global evaluation, and one evaluation for each - annotation spec. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # TODO: Initialize `model`: - >>> model = {} - >>> - >>> response = client.create_model(parent, model) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - parent (str): Required. Resource name of the parent project where the model is being created. - model (Union[dict, ~google.cloud.automl_v1.types.Model]): Required. The model to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.Model` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_model" not in self._inner_api_calls: - self._inner_api_calls[ - "create_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_model, - default_retry=self._method_configs["CreateModel"].retry, - default_timeout=self._method_configs["CreateModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.CreateModelRequest(parent=parent, model=model,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["create_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - model_pb2.Model, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def get_model( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a model. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> response = client.get_model(name) - - Args: - name (str): Required. Resource name of the model. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.Model` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_model" not in self._inner_api_calls: - self._inner_api_calls[ - "get_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_model, - default_retry=self._method_configs["GetModel"].retry, - default_timeout=self._method_configs["GetModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.GetModelRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_models( - self, - parent, - filter_=None, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists models. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_models(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_models(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Resource name of the project, from which to list the models. - filter_ (str): An expression for filtering the results of the request. - - - ``model_metadata`` - for existence of the case (e.g. - ``video_classification_model_metadata:*``). - - - ``dataset_id`` - for = or !=. Some examples of using the filter are: - - - ``image_classification_model_metadata:*`` --> The model has - image_classification_model_metadata. - - - ``dataset_id=5`` --> The model was created from a dataset with ID 5. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance.
- An iterable of :class:`~google.cloud.automl_v1.types.Model` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_models" not in self._inner_api_calls: - self._inner_api_calls[ - "list_models" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_models, - default_retry=self._method_configs["ListModels"].retry, - default_timeout=self._method_configs["ListModels"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ListModelsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_models"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="model", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_model( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a model. Returns ``google.protobuf.Empty`` in the - ``response`` field when it completes, and ``delete_details`` in the - ``metadata`` field. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> response = client.delete_model(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Resource name of the model being deleted. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
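(Editor's aside, not part of the patch: a minimal sketch of waiting on the delete_model operation; names are placeholders.)

from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
model_name = client.model_path("my-project", "us-central1", "mdl123")
operation = client.delete_model(model_name)
operation.result()  # Empty on success; delete_details are reported via operation.metadata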
- if "delete_model" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_model, - default_retry=self._method_configs["DeleteModel"].retry, - default_timeout=self._method_configs["DeleteModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.DeleteModelRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["delete_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def update_model( - self, - model, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a model. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> # TODO: Initialize `model`: - >>> model = {} - >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} - >>> - >>> response = client.update_model(model, update_mask) - - Args: - model (Union[dict, ~google.cloud.automl_v1.types.Model]): Required. The model which replaces the resource on the server. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.Model` - update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.Model` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_model" not in self._inner_api_calls: - self._inner_api_calls[ - "update_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_model, - default_retry=self._method_configs["UpdateModel"].retry, - default_timeout=self._method_configs["UpdateModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.UpdateModelRequest(model=model, update_mask=update_mask,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("model.name", model.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def deploy_model( - self, - name, - image_object_detection_model_deployment_metadata=None, - image_classification_model_deployment_metadata=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deploys a model. If a model is already deployed, deploying it with - the same parameters has no effect. Deploying with different parametrs - (as e.g. changing - - ``node_number``) will reset the deployment state without pausing the - model's availability. - - Only applicable for Text Classification, Image Object Detection , - Tables, and Image Segmentation; all other domains manage deployment - automatically. - - Returns an empty response in the ``response`` field when it completes. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> response = client.deploy_model(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Resource name of the model to deploy. - image_object_detection_model_deployment_metadata (Union[dict, ~google.cloud.automl_v1.types.ImageObjectDetectionModelDeploymentMetadata]): Model deployment metadata specific to Image Object Detection. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.ImageObjectDetectionModelDeploymentMetadata` - image_classification_model_deployment_metadata (Union[dict, ~google.cloud.automl_v1.types.ImageClassificationModelDeploymentMetadata]): Model deployment metadata specific to Image Classification. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.ImageClassificationModelDeploymentMetadata` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "deploy_model" not in self._inner_api_calls: - self._inner_api_calls[ - "deploy_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.deploy_model, - default_retry=self._method_configs["DeployModel"].retry, - default_timeout=self._method_configs["DeployModel"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - image_object_detection_model_deployment_metadata=image_object_detection_model_deployment_metadata, - image_classification_model_deployment_metadata=image_classification_model_deployment_metadata, - ) - - request = service_pb2.DeployModelRequest( - name=name, - image_object_detection_model_deployment_metadata=image_object_detection_model_deployment_metadata, - image_classification_model_deployment_metadata=image_classification_model_deployment_metadata, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["deploy_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def undeploy_model( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Undeploys a model. If the model is not deployed this method has no - effect. - - Only applicable for Text Classification, Image Object Detection and - Tables; all other domains manage deployment automatically. - - Returns an empty response in the ``response`` field when it completes. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> response = client.undeploy_model(name) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Resource name of the model to undeploy. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "undeploy_model" not in self._inner_api_calls: - self._inner_api_calls[ - "undeploy_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.undeploy_model, - default_retry=self._method_configs["UndeployModel"].retry, - default_timeout=self._method_configs["UndeployModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.UndeployModelRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["undeploy_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def export_model( - self, - name, - output_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Exports a trained, "export-able", model to a user specified Google - Cloud Storage location. A model is considered export-able if and only if - it has an export format defined for it in ``ModelExportOutputConfig``. - - Returns an empty response in the ``response`` field when it completes. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> # TODO: Initialize `output_config`: - >>> output_config = {} - >>> - >>> response = client.export_model(name, output_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. The resource name of the model to export. - output_config (Union[dict, ~google.cloud.automl_v1.types.ModelExportOutputConfig]): Required. The desired output location and configuration. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.ModelExportOutputConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
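(Editor's aside, not part of the patch: a sketch of ``ModelExportOutputConfig`` in dict form; the ``model_format`` value and the bucket are illustrative assumptions, not taken from this diff.)

from google.cloud import automl_v1

client = automl_v1.AutoMlClient()
model_name = client.model_path("my-project", "us-central1", "mdl123")
output_config = {
    "model_format": "tflite",  # assumed format; check ModelExportOutputConfig for valid values
    "gcs_destination": {"output_uri_prefix": "gs://my-bucket/model-export/"},
}
client.export_model(model_name, output_config).result()  # blocks until the export LRO finishes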
- if "export_model" not in self._inner_api_calls: - self._inner_api_calls[ - "export_model" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.export_model, - default_retry=self._method_configs["ExportModel"].retry, - default_timeout=self._method_configs["ExportModel"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ExportModelRequest( - name=name, output_config=output_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["export_model"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - empty_pb2.Empty, - metadata_type=proto_operations_pb2.OperationMetadata, - ) - - def get_model_evaluation( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a model evaluation. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> name = client.model_evaluation_path('[PROJECT]', '[LOCATION]', '[MODEL]', '[MODEL_EVALUATION]') - >>> - >>> response = client.get_model_evaluation(name) - - Args: - name (str): Required. Resource name for the model evaluation. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.ModelEvaluation` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_model_evaluation" not in self._inner_api_calls: - self._inner_api_calls[ - "get_model_evaluation" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_model_evaluation, - default_retry=self._method_configs["GetModelEvaluation"].retry, - default_timeout=self._method_configs["GetModelEvaluation"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.GetModelEvaluationRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_model_evaluation"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_model_evaluations( - self, - parent, - filter_, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists model evaluations. 
- - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.AutoMlClient() - >>> - >>> parent = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> # TODO: Initialize `filter_`: - >>> filter_ = '' - >>> - >>> # Iterate over all results - >>> for element in client.list_model_evaluations(parent, filter_): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_model_evaluations(parent, filter_).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. Resource name of the model to list the model evaluations for. - If modelId is set as "-", this will list model evaluations from across all - models of the parent location. - filter_ (str): Required. An expression for filtering the results of the request. - - - ``annotation_spec_id`` - for =, != or existence. See example below - for the last. - - Some examples of using the filter are: - - - ``annotation_spec_id!=4`` --> The model evaluation was done for - annotation spec with ID different than 4. - - ``NOT annotation_spec_id:*`` --> The model evaluation was done for - aggregate of all annotation specs. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.automl_v1.types.ModelEvaluation` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
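# As with the methods above, the raw transport method is wrapped once with
# the "ListModelEvaluations" retry/timeout policy and cached; further below,
# `functools.partial` binds the wrapped call so a `GRPCIterator` can
# re-invoke it for each page of results.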
- if "list_model_evaluations" not in self._inner_api_calls: - self._inner_api_calls[ - "list_model_evaluations" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_model_evaluations, - default_retry=self._method_configs["ListModelEvaluations"].retry, - default_timeout=self._method_configs["ListModelEvaluations"].timeout, - client_info=self._client_info, - ) - - request = service_pb2.ListModelEvaluationsRequest( - parent=parent, filter=filter_, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_model_evaluations"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="model_evaluation", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator diff --git a/google/cloud/automl_v1/gapic/auto_ml_client_config.py b/google/cloud/automl_v1/gapic/auto_ml_client_config.py deleted file mode 100644 index 0c89b881..00000000 --- a/google/cloud/automl_v1/gapic/auto_ml_client_config.py +++ /dev/null @@ -1,132 +0,0 @@ -config = { - "interfaces": { - "google.cloud.automl.v1.AutoMl": { - "retry_codes": { - "retry_policy_1_codes": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "no_retry_codes": [], - "no_retry_1_codes": [], - }, - "retry_params": { - "retry_policy_1_params": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 5000, - "total_timeout_millis": 5000, - }, - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_1_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 5000, - "total_timeout_millis": 5000, - }, - }, - "methods": { - "CreateDataset": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetDataset": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListDatasets": { - "timeout_millis": 50000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "UpdateDataset": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeleteDataset": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ImportData": { - "timeout_millis": 20000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ExportData": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetAnnotationSpec": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - 
"retry_params_name": "retry_policy_1_params", - }, - "CreateModel": { - "timeout_millis": 20000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetModel": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListModels": { - "timeout_millis": 50000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "DeleteModel": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "UpdateModel": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "DeployModel": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "UndeployModel": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "ExportModel": { - "timeout_millis": 5000, - "retry_codes_name": "no_retry_1_codes", - "retry_params_name": "no_retry_1_params", - }, - "GetModelEvaluation": { - "timeout_millis": 5000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - "ListModelEvaluations": { - "timeout_millis": 50000, - "retry_codes_name": "retry_policy_1_codes", - "retry_params_name": "retry_policy_1_params", - }, - }, - } - } -} diff --git a/google/cloud/automl_v1/gapic/enums.py b/google/cloud/automl_v1/gapic/enums.py deleted file mode 100644 index 7bb5f2cb..00000000 --- a/google/cloud/automl_v1/gapic/enums.py +++ /dev/null @@ -1,109 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class ClassificationType(enum.IntEnum): - """ - Type of the classification problem. - - Attributes: - CLASSIFICATION_TYPE_UNSPECIFIED (int): An un-set value of this enum. - MULTICLASS (int): At most one label is allowed per example. - MULTILABEL (int): Multiple labels are allowed for one example. - """ - - CLASSIFICATION_TYPE_UNSPECIFIED = 0 - MULTICLASS = 1 - MULTILABEL = 2 - - -class Document(object): - class Layout(object): - class TextSegmentType(enum.IntEnum): - """ - The type of TextSegment in the context of the original document. - - Attributes: - TEXT_SEGMENT_TYPE_UNSPECIFIED (int): Should not be used. - TOKEN (int): The text segment is a token. e.g. word. - PARAGRAPH (int): The text segment is a paragraph. - FORM_FIELD (int): The text segment is a form field. - FORM_FIELD_NAME (int): The text segment is the name part of a form field. It will be - treated as child of another FORM_FIELD TextSegment if its span is - subspan of another TextSegment with type FORM_FIELD. - FORM_FIELD_CONTENTS (int): The text segment is the text content part of a form field. 
It will - be treated as child of another FORM_FIELD TextSegment if its span is - subspan of another TextSegment with type FORM_FIELD. - TABLE (int): The text segment is a whole table, including headers, and all rows. - TABLE_HEADER (int): The text segment is a table's headers. It will be treated as child of - another TABLE TextSegment if its span is subspan of another TextSegment - with type TABLE. - TABLE_ROW (int): The text segment is a row in table. It will be treated as child of - another TABLE TextSegment if its span is subspan of another TextSegment - with type TABLE. - TABLE_CELL (int): The text segment is a cell in table. It will be treated as child of - another TABLE_ROW TextSegment if its span is subspan of another - TextSegment with type TABLE_ROW. - """ - - TEXT_SEGMENT_TYPE_UNSPECIFIED = 0 - TOKEN = 1 - PARAGRAPH = 2 - FORM_FIELD = 3 - FORM_FIELD_NAME = 4 - FORM_FIELD_CONTENTS = 5 - TABLE = 6 - TABLE_HEADER = 7 - TABLE_ROW = 8 - TABLE_CELL = 9 - - -class DocumentDimensions(object): - class DocumentDimensionUnit(enum.IntEnum): - """ - Unit of the document dimension. - - Attributes: - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED (int): Should not be used. - INCH (int): Document dimension is measured in inches. - CENTIMETER (int): Document dimension is measured in centimeters. - POINT (int): Document dimension is measured in points. 72 points = 1 inch. - """ - - DOCUMENT_DIMENSION_UNIT_UNSPECIFIED = 0 - INCH = 1 - CENTIMETER = 2 - POINT = 3 - - -class Model(object): - class DeploymentState(enum.IntEnum): - """ - Deployment state of the model. - - Attributes: - DEPLOYMENT_STATE_UNSPECIFIED (int): Should not be used, an un-set enum has this value by default. - DEPLOYED (int): Model is deployed. - UNDEPLOYED (int): Model is not deployed. - """ - - DEPLOYMENT_STATE_UNSPECIFIED = 0 - DEPLOYED = 1 - UNDEPLOYED = 2 diff --git a/google/cloud/automl_v1/gapic/prediction_service_client.py b/google/cloud/automl_v1/gapic/prediction_service_client.py deleted file mode 100644 index 06686df3..00000000 --- a/google/cloud/automl_v1/gapic/prediction_service_client.py +++ /dev/null @@ -1,534 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
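The ``retry_params`` blocks in the client config deleted above are parsed into ``google.api_core`` retry objects when a client builds its method configs (via ``google.api_core.gapic_v1.config.parse_method_configs``). A rough hand-written equivalent of ``retry_policy_1_params``, shown only as a sketch with the millisecond fields converted to seconds:

    >>> from google.api_core import exceptions, retry
    >>> retry_policy_1 = retry.Retry(
    ...     initial=0.1,     # initial_retry_delay_millis
    ...     multiplier=1.3,  # retry_delay_multiplier
    ...     maximum=60.0,    # max_retry_delay_millis
    ...     deadline=5.0,    # total_timeout_millis
    ...     predicate=retry.if_exception_type(
    ...         exceptions.DeadlineExceeded,    # "DEADLINE_EXCEEDED"
    ...         exceptions.ServiceUnavailable,  # "UNAVAILABLE"
    ...     ),
    ... )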
- -"""Accesses the google.cloud.automl.v1 PredictionService API.""" - -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 -import google.api_core.path_template -import grpc - -from google.cloud.automl_v1.gapic import enums -from google.cloud.automl_v1.gapic import prediction_service_client_config -from google.cloud.automl_v1.gapic.transports import prediction_service_grpc_transport -from google.cloud.automl_v1.proto import data_items_pb2 -from google.cloud.automl_v1.proto import io_pb2 -from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 -from google.cloud.automl_v1.proto import prediction_service_pb2 -from google.cloud.automl_v1.proto import prediction_service_pb2_grpc -from google.longrunning import operations_pb2 as longrunning_operations_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl",).version - - -class PredictionServiceClient(object): - """ - AutoML Prediction API. - - On any input that is documented to expect a string parameter in - snake_case or kebab-case, either of those cases is accepted. - """ - - SERVICE_ADDRESS = "automl.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.automl.v1.PredictionService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PredictionServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def model_path(cls, project, location, model): - """Return a fully-qualified model string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}/models/{model}", - project=project, - location=location, - model=model, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.PredictionServiceGrpcTransport, - Callable[[~.Credentials, type], ~.PredictionServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. 
- credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = prediction_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=prediction_service_grpc_transport.PredictionServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = prediction_service_grpc_transport.PredictionServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def predict( - self, - name, - payload, - params=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Perform an online prediction. The prediction result is directly - returned in the response. 
Available for the following ML scenarios, and - their expected request payloads: - - AutoML Vision Classification - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to 30MB. - - AutoML Vision Object Detection - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to 30MB. - - AutoML Natural Language Classification - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a document in - .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Natural Language Entity Extraction - - - A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a - document in .PDF, .TIF or .TIFF format with size up to 20MB. - - AutoML Natural Language Sentiment Analysis - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a document in - .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Translation - - - A TextSnippet up to 25,000 characters, UTF-8 encoded. - - AutoML Tables - - - A row with column values matching the columns of the model, up to - 5MB. Not available for FORECASTING ``prediction_type``. - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.PredictionServiceClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> # TODO: Initialize `payload`: - >>> payload = {} - >>> - >>> response = client.predict(name, payload) - - Args: - name (str): Required. Name of the model requested to serve the prediction. - payload (Union[dict, ~google.cloud.automl_v1.types.ExamplePayload]): Required. Payload to perform a prediction on. The payload must match the - problem type that the model was trained to solve. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.ExamplePayload` - params (dict[str -> str]): Additional domain-specific parameters, any string must be up to - 25000 characters long. - - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When the model - makes predictions for an image, it will only produce results that have - at least this confidence score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects on the image, - it will only produce bounding boxes which have at least this confidence - score. Value in 0 to 1 range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of bounding - boxes returned. The default is 100. The number of returned bounding - boxes might be limited by the server. - - AutoML Tables - - ``feature_importance`` : (boolean) Whether - - ``feature_importance`` is populated in the returned list of - ``TablesAnnotation`` objects. The default is false. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types.PredictResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid.
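A minimal sketch of passing the domain-specific ``params`` documented above (``content`` is assumed to hold raw image bytes; threshold values are passed as strings):

    >>> payload = {'image': {'image_bytes': content}}
    >>> params = {'score_threshold': '0.8'}
    >>> response = client.predict(name, payload, params=params)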
- """ - # Wrap the transport method to add retry and timeout logic. - if "predict" not in self._inner_api_calls: - self._inner_api_calls[ - "predict" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.predict, - default_retry=self._method_configs["Predict"].retry, - default_timeout=self._method_configs["Predict"].timeout, - client_info=self._client_info, - ) - - request = prediction_service_pb2.PredictRequest( - name=name, payload=payload, params=params, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["predict"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def batch_predict( - self, - name, - input_config, - output_config, - params=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Perform a batch prediction. Unlike the online ``Predict``, batch - prediction result won't be immediately available in the response. - Instead, a long running operation object is returned. User can poll the - operation result via ``GetOperation`` method. Once the operation is - done, ``BatchPredictResult`` is returned in the ``response`` field. - Available for following ML scenarios: - - - AutoML Vision Classification - - AutoML Vision Object Detection - - AutoML Video Intelligence Classification - - AutoML Video Intelligence Object Tracking \* AutoML Natural Language - Classification - - AutoML Natural Language Entity Extraction - - AutoML Natural Language Sentiment Analysis - - AutoML Tables - - Example: - >>> from google.cloud import automl_v1 - >>> - >>> client = automl_v1.PredictionServiceClient() - >>> - >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') - >>> - >>> # TODO: Initialize `input_config`: - >>> input_config = {} - >>> - >>> # TODO: Initialize `output_config`: - >>> output_config = {} - >>> - >>> response = client.batch_predict(name, input_config, output_config) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() - - Args: - name (str): Required. Name of the model requested to serve the batch prediction. - input_config (Union[dict, ~google.cloud.automl_v1.types.BatchPredictInputConfig]): Required. The input configuration for batch prediction. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.BatchPredictInputConfig` - output_config (Union[dict, ~google.cloud.automl_v1.types.BatchPredictOutputConfig]): Required. The Configuration specifying where output predictions should - be written. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.automl_v1.types.BatchPredictOutputConfig` - params (dict[str -> str]): Additional domain-specific parameters for the predictions, any - string must be up to 25000 characters long. - - AutoML Natural Language Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When the model - makes predictions for a text snippet, it will only produce results that - have at least this confidence score. The default is 0.5. 
- - AutoML Vision Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When the model - makes predictions for an image, it will only produce results that have - at least this confidence score. The default is 0.5. - - AutoML Vision Object Detection - - ``score_threshold`` : (float) When Model detects objects on the image, - it will only produce bounding boxes which have at least this confidence - score. Value in 0 to 1 range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of bounding - boxes returned per image. The default is 100, the number of bounding - boxes returned might be limited by the server. AutoML Video Intelligence - Classification - - ``score_threshold`` : (float) A value from 0.0 to 1.0. When the model - makes predictions for a video, it will only produce results that have at - least this confidence score. The default is 0.5. - - ``segment_classification`` : (boolean) Set to true to request - segment-level classification. AutoML Video Intelligence returns labels - and their confidence scores for the entire segment of the video that - user specified in the request configuration. The default is true. - - ``shot_classification`` : (boolean) Set to true to request shot-level - classification. AutoML Video Intelligence determines the boundaries for - each camera shot in the entire segment of the video that user specified - in the request configuration. AutoML Video Intelligence then returns - labels and their confidence scores for each detected shot, along with - the start and end time of the shot. The default is false. - - WARNING: Model evaluation is not done for this classification type, the - quality of it depends on training data, but there are no metrics - provided to describe that quality. - - ``1s_interval_classification`` : (boolean) Set to true to request - classification for a video at one-second intervals. AutoML Video - Intelligence returns labels and their confidence scores for each second - of the entire segment of the video that user specified in the request - configuration. The default is false. - - WARNING: Model evaluation is not done for this classification type, the - quality of it depends on training data, but there are no metrics - provided to describe that quality. - - AutoML Video Intelligence Object Tracking - - ``score_threshold`` : (float) When Model detects objects on video - frames, it will only produce bounding boxes which have at least this - confidence score. Value in 0 to 1 range, default is 0.5. - - ``max_bounding_box_count`` : (int64) The maximum number of bounding - boxes returned per image. The default is 100, the number of bounding - boxes returned might be limited by the server. - - ``min_bounding_box_size`` : (float) Only bounding boxes with shortest - edge at least that long as a relative value of video frame size are - returned. Value in 0 to 1 range. Default is 0. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. 
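A minimal sketch of the dict forms of the two required configs (the bucket paths are placeholders):

    >>> input_config = {
    ...     'gcs_source': {'input_uris': ['gs://my-bucket/inputs.csv']}}
    >>> output_config = {
    ...     'gcs_destination': {'output_uri_prefix': 'gs://my-bucket/results/'}}
    >>> response = client.batch_predict(name, input_config, output_config)
    >>> result = response.result()  # blocks until the operation completes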
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "batch_predict" not in self._inner_api_calls: - self._inner_api_calls[ - "batch_predict" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.batch_predict, - default_retry=self._method_configs["BatchPredict"].retry, - default_timeout=self._method_configs["BatchPredict"].timeout, - client_info=self._client_info, - ) - - request = prediction_service_pb2.BatchPredictRequest( - name=name, - input_config=input_config, - output_config=output_config, - params=params, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - operation = self._inner_api_calls["batch_predict"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - prediction_service_pb2.BatchPredictResult, - metadata_type=proto_operations_pb2.OperationMetadata, - ) diff --git a/google/cloud/automl_v1/gapic/prediction_service_client_config.py b/google/cloud/automl_v1/gapic/prediction_service_client_config.py deleted file mode 100644 index e4b1a44f..00000000 --- a/google/cloud/automl_v1/gapic/prediction_service_client_config.py +++ /dev/null @@ -1,39 +0,0 @@ -config = { - "interfaces": { - "google.cloud.automl.v1.PredictionService": { - "retry_codes": {"no_retry_2_codes": [], "no_retry_codes": []}, - "retry_params": { - "no_retry_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 0, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 0, - "total_timeout_millis": 0, - }, - "no_retry_2_params": { - "initial_retry_delay_millis": 0, - "retry_delay_multiplier": 0.0, - "max_retry_delay_millis": 0, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - }, - "methods": { - "Predict": { - "timeout_millis": 60000, - "retry_codes_name": "no_retry_2_codes", - "retry_params_name": "no_retry_2_params", - }, - "BatchPredict": { - "timeout_millis": 20000, - "retry_codes_name": "no_retry_2_codes", - "retry_params_name": "no_retry_2_params", - }, - }, - } - } -} diff --git a/google/cloud/automl_v1/gapic/transports/__init__.py b/google/cloud/automl_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py deleted file mode 100644 index 6ebffac5..00000000 --- a/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py +++ /dev/null @@ -1,386 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.automl_v1.proto import service_pb2_grpc - - -class AutoMlGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.automl.v1 AutoMl API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="automl.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "auto_ml_stub": service_pb2_grpc.AutoMlStub(channel), - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="automl.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. 
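``create_channel`` can also be called directly when a hand-built transport is wanted; a minimal sketch using only the deleted classes shown here:

    >>> from google.cloud.automl_v1.gapic.transports import (
    ...     auto_ml_grpc_transport)
    >>> channel = auto_ml_grpc_transport.AutoMlGrpcTransport.create_channel(
    ...     address='automl.googleapis.com:443')
    >>> transport = auto_ml_grpc_transport.AutoMlGrpcTransport(channel=channel)
    >>> # the transport can then be passed to AutoMlClient(transport=transport)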
- """ - return self._channel - - @property - def create_dataset(self): - """Return the gRPC stub for :meth:`AutoMlClient.create_dataset`. - - Creates a dataset. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].CreateDataset - - @property - def get_dataset(self): - """Return the gRPC stub for :meth:`AutoMlClient.get_dataset`. - - Gets a dataset. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].GetDataset - - @property - def list_datasets(self): - """Return the gRPC stub for :meth:`AutoMlClient.list_datasets`. - - Lists datasets in a project. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].ListDatasets - - @property - def update_dataset(self): - """Return the gRPC stub for :meth:`AutoMlClient.update_dataset`. - - Updates a dataset. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].UpdateDataset - - @property - def delete_dataset(self): - """Return the gRPC stub for :meth:`AutoMlClient.delete_dataset`. - - Deletes a dataset and all of its contents. Returns empty response in - the ``response`` field when it completes, and ``delete_details`` in the - ``metadata`` field. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].DeleteDataset - - @property - def import_data(self): - """Return the gRPC stub for :meth:`AutoMlClient.import_data`. - - Imports data into a dataset. For Tables this method can only be - called on an empty Dataset. - - For Tables: - - - A ``schema_inference_version`` parameter must be explicitly set. - Returns an empty response in the ``response`` field when it - completes. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].ImportData - - @property - def export_data(self): - """Return the gRPC stub for :meth:`AutoMlClient.export_data`. - - Exports dataset's data to the provided output location. Returns an - empty response in the ``response`` field when it completes. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].ExportData - - @property - def get_annotation_spec(self): - """Return the gRPC stub for :meth:`AutoMlClient.get_annotation_spec`. - - Gets an annotation spec. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].GetAnnotationSpec - - @property - def create_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.create_model`. - - Creates a model. Returns a Model in the ``response`` field when it - completes. When you create a model, several model evaluations are - created for it: a global evaluation, and one evaluation for each - annotation spec. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].CreateModel - - @property - def get_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.get_model`. - - Gets a model. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].GetModel - - @property - def list_models(self): - """Return the gRPC stub for :meth:`AutoMlClient.list_models`. - - Lists models. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].ListModels - - @property - def delete_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.delete_model`. - - Deletes a model. Returns ``google.protobuf.Empty`` in the - ``response`` field when it completes, and ``delete_details`` in the - ``metadata`` field. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].DeleteModel - - @property - def update_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.update_model`. - - Updates a model. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].UpdateModel - - @property - def deploy_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.deploy_model`. - - Deploys a model. If a model is already deployed, deploying it with - the same parameters has no effect. Deploying with different parameters - (e.g., changing - - ``node_number``) will reset the deployment state without pausing the - model's availability. - - Only applicable for Text Classification, Image Object Detection, - Tables, and Image Segmentation; all other domains manage deployment - automatically. - - Returns an empty response in the ``response`` field when it completes. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].DeployModel - - @property - def undeploy_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.undeploy_model`. - - Undeploys a model. If the model is not deployed this method has no - effect. - - Only applicable for Text Classification, Image Object Detection and - Tables; all other domains manage deployment automatically. - - Returns an empty response in the ``response`` field when it completes. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].UndeployModel - - @property - def export_model(self): - """Return the gRPC stub for :meth:`AutoMlClient.export_model`. - - Exports a trained, "export-able", model to a user specified Google - Cloud Storage location. A model is considered export-able if and only if - it has an export format defined for it in ``ModelExportOutputConfig``. - - Returns an empty response in the ``response`` field when it completes. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object.
- """ - return self._stubs["auto_ml_stub"].ExportModel - - @property - def get_model_evaluation(self): - """Return the gRPC stub for :meth:`AutoMlClient.get_model_evaluation`. - - Gets a model evaluation. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].GetModelEvaluation - - @property - def list_model_evaluations(self): - """Return the gRPC stub for :meth:`AutoMlClient.list_model_evaluations`. - - Lists model evaluations. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["auto_ml_stub"].ListModelEvaluations diff --git a/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py deleted file mode 100644 index c94538be..00000000 --- a/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py +++ /dev/null @@ -1,192 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers -import google.api_core.operations_v1 - -from google.cloud.automl_v1.proto import prediction_service_pb2_grpc - - -class PredictionServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.cloud.automl.v1 PredictionService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="automl.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. 
- if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "prediction_service_stub": prediction_service_pb2_grpc.PredictionServiceStub( - channel - ), - } - - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - - @classmethod - def create_channel( - cls, address="automl.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def predict(self): - """Return the gRPC stub for :meth:`PredictionServiceClient.predict`. - - Perform an online prediction. The prediction result is directly - returned in the response. Available for the following ML scenarios, and - their expected request payloads: - - AutoML Vision Classification - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to 30MB. - - AutoML Vision Object Detection - - - An image in .JPEG, .GIF or .PNG format, image_bytes up to 30MB. - - AutoML Natural Language Classification - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a document in - .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Natural Language Entity Extraction - - - A TextSnippet up to 10,000 characters, UTF-8 NFC encoded or a - document in .PDF, .TIF or .TIFF format with size up to 20MB. - - AutoML Natural Language Sentiment Analysis - - - A TextSnippet up to 60,000 characters, UTF-8 encoded or a document in - .PDF, .TIF or .TIFF format with size up to 2MB. - - AutoML Translation - - - A TextSnippet up to 25,000 characters, UTF-8 encoded. - - AutoML Tables - - - A row with column values matching the columns of the model, up to - 5MB. Not available for FORECASTING ``prediction_type``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["prediction_service_stub"].Predict - - @property - def batch_predict(self): - """Return the gRPC stub for :meth:`PredictionServiceClient.batch_predict`. - - Perform a batch prediction. Unlike the online ``Predict``, batch - prediction result won't be immediately available in the response. - Instead, a long running operation object is returned. User can poll the - operation result via ``GetOperation`` method. Once the operation is - done, ``BatchPredictResult`` is returned in the ``response`` field.
- Available for following ML scenarios: - - - AutoML Vision Classification - - AutoML Vision Object Detection - - AutoML Video Intelligence Classification - - AutoML Video Intelligence Object Tracking \* AutoML Natural Language - Classification - - AutoML Natural Language Entity Extraction - - AutoML Natural Language Sentiment Analysis - - AutoML Tables - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["prediction_service_stub"].BatchPredict diff --git a/google/cloud/automl_v1/proto/__init__.py b/google/cloud/automl_v1/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/automl_v1/proto/annotation_payload_pb2.py b/google/cloud/automl_v1/proto/annotation_payload_pb2.py deleted file mode 100644 index 8cdfd04f..00000000 --- a/google/cloud/automl_v1/proto/annotation_payload_pb2.py +++ /dev/null @@ -1,316 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/automl_v1/proto/annotation_payload.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.cloud.automl_v1.proto import ( - classification_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_classification__pb2, -) -from google.cloud.automl_v1.proto import ( - detection_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_detection__pb2, -) -from google.cloud.automl_v1.proto import ( - text_extraction_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_text__extraction__pb2, -) -from google.cloud.automl_v1.proto import ( - text_sentiment_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_text__sentiment__pb2, -) -from google.cloud.automl_v1.proto import ( - translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, -) -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/automl_v1/proto/annotation_payload.proto", - package="google.cloud.automl.v1", - syntax="proto3", - serialized_options=b"\n\032com.google.cloud.automl.v1P\001Z AutoML Vision .. raw:: html