diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
new file mode 100644
index 00000000..939e5341
--- /dev/null
+++ b/.github/CONTRIBUTING.md
@@ -0,0 +1,28 @@
+# How to Contribute
+
+We'd love to accept your patches and contributions to this project. There are
+just a few small guidelines you need to follow.
+
+## Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution;
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
+## Code reviews
+
+All submissions, including submissions by project members, require review. We
+use GitHub pull requests for this purpose. Consult
+[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
+information on using pull requests.
+
+## Community Guidelines
+
+This project follows [Google's Open Source Community
+Guidelines](https://opensource.google.com/conduct/).
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 00000000..3127a03a
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,44 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+Please run down the following list and make sure you've tried the usual "quick fixes":
+
+ - Search the issues already opened: https://github.com/googleapis/python-automl/issues
+ - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python
+ - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python
+
+If you are still having issues, please be sure to include as much information as possible:
+
+#### Environment details
+
+ - OS type and version:
+ - Python version: `python --version`
+ - pip version: `pip --version`
+ - `google-cloud-automl` version: `pip show google-cloud-automl`
+
+#### Steps to reproduce
+
+ 1. ?
+ 2. ?
+
+#### Code example
+
+```python
+# example
+```
+
+#### Stack trace
+```
+# example
+```
+
+Making sure to follow these steps will guarantee the quickest resolution possible.
+
+Thanks!
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 00000000..6365857f
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,18 @@
+---
+name: Feature request
+about: Suggest an idea for this library
+
+---
+
+Thanks for stopping by to let us know something could be better!
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.github/ISSUE_TEMPLATE/support_request.md b/.github/ISSUE_TEMPLATE/support_request.md
new file mode 100644
index 00000000..99586903
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/support_request.md
@@ -0,0 +1,7 @@
+---
+name: Support request
+about: If you have a support contract with Google, please create an issue in the Google Cloud Support console.
+
+---
+
+**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 00000000..dcf25c84
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,7 @@
+Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly:
+- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-automl/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea
+- [ ] Ensure the tests and linter pass
+- [ ] Code coverage does not decrease (if any source code was changed)
+- [ ] Appropriate docs were updated (if necessary)
+
+Fixes # 🦕
diff --git a/.github/release-please.yml b/.github/release-please.yml
new file mode 100644
index 00000000..4507ad05
--- /dev/null
+++ b/.github/release-please.yml
@@ -0,0 +1 @@
+releaseType: python
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..3fb06e09
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,58 @@
+*.py[cod]
+*.sw[op]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+bin
+var
+sdist
+develop-eggs
+.installed.cfg
+lib
+lib64
+__pycache__
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage
+.nox
+.cache
+.pytest_cache
+
+
+# Mac
+.DS_Store
+
+# JetBrains
+.idea
+
+# VS Code
+.vscode
+
+# emacs
+*~
+
+# Built documentation
+docs/_build
+bigquery/docs/generated
+
+# Virtual environment
+env/
+coverage.xml
+
+# System test environment variables.
+system_tests/local_test_setup
+
+# Make sure a generated file isn't accidentally committed.
+pylintrc
+pylintrc.test
\ No newline at end of file
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
new file mode 100755
index 00000000..9648f6ec
--- /dev/null
+++ b/.kokoro/build.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+cd github/python-automl
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Setup service account credentials.
+export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+
+# Setup project id.
+export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+
+# Remove old nox
+python3.6 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+python3.6 -m nox --version
+
+python3.6 -m nox
diff --git a/.kokoro/continuous/common.cfg b/.kokoro/continuous/common.cfg
new file mode 100644
index 00000000..e929d8e3
--- /dev/null
+++ b/.kokoro/continuous/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-automl/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-automl/.kokoro/build.sh"
+}
diff --git a/.kokoro/continuous/continuous.cfg b/.kokoro/continuous/continuous.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/continuous/continuous.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
new file mode 100644
index 00000000..ea9a6595
--- /dev/null
+++ b/.kokoro/docs/common.cfg
@@ -0,0 +1,48 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-automl/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-automl/.kokoro/publish-docs.sh"
+}
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "docs-staging"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "docuploader_service_account"
+ }
+ }
+}
\ No newline at end of file
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/docs/docs.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/presubmit/common.cfg b/.kokoro/presubmit/common.cfg
new file mode 100644
index 00000000..e929d8e3
--- /dev/null
+++ b/.kokoro/presubmit/common.cfg
@@ -0,0 +1,27 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Download resources for system tests (service account key, etc.)
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python"
+
+# Use the trampoline script to run in docker.
+build_file: "python-automl/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-automl/.kokoro/build.sh"
+}
diff --git a/.kokoro/presubmit/presubmit.cfg b/.kokoro/presubmit/presubmit.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/presubmit/presubmit.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
new file mode 100755
index 00000000..b157f117
--- /dev/null
+++ b/.kokoro/publish-docs.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash
+
+set -eo pipefail
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+cd github/python-automl
+
+# Remove old nox
+python3.6 -m pip uninstall --yes --quiet nox-automation
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+python3.6 -m nox --version
+
+# build docs
+nox -s docs
+
+python3 -m pip install gcp-docuploader
+
+# install a json parser
+sudo apt-get update
+sudo apt-get -y install software-properties-common
+sudo add-apt-repository universe
+sudo apt-get update
+sudo apt-get -y install jq
+
+# create metadata
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
new file mode 100755
index 00000000..ba265923
--- /dev/null
+++ b/.kokoro/release.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/bin/bash
+
+set -eo pipefail
+
+# Start the releasetool reporter
+python3 -m pip install gcp-releasetool
+python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Move into the package, build the distribution and upload.
+TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+cd github/python-automl
+python3 setup.py sdist bdist_wheel
+twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
new file mode 100644
index 00000000..a66e4d28
--- /dev/null
+++ b/.kokoro/release/common.cfg
@@ -0,0 +1,64 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-automl/.kokoro/trampoline.sh"
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+}
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-automl/.kokoro/release.sh"
+}
+
+# Fetch the token needed for reporting release status to GitHub
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "yoshi-automation-github-key"
+ }
+ }
+}
+
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
+}
+
+# Fetch magictoken to use with Magic Github Proxy
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "releasetool-magictoken"
+ }
+ }
+}
+
+# Fetch api key to use with Magic Github Proxy
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "magic-github-proxy-api-key"
+ }
+ }
+}
diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg
new file mode 100644
index 00000000..8f43917d
--- /dev/null
+++ b/.kokoro/release/release.cfg
@@ -0,0 +1 @@
+# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
new file mode 100755
index 00000000..e8c4251f
--- /dev/null
+++ b/.kokoro/trampoline.sh
@@ -0,0 +1,23 @@
+#!/bin/bash
+# Copyright 2017 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+
+chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
+
+exit ${ret_code}
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 6d94acb2..3c01878a 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,7 +6,7 @@
"issue_tracker": "https://issuetracker.google.com/savedsearches/559744",
"release_level": "alpha",
"language": "python",
- "repo": "googleapis/google-cloud-python",
+ "repo": "googleapis/python-automl",
"distribution_name": "google-cloud-automl",
"api_id": "automl.googleapis.com",
"requires_billing": true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9a65baa3..0a753706 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,19 @@
[1]: https://pypi.org/project/google-cloud-automl/#history
+## [0.10.0](https://www.github.com/googleapis/python-automl/compare/v0.9.0...v0.10.0) (2020-01-31)
+
+
+### Features
+
+* **automl:** undeprecate resource name helper methods, add 2.7 deprecation warning (via synth) ([#10037](https://www.github.com/googleapis/python-automl/issues/10037)) ([763a961](https://www.github.com/googleapis/python-automl/commit/763a9611d45d86b6024bcd74dfb8e93099a3f9e0))
+
+
+### Bug Fixes
+
+* **automl:** fix TablesClient.predict for array and struct ([#9991](https://www.github.com/googleapis/python-automl/issues/9991)) ([39f6f2a](https://www.github.com/googleapis/python-automl/commit/39f6f2a5f59b7f61096fb3f43c05501ebc19f676)), closes [#9887](https://www.github.com/googleapis/python-automl/issues/9887)
+* **automl:** fix TypeError when passing a client_info to automl TablesClient ([#9949](https://www.github.com/googleapis/python-automl/issues/9949)) ([75783ec](https://www.github.com/googleapis/python-automl/commit/75783ec359871957253797cdbaa25042c8c02284))
+
## 0.9.0
11-18-2019 09:49 PST
@@ -205,4 +218,3 @@
### New Features
- Initial Release of AutoML v1beta1
-
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..b3d1f602
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,44 @@
+
+# Contributor Code of Conduct
+
+As contributors and maintainers of this project,
+and in the interest of fostering an open and welcoming community,
+we pledge to respect all people who contribute through reporting issues,
+posting feature requests, updating documentation,
+submitting pull requests or patches, and other activities.
+
+We are committed to making participation in this project
+a harassment-free experience for everyone,
+regardless of level of experience, gender, gender identity and expression,
+sexual orientation, disability, personal appearance,
+body size, race, ethnicity, age, religion, or nationality.
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery
+* Personal attacks
+* Trolling or insulting/derogatory comments
+* Public or private harassment
+* Publishing other's private information,
+such as physical or electronic
+addresses, without explicit permission
+* Other unethical or unprofessional conduct.
+
+Project maintainers have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct.
+By adopting this Code of Conduct,
+project maintainers commit themselves to fairly and consistently
+applying these principles to every aspect of managing this project.
+Project maintainers who do not follow or enforce the Code of Conduct
+may be permanently removed from the project team.
+
+This code of conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community.
+
+Instances of abusive, harassing, or otherwise unacceptable behavior
+may be reported by opening an issue
+or contacting one or more of the project maintainers.
+
+This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
+available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
new file mode 100644
index 00000000..1e9731cb
--- /dev/null
+++ b/CONTRIBUTING.rst
@@ -0,0 +1,279 @@
+.. Generated by synthtool. DO NOT EDIT!
+############
+Contributing
+############
+
+#. **Please sign one of the contributor license agreements below.**
+#. Fork the repo, develop and test your code changes, add docs.
+#. Make sure that your commit messages clearly describe the changes.
+#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_)
+
+.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews
+
+.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries.
+
+***************
+Adding Features
+***************
+
+In order to add a feature:
+
+- The feature must be documented in both the API and narrative
+ documentation.
+
+- The feature must work fully on the following CPython versions: 2.7,
+ 3.5, 3.6, and 3.7 on both UNIX and Windows.
+
+- The feature must not add unnecessary dependencies (where
+ "unnecessary" is of course subjective, but new dependencies should
+ be discussed).
+
+****************************
+Using a Development Checkout
+****************************
+
+You'll have to create a development environment using a Git checkout:
+
+- While logged into your GitHub account, navigate to the
+ ``python-automl`` `repo`_ on GitHub.
+
+- Fork and clone the ``python-automl`` repository to your GitHub account by
+ clicking the "Fork" button.
+
+- Clone your fork of ``python-automl`` from your GitHub account to your local
+ computer, substituting your account username and specifying the destination
+ as ``hack-on-python-automl``. E.g.::
+
+ $ cd ${HOME}
+ $ git clone git@github.com:USERNAME/python-automl.git hack-on-python-automl
+ $ cd hack-on-python-automl
+ # Configure remotes such that you can pull changes from the googleapis/python-automl
+ # repository into your local repository.
+ $ git remote add upstream git@github.com:googleapis/python-automl.git
+ # fetch and merge changes from upstream into master
+ $ git fetch upstream
+ $ git merge upstream/master
+
+Now your local repo is set up such that you will push changes to your GitHub
+repo, from which you can submit a pull request.
+
+To work on the codebase and run the tests, we recommend using ``nox``,
+but you can also use a ``virtualenv`` of your own creation.
+
+.. _repo: https://github.com/googleapis/python-automl
+
+Using ``nox``
+=============
+
+We use `nox <https://pypi.org/project/nox/>`__ to instrument our tests.
+
+- To test your changes, run unit tests with ``nox``::
+
+ $ nox -s unit-2.7
+ $ nox -s unit-3.7
+ $ ...
+
+ .. note::
+
+ The unit tests and system tests are described in the
+ ``noxfile.py`` files in each directory.
+
+.. nox: https://pypi.org/project/nox/
+
+Note on Editable Installs / Develop Mode
+========================================
+
+- As mentioned previously, using ``setuptools`` in `develop mode`_
+ or a ``pip`` `editable install`_ is not possible with this
+ library. This is because this library uses `namespace packages`_.
+ For context see `Issue #2316`_ and the relevant `PyPA issue`_.
+
+ Since ``editable`` / ``develop`` mode can't be used, packages
+ need to be installed directly. Hence your changes to the source
+ tree don't get incorporated into the **already installed**
+ package.
+
+.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
+.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
+.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
+.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
+.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
+
+*****************************************
+I'm getting weird errors... Can you help?
+*****************************************
+
+If the error mentions ``Python.h`` not being found,
+install ``python-dev`` and try again.
+On Debian/Ubuntu::
+
+ $ sudo apt-get install python-dev
+
+************
+Coding Style
+************
+
+- PEP8 compliance, with exceptions defined in the linter configuration.
+ If you have ``nox`` installed, you can test that you have not introduced
+ any non-compliant code via::
+
+ $ nox -s lint
+
+- In order to make ``nox -s lint`` run faster, you can set some environment
+ variables::
+
+ export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
+ export GOOGLE_CLOUD_TESTING_BRANCH="master"
+
+ By doing this, you are specifying the location of the most up-to-date
+  version of ``python-automl``. The suggested remote name ``upstream``
+  should point to the official ``googleapis`` checkout and the
+  branch should be the main branch on that remote (``master``).
+
+Exceptions to PEP8:
+
+- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for
+ "Function-Under-Test"), which is PEP8-incompliant, but more readable.
+ Some also use a local variable, ``MUT`` (short for "Module-Under-Test").
+
+********************
+Running System Tests
+********************
+
+- To run system tests, you can execute::
+
+ $ nox -s system-3.7
+ $ nox -s system-2.7
+
+ .. note::
+
+ System tests are only configured to run under Python 2.7 and
+ Python 3.7. For expediency, we do not run them in older versions
+ of Python 3.
+
+ This alone will not run the tests. You'll need to change some local
+ auth settings and change some configuration in your project to
+ run all the tests.
+
+- System tests will be run against an actual project and
+ so you'll need to provide some environment variables to facilitate
+ authentication to your project:
+
+ - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file;
+ Such a file can be downloaded directly from the developer's console by clicking
+ "Generate new JSON key". See private key
+  `docs <https://cloud.google.com/storage/docs/authentication#generating-a-private-key>`__
+ for more details.
+
+- Once you have downloaded your json keys, set the environment variable
+ ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file::
+
+   $ export GOOGLE_APPLICATION_CREDENTIALS="/Users/<username>/path/to/app_credentials.json"
+
+
+*************
+Test Coverage
+*************
+
+- The codebase *must* have 100% test statement coverage after each commit.
+ You can test coverage via ``nox -s cover``.
+
+******************************************************
+Documentation Coverage and Building HTML Documentation
+******************************************************
+
+If you fix a bug, and the bug requires an API or behavior modification, all
+documentation in this package which references that API or behavior must be
+changed to reflect the bug fix, ideally in the same commit that fixes the bug
+or adds the feature.
+
+Build the docs via:
+
+ $ nox -s docs
+
+********************************************
+Note About ``README`` as it pertains to PyPI
+********************************************
+
+The `description on PyPI`_ for the project comes directly from the
+``README``. Due to the reStructuredText (``rst``) parser used by
+PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
+instead of
+``https://github.com/googleapis/python-automl/blob/master/CONTRIBUTING.rst``)
+may cause problems creating links or rendering the description.
+
+.. _description on PyPI: https://pypi.org/project/google-cloud-automl
+
+
+*************************
+Supported Python Versions
+*************************
+
+We support:
+
+- `Python 3.5`_
+- `Python 3.6`_
+- `Python 3.7`_
+
+.. _Python 3.5: https://docs.python.org/3.5/
+.. _Python 3.6: https://docs.python.org/3.6/
+.. _Python 3.7: https://docs.python.org/3.7/
+
+
+Supported versions can be found in our ``noxfile.py`` `config`_.
+
+.. _config: https://github.com/googleapis/python-automl/blob/master/noxfile.py
+
+We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_
+and lack of continuous integration `support`_.
+
+.. _Python 2.5: https://docs.python.org/2.5/
+.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/
+.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/
+
+We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no
+longer supported by the core development team.
+
+Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020.
+
+We also explicitly decided to support Python 3 beginning with version
+3.5. Reasons for this include:
+
+- Encouraging use of newest versions of Python 3
+- Taking the lead of `prominent`_ open-source `projects`_
+- `Unicode literal support`_ which allows for a cleaner codebase that
+ works in both Python 2 and Python 3
+
+.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django
+.. _projects: http://flask.pocoo.org/docs/0.10/python3/
+.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/
+.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995
+
+**********
+Versioning
+**********
+
+This library follows `Semantic Versioning`_.
+
+.. _Semantic Versioning: http://semver.org/
+
+Some packages are currently in major version zero (``0.y.z``), which means that
+anything may change at any time and the public API should not be considered
+stable.
+
+******************************
+Contributor License Agreements
+******************************
+
+Before we can accept your pull requests you'll need to sign a Contributor
+License Agreement (CLA):
+
+- **If you are an individual writing original source code** and **you own the
+ intellectual property**, then you'll need to sign an
+  `individual CLA <https://developers.google.com/open-source/cla/individual>`__.
+- **If you work for a company that wants to allow you to contribute your work**,
+ then you'll need to sign a
+  `corporate CLA <https://developers.google.com/open-source/cla/corporate>`__.
+
+You can sign these electronically (just scroll to the bottom). After that,
+we'll be able to accept your pull requests.
diff --git a/MANIFEST.in b/MANIFEST.in
index 9cbf175a..cd011be2 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,3 +1,4 @@
+# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
recursive-include google *.json *.proto
recursive-include tests *
diff --git a/docs/conf.py b/docs/conf.py
index 9ac18387..0b6aebeb 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,7 +20,7 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
-__version__ = "0.1.0"
+__version__ = ""
# -- General configuration ------------------------------------------------
@@ -66,7 +66,7 @@
# General information about the project.
project = u"google-cloud-automl"
-copyright = u"2017, Google"
+copyright = u"2019, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
@@ -133,9 +133,9 @@
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
- "description": "Google Cloud Client Libraries for Python",
+ "description": "Google Cloud Client Libraries for google-cloud-automl",
"github_user": "googleapis",
- "github_repo": "google-cloud-python",
+ "github_repo": "python-automl",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
@@ -164,7 +164,7 @@
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-# html_static_path = []
+html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
@@ -318,7 +318,7 @@
u"google-cloud-automl Documentation",
author,
"google-cloud-automl",
- "GAPIC library for the {metadata.shortName} v1 service",
+ "google-cloud-automl Library",
"APIs",
)
]
@@ -339,14 +339,9 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
- "gax": ("https://gax-python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
- "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
- "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None),
+ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None),
"grpc": ("https://grpc.io/grpc/python/", None),
- "requests": ("https://requests.kennethreitz.org/en/master/", None),
- "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None),
- "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
}
diff --git a/google/cloud/automl.py b/google/cloud/automl.py
index 9f96f4f4..c1dc4ee7 100644
--- a/google/cloud/automl.py
+++ b/google/cloud/automl.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/__init__.py b/google/cloud/automl_v1/__init__.py
index f68180a5..a663f191 100644
--- a/google/cloud/automl_v1/__init__.py
+++ b/google/cloud/automl_v1/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
from __future__ import absolute_import
+import sys
+import warnings
from google.cloud.automl_v1 import types
from google.cloud.automl_v1.gapic import auto_ml_client
@@ -23,6 +25,15 @@
from google.cloud.automl_v1.gapic import prediction_service_client
+if sys.version_info[:2] == (2, 7):
+ message = (
+        "A future version of this library will drop support for Python 2.7. "
+        "More details about Python 2 support for Google Cloud Client Libraries "
+ "can be found at https://cloud.google.com/python/docs/python2-sunset/"
+ )
+ warnings.warn(message, DeprecationWarning)
+
+
class AutoMlClient(auto_ml_client.AutoMlClient):
__doc__ = auto_ml_client.AutoMlClient.__doc__
enums = enums
diff --git a/google/cloud/automl_v1/gapic/auto_ml_client.py b/google/cloud/automl_v1/gapic/auto_ml_client.py
index 5fd2978d..c1f6ed3d 100644
--- a/google/cloud/automl_v1/gapic/auto_ml_client.py
+++ b/google/cloud/automl_v1/gapic/auto_ml_client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/gapic/enums.py b/google/cloud/automl_v1/gapic/enums.py
index 3f5d6101..8e525587 100644
--- a/google/cloud/automl_v1/gapic/enums.py
+++ b/google/cloud/automl_v1/gapic/enums.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/gapic/prediction_service_client.py b/google/cloud/automl_v1/gapic/prediction_service_client.py
index e8a6040e..d6df5e54 100644
--- a/google/cloud/automl_v1/gapic/prediction_service_client.py
+++ b/google/cloud/automl_v1/gapic/prediction_service_client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py
index 86aa8cbc..c5f6bfa7 100644
--- a/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py
+++ b/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py b/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py
index ecdc9a31..9d494540 100644
--- a/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py
+++ b/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1/proto/annotation_payload_pb2.py b/google/cloud/automl_v1/proto/annotation_payload_pb2.py
index 8d9817bd..7840971f 100644
--- a/google/cloud/automl_v1/proto/annotation_payload_pb2.py
+++ b/google/cloud/automl_v1/proto/annotation_payload_pb2.py
@@ -274,7 +274,8 @@
dict(
DESCRIPTOR=_ANNOTATIONPAYLOAD,
__module__="google.cloud.automl_v1.proto.annotation_payload_pb2",
- __doc__="""Contains annotation information that is relevant to AutoML.
+ __doc__="""Contains annotation information that is relevant to
+ AutoML.
Attributes:
diff --git a/google/cloud/automl_v1/proto/data_items_pb2.py b/google/cloud/automl_v1/proto/data_items_pb2.py
index 021a01b7..2469b20e 100644
--- a/google/cloud/automl_v1/proto/data_items_pb2.py
+++ b/google/cloud/automl_v1/proto/data_items_pb2.py
@@ -702,8 +702,8 @@
dict(
DESCRIPTOR=_IMAGE,
__module__="google.cloud.automl_v1.proto.data_items_pb2",
- __doc__="""A representation of an image. Only images up to 30MB in size are
- supported.
+ __doc__="""A representation of an image. Only images up to 30MB in
+ size are supported.
Attributes:
diff --git a/google/cloud/automl_v1/proto/dataset_pb2.py b/google/cloud/automl_v1/proto/dataset_pb2.py
index bdc1f3b6..ad20d7b1 100644
--- a/google/cloud/automl_v1/proto/dataset_pb2.py
+++ b/google/cloud/automl_v1/proto/dataset_pb2.py
@@ -457,8 +457,9 @@
),
DESCRIPTOR=_DATASET,
__module__="google.cloud.automl_v1.proto.dataset_pb2",
- __doc__="""A workspace for solving a single, particular machine learning (ML)
- problem. A workspace contains examples that may be annotated.
+ __doc__="""A workspace for solving a single, particular machine
+ learning (ML) problem. A workspace contains examples that may be
+ annotated.
Attributes:
diff --git a/google/cloud/automl_v1/proto/detection_pb2.py b/google/cloud/automl_v1/proto/detection_pb2.py
index e0a5af99..01daff60 100644
--- a/google/cloud/automl_v1/proto/detection_pb2.py
+++ b/google/cloud/automl_v1/proto/detection_pb2.py
@@ -412,8 +412,9 @@
),
DESCRIPTOR=_BOUNDINGBOXMETRICSENTRY,
__module__="google.cloud.automl_v1.proto.detection_pb2",
- __doc__="""Bounding box matching model metrics for a single intersection-over-union
- threshold and multiple label match confidence thresholds.
+ __doc__="""Bounding box matching model metrics for a single
+ intersection-over-union threshold and multiple label match confidence
+ thresholds.
Attributes:
@@ -441,8 +442,8 @@
dict(
DESCRIPTOR=_IMAGEOBJECTDETECTIONEVALUATIONMETRICS,
__module__="google.cloud.automl_v1.proto.detection_pb2",
- __doc__="""Model evaluation metrics for image object detection problems. Evaluates
- prediction quality of labeled bounding boxes.
+ __doc__="""Model evaluation metrics for image object detection
+ problems. Evaluates prediction quality of labeled bounding boxes.
Attributes:
diff --git a/google/cloud/automl_v1/proto/geometry_pb2.py b/google/cloud/automl_v1/proto/geometry_pb2.py
index 9f3cd673..034f8ba6 100644
--- a/google/cloud/automl_v1/proto/geometry_pb2.py
+++ b/google/cloud/automl_v1/proto/geometry_pb2.py
@@ -156,9 +156,9 @@
dict(
DESCRIPTOR=_BOUNDINGPOLY,
__module__="google.cloud.automl_v1.proto.geometry_pb2",
- __doc__="""A bounding polygon of a detected object on a plane. On output both
- vertices and normalized\_vertices are provided. The polygon is formed by
- connecting vertices in the order they are listed.
+ __doc__="""A bounding polygon of a detected object on a plane. On
+ output both vertices and normalized\_vertices are provided. The polygon
+ is formed by connecting vertices in the order they are listed.
Attributes:
diff --git a/google/cloud/automl_v1/proto/image_pb2.py b/google/cloud/automl_v1/proto/image_pb2.py
index 4935ede9..afaf938c 100644
--- a/google/cloud/automl_v1/proto/image_pb2.py
+++ b/google/cloud/automl_v1/proto/image_pb2.py
@@ -508,6 +508,7 @@
DESCRIPTOR=_IMAGEOBJECTDETECTIONDATASETMETADATA,
__module__="google.cloud.automl_v1.proto.image_pb2",
__doc__="""Dataset metadata specific to image object detection.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ImageObjectDetectionDatasetMetadata)
),
@@ -668,7 +669,8 @@
dict(
DESCRIPTOR=_IMAGECLASSIFICATIONMODELDEPLOYMENTMETADATA,
__module__="google.cloud.automl_v1.proto.image_pb2",
- __doc__="""Model deployment metadata specific to Image Classification.
+ __doc__="""Model deployment metadata specific to Image
+ Classification.
Attributes:
@@ -690,7 +692,8 @@
dict(
DESCRIPTOR=_IMAGEOBJECTDETECTIONMODELDEPLOYMENTMETADATA,
__module__="google.cloud.automl_v1.proto.image_pb2",
- __doc__="""Model deployment metadata specific to Image Object Detection.
+ __doc__="""Model deployment metadata specific to Image Object
+ Detection.
Attributes:
diff --git a/google/cloud/automl_v1/proto/io_pb2.py b/google/cloud/automl_v1/proto/io_pb2.py
index 636deef8..b0a5f000 100644
--- a/google/cloud/automl_v1/proto/io_pb2.py
+++ b/google/cloud/automl_v1/proto/io_pb2.py
@@ -1284,7 +1284,8 @@
dict(
DESCRIPTOR=_DOCUMENTINPUTCONFIG,
__module__="google.cloud.automl_v1.proto.io_pb2",
- __doc__="""Input configuration of a [Document][google.cloud.automl.v1.Document].
+ __doc__="""Input configuration of a
+ [Document][google.cloud.automl.v1.Document].
Attributes:
@@ -1579,7 +1580,8 @@
dict(
DESCRIPTOR=_GCSDESTINATION,
__module__="google.cloud.automl_v1.proto.io_pb2",
- __doc__="""The Google Cloud Storage location where the output is to be written to.
+ __doc__="""The Google Cloud Storage location where the output is to
+ be written to.
Attributes:
diff --git a/google/cloud/automl_v1/proto/operations_pb2.py b/google/cloud/automl_v1/proto/operations_pb2.py
index 0dc1e0e2..23e2a503 100644
--- a/google/cloud/automl_v1/proto/operations_pb2.py
+++ b/google/cloud/automl_v1/proto/operations_pb2.py
@@ -864,7 +864,8 @@
dict(
DESCRIPTOR=_OPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
- __doc__="""Metadata used across all long running operations returned by AutoML API.
+ __doc__="""Metadata used across all long running operations returned
+ by AutoML API.
Attributes:
@@ -915,7 +916,9 @@
dict(
DESCRIPTOR=_DELETEOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
- __doc__="""Details of operations that perform deletes of any entities.
+ __doc__="""Details of operations that perform deletes of any
+ entities.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteOperationMetadata)
),
@@ -929,6 +932,7 @@
DESCRIPTOR=_DEPLOYMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
__doc__="""Details of DeployModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeployModelOperationMetadata)
),
@@ -942,6 +946,7 @@
DESCRIPTOR=_UNDEPLOYMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
__doc__="""Details of UndeployModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UndeployModelOperationMetadata)
),
@@ -955,6 +960,7 @@
DESCRIPTOR=_CREATEDATASETOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
__doc__="""Details of CreateDataset operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.CreateDatasetOperationMetadata)
),
@@ -968,6 +974,7 @@
DESCRIPTOR=_CREATEMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
__doc__="""Details of CreateModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.CreateModelOperationMetadata)
),
@@ -981,6 +988,7 @@
DESCRIPTOR=_IMPORTDATAOPERATIONMETADATA,
__module__="google.cloud.automl_v1.proto.operations_pb2",
__doc__="""Details of ImportData operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ImportDataOperationMetadata)
),
diff --git a/google/cloud/automl_v1/proto/text_pb2.py b/google/cloud/automl_v1/proto/text_pb2.py
index ae52da7c..90835077 100644
--- a/google/cloud/automl_v1/proto/text_pb2.py
+++ b/google/cloud/automl_v1/proto/text_pb2.py
@@ -288,6 +288,7 @@
DESCRIPTOR=_TEXTEXTRACTIONDATASETMETADATA,
__module__="google.cloud.automl_v1.proto.text_pb2",
__doc__="""Dataset metadata that is specific to text extraction
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.TextExtractionDatasetMetadata)
),
@@ -301,6 +302,7 @@
DESCRIPTOR=_TEXTEXTRACTIONMODELMETADATA,
__module__="google.cloud.automl_v1.proto.text_pb2",
__doc__="""Model metadata that is specific to text extraction.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.TextExtractionModelMetadata)
),
@@ -338,6 +340,7 @@
DESCRIPTOR=_TEXTSENTIMENTMODELMETADATA,
__module__="google.cloud.automl_v1.proto.text_pb2",
__doc__="""Model metadata that is specific to text sentiment.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1.TextSentimentModelMetadata)
),
diff --git a/google/cloud/automl_v1/proto/text_segment_pb2.py b/google/cloud/automl_v1/proto/text_segment_pb2.py
index b58d2d7f..ec762046 100644
--- a/google/cloud/automl_v1/proto/text_segment_pb2.py
+++ b/google/cloud/automl_v1/proto/text_segment_pb2.py
@@ -115,8 +115,8 @@
dict(
DESCRIPTOR=_TEXTSEGMENT,
__module__="google.cloud.automl_v1.proto.text_segment_pb2",
- __doc__="""A contiguous part of a text (string), assuming it has an UTF-8 NFC
- encoding.
+ __doc__="""A contiguous part of a text (string), assuming it has an
+ UTF-8 NFC encoding.
Attributes:
diff --git a/google/cloud/automl_v1/types.py b/google/cloud/automl_v1/types.py
index 628bcf59..4b475551 100644
--- a/google/cloud/automl_v1/types.py
+++ b/google/cloud/automl_v1/types.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/__init__.py b/google/cloud/automl_v1beta1/__init__.py
index 57ed250a..5a247e95 100644
--- a/google/cloud/automl_v1beta1/__init__.py
+++ b/google/cloud/automl_v1beta1/__init__.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -16,6 +16,8 @@
from __future__ import absolute_import
+import sys
+import warnings
from google.cloud.automl_v1beta1 import types
from google.cloud.automl_v1beta1.gapic import auto_ml_client
@@ -33,6 +35,15 @@ class GcsClient(gcs_client.GcsClient):
__doc__ = gcs_client.GcsClient.__doc__
+if sys.version_info[:2] == (2, 7):
+ message = (
+        "A future version of this library will drop support for Python 2.7. "
+        "More details about Python 2 support for Google Cloud Client Libraries "
+ "can be found at https://cloud.google.com/python/docs/python2-sunset/"
+ )
+ warnings.warn(message, DeprecationWarning)
+
+
class AutoMlClient(auto_ml_client.AutoMlClient):
__doc__ = auto_ml_client.AutoMlClient.__doc__
enums = enums
diff --git a/google/cloud/automl_v1beta1/gapic/auto_ml_client.py b/google/cloud/automl_v1beta1/gapic/auto_ml_client.py
index b2cb38c3..fc91ddf0 100644
--- a/google/cloud/automl_v1beta1/gapic/auto_ml_client.py
+++ b/google/cloud/automl_v1beta1/gapic/auto_ml_client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/gapic/enums.py b/google/cloud/automl_v1beta1/gapic/enums.py
index 9d817f8e..9f09f9ce 100644
--- a/google/cloud/automl_v1beta1/gapic/enums.py
+++ b/google/cloud/automl_v1beta1/gapic/enums.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/gapic/prediction_service_client.py b/google/cloud/automl_v1beta1/gapic/prediction_service_client.py
index 3342fecd..57cedc90 100644
--- a/google/cloud/automl_v1beta1/gapic/prediction_service_client.py
+++ b/google/cloud/automl_v1beta1/gapic/prediction_service_client.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/gapic/transports/auto_ml_grpc_transport.py b/google/cloud/automl_v1beta1/gapic/transports/auto_ml_grpc_transport.py
index 0efa164f..106c3c6c 100644
--- a/google/cloud/automl_v1beta1/gapic/transports/auto_ml_grpc_transport.py
+++ b/google/cloud/automl_v1beta1/gapic/transports/auto_ml_grpc_transport.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/gapic/transports/prediction_service_grpc_transport.py b/google/cloud/automl_v1beta1/gapic/transports/prediction_service_grpc_transport.py
index 14371192..69ebca84 100644
--- a/google/cloud/automl_v1beta1/gapic/transports/prediction_service_grpc_transport.py
+++ b/google/cloud/automl_v1beta1/gapic/transports/prediction_service_grpc_transport.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/google/cloud/automl_v1beta1/proto/annotation_payload_pb2.py b/google/cloud/automl_v1beta1/proto/annotation_payload_pb2.py
index f05435b9..f8036318 100644
--- a/google/cloud/automl_v1beta1/proto/annotation_payload_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/annotation_payload_pb2.py
@@ -365,7 +365,8 @@
dict(
DESCRIPTOR=_ANNOTATIONPAYLOAD,
__module__="google.cloud.automl_v1beta1.proto.annotation_payload_pb2",
- __doc__="""Contains annotation information that is relevant to AutoML.
+ __doc__="""Contains annotation information that is relevant to
+ AutoML.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/classification_pb2.py b/google/cloud/automl_v1beta1/proto/classification_pb2.py
index bd389d22..68651a84 100644
--- a/google/cloud/automl_v1beta1/proto/classification_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/classification_pb2.py
@@ -782,7 +782,8 @@
dict(
DESCRIPTOR=_VIDEOCLASSIFICATIONANNOTATION,
__module__="google.cloud.automl_v1beta1.proto.classification_pb2",
- __doc__="""Contains annotation details specific to video classification.
+ __doc__="""Contains annotation details specific to video
+ classification.
Attributes:
@@ -944,9 +945,9 @@
),
DESCRIPTOR=_CLASSIFICATIONEVALUATIONMETRICS,
__module__="google.cloud.automl_v1beta1.proto.classification_pb2",
- __doc__="""Model evaluation metrics for classification problems. Note: For Video
- Classification this metrics only describe quality of the Video
- Classification predictions of "segment\_classification" type.
+ __doc__="""Model evaluation metrics for classification problems.
+ Note: For Video Classification this metrics only describe quality of the
+ Video Classification predictions of "segment\_classification" type.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/column_spec_pb2.py b/google/cloud/automl_v1beta1/proto/column_spec_pb2.py
index 1a6685c9..844bc058 100644
--- a/google/cloud/automl_v1beta1/proto/column_spec_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/column_spec_pb2.py
@@ -258,8 +258,8 @@
dict(
DESCRIPTOR=_COLUMNSPEC_CORRELATEDCOLUMN,
__module__="google.cloud.automl_v1beta1.proto.column_spec_pb2",
- __doc__="""Identifies the table's column, and its correlation with the column this
- ColumnSpec describes.
+ __doc__="""Identifies the table's column, and its correlation with
+ the column this ColumnSpec describes.
Attributes:
@@ -274,9 +274,9 @@
),
DESCRIPTOR=_COLUMNSPEC,
__module__="google.cloud.automl_v1beta1.proto.column_spec_pb2",
- __doc__="""A representation of a column in a relational table. When listing them,
- column specs are returned in the same order in which they were given on
- import . Used by: \* Tables
+ __doc__="""A representation of a column in a relational table. When
+ listing them, column specs are returned in the same order in which they
+ were given on import . Used by: \* Tables
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/data_items_pb2.py b/google/cloud/automl_v1beta1/proto/data_items_pb2.py
index 546efc1b..c76bcf28 100644
--- a/google/cloud/automl_v1beta1/proto/data_items_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/data_items_pb2.py
@@ -809,8 +809,8 @@
dict(
DESCRIPTOR=_IMAGE,
__module__="google.cloud.automl_v1beta1.proto.data_items_pb2",
- __doc__="""A representation of an image. Only images up to 30MB in size are
- supported.
+ __doc__="""A representation of an image. Only images up to 30MB in
+ size are supported.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/data_stats_pb2.py b/google/cloud/automl_v1beta1/proto/data_stats_pb2.py
index f0415f19..85f18cee 100644
--- a/google/cloud/automl_v1beta1/proto/data_stats_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/data_stats_pb2.py
@@ -1029,7 +1029,8 @@
dict(
DESCRIPTOR=_DATASTATS,
__module__="google.cloud.automl_v1beta1.proto.data_stats_pb2",
- __doc__="""The data statistics of a series of values that share the same DataType.
+ __doc__="""The data statistics of a series of values that share the
+ same DataType.
Attributes:
@@ -1307,9 +1308,9 @@
dict(
DESCRIPTOR=_CORRELATIONSTATS,
__module__="google.cloud.automl_v1beta1.proto.data_stats_pb2",
- __doc__="""A correlation statistics between two series of DataType values. The
- series may have differing DataType-s, but within a single series the
- DataType must be the same.
+ __doc__="""A correlation statistics between two series of DataType
+ values. The series may have differing DataType-s, but within a single
+ series the DataType must be the same.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/data_types_pb2.py b/google/cloud/automl_v1beta1/proto/data_types_pb2.py
index 631850a5..96121059 100644
--- a/google/cloud/automl_v1beta1/proto/data_types_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/data_types_pb2.py
@@ -329,8 +329,8 @@
dict(
DESCRIPTOR=_DATATYPE,
__module__="google.cloud.automl_v1beta1.proto.data_types_pb2",
- __doc__="""Indicated the type of data that can be stored in a structured data
- entity (e.g. a table).
+ __doc__="""Indicated the type of data that can be stored in a
+ structured data entity (e.g. a table).
Attributes:
@@ -386,7 +386,7 @@
),
DESCRIPTOR=_STRUCTTYPE,
__module__="google.cloud.automl_v1beta1.proto.data_types_pb2",
- __doc__="""``StructType`` defines the DataType-s of a
+ __doc__="""\ ``StructType`` defines the DataType-s of a
[STRUCT][google.cloud.automl.v1beta1.TypeCode.STRUCT] type.
diff --git a/google/cloud/automl_v1beta1/proto/dataset_pb2.py b/google/cloud/automl_v1beta1/proto/dataset_pb2.py
index b313da91..fa8b2153 100644
--- a/google/cloud/automl_v1beta1/proto/dataset_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/dataset_pb2.py
@@ -465,8 +465,9 @@
dict(
DESCRIPTOR=_DATASET,
__module__="google.cloud.automl_v1beta1.proto.dataset_pb2",
- __doc__="""A workspace for solving a single, particular machine learning (ML)
- problem. A workspace contains examples that may be annotated.
+ __doc__="""A workspace for solving a single, particular machine
+ learning (ML) problem. A workspace contains examples that may be
+ annotated.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/detection_pb2.py b/google/cloud/automl_v1beta1/proto/detection_pb2.py
index c9c4296f..ab328c84 100644
--- a/google/cloud/automl_v1beta1/proto/detection_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/detection_pb2.py
@@ -651,8 +651,9 @@
),
DESCRIPTOR=_BOUNDINGBOXMETRICSENTRY,
__module__="google.cloud.automl_v1beta1.proto.detection_pb2",
- __doc__="""Bounding box matching model metrics for a single intersection-over-union
- threshold and multiple label match confidence thresholds.
+ __doc__="""Bounding box matching model metrics for a single
+ intersection-over-union threshold and multiple label match confidence
+ thresholds.
Attributes:
@@ -680,8 +681,8 @@
dict(
DESCRIPTOR=_IMAGEOBJECTDETECTIONEVALUATIONMETRICS,
__module__="google.cloud.automl_v1beta1.proto.detection_pb2",
- __doc__="""Model evaluation metrics for image object detection problems. Evaluates
- prediction quality of labeled bounding boxes.
+ __doc__="""Model evaluation metrics for image object detection
+ problems. Evaluates prediction quality of labeled bounding boxes.
Attributes:
@@ -711,9 +712,10 @@
dict(
DESCRIPTOR=_VIDEOOBJECTTRACKINGEVALUATIONMETRICS,
__module__="google.cloud.automl_v1beta1.proto.detection_pb2",
- __doc__="""Model evaluation metrics for video object tracking problems. Evaluates
- prediction quality of both labeled bounding boxes and labeled tracks
- (i.e. series of bounding boxes sharing same label and instance ID).
+ __doc__="""Model evaluation metrics for video object tracking
+ problems. Evaluates prediction quality of both labeled bounding boxes
+ and labeled tracks (i.e. series of bounding boxes sharing same label and
+ instance ID).
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/geometry_pb2.py b/google/cloud/automl_v1beta1/proto/geometry_pb2.py
index 935a2e4d..324d76f5 100644
--- a/google/cloud/automl_v1beta1/proto/geometry_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/geometry_pb2.py
@@ -156,9 +156,9 @@
dict(
DESCRIPTOR=_BOUNDINGPOLY,
__module__="google.cloud.automl_v1beta1.proto.geometry_pb2",
- __doc__="""A bounding polygon of a detected object on a plane. On output both
- vertices and normalized\_vertices are provided. The polygon is formed by
- connecting vertices in the order they are listed.
+ __doc__="""A bounding polygon of a detected object on a plane. On
+ output both vertices and normalized\_vertices are provided. The polygon
+ is formed by connecting vertices in the order they are listed.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/image_pb2.py b/google/cloud/automl_v1beta1/proto/image_pb2.py
index 37751239..3a0a54a4 100644
--- a/google/cloud/automl_v1beta1/proto/image_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/image_pb2.py
@@ -506,6 +506,7 @@
DESCRIPTOR=_IMAGEOBJECTDETECTIONDATASETMETADATA,
__module__="google.cloud.automl_v1beta1.proto.image_pb2",
__doc__="""Dataset metadata specific to image object detection.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.ImageObjectDetectionDatasetMetadata)
),
@@ -654,7 +655,8 @@
dict(
DESCRIPTOR=_IMAGECLASSIFICATIONMODELDEPLOYMENTMETADATA,
__module__="google.cloud.automl_v1beta1.proto.image_pb2",
- __doc__="""Model deployment metadata specific to Image Classification.
+ __doc__="""Model deployment metadata specific to Image
+ Classification.
Attributes:
@@ -676,7 +678,8 @@
dict(
DESCRIPTOR=_IMAGEOBJECTDETECTIONMODELDEPLOYMENTMETADATA,
__module__="google.cloud.automl_v1beta1.proto.image_pb2",
- __doc__="""Model deployment metadata specific to Image Object Detection.
+ __doc__="""Model deployment metadata specific to Image Object
+ Detection.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/io_pb2.py b/google/cloud/automl_v1beta1/proto/io_pb2.py
index d875a635..c2fb6138 100644
--- a/google/cloud/automl_v1beta1/proto/io_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/io_pb2.py
@@ -1354,12 +1354,11 @@
dict(
DESCRIPTOR=_OUTPUTCONFIG,
__module__="google.cloud.automl_v1beta1.proto.io_pb2",
- __doc__="""
-- For Translation: CSV file ``translation.csv``, with each line in
- format: ML\_USE,GCS\_FILE\_PATH GCS\_FILE\_PATH leads to a .TSV file
- which describes examples that have given ML\_USE, using the following
- row format per line: TEXT\_SNIPPET (in source language)
- \\tTEXT\_SNIPPET (in target language)
+ __doc__="""\* For Translation: CSV file ``translation.csv``, with
+ each line in format: ML\_USE,GCS\_FILE\_PATH GCS\_FILE\_PATH leads to a
+ .TSV file which describes examples that have given ML\_USE, using the
+ following row format per line: TEXT\_SNIPPET (in source language)
+ \\tTEXT\_SNIPPET (in target language)
- For Tables: Output depends on whether the dataset was imported from
GCS or BigQuery. GCS case:
@@ -1769,10 +1768,10 @@
dict(
DESCRIPTOR=_EXPORTEVALUATEDEXAMPLESOUTPUTCONFIG,
__module__="google.cloud.automl_v1beta1.proto.io_pb2",
- __doc__="""Output configuration for ExportEvaluatedExamples Action. Note that this
- call is available only for 30 days since the moment the model was
- evaluated. The output depends on the domain, as follows (note that only
- examples from the TEST set are exported):
+ __doc__="""Output configuration for ExportEvaluatedExamples Action.
+ Note that this call is available only for 30 days since the moment the
+ model was evaluated. The output depends on the domain, as follows (note
+ that only examples from the TEST set are exported):
- For Tables:
@@ -1855,7 +1854,8 @@
dict(
DESCRIPTOR=_GCSDESTINATION,
__module__="google.cloud.automl_v1beta1.proto.io_pb2",
- __doc__="""The Google Cloud Storage location where the output is to be written to.
+ __doc__="""The Google Cloud Storage location where the output is to
+ be written to.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/operations_pb2.py b/google/cloud/automl_v1beta1/proto/operations_pb2.py
index 166ef096..d1b13233 100644
--- a/google/cloud/automl_v1beta1/proto/operations_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/operations_pb2.py
@@ -987,7 +987,8 @@
dict(
DESCRIPTOR=_OPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
- __doc__="""Metadata used across all long running operations returned by AutoML API.
+ __doc__="""Metadata used across all long running operations returned
+ by AutoML API.
Attributes:
@@ -1038,7 +1039,9 @@
dict(
DESCRIPTOR=_DELETEOPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
- __doc__="""Details of operations that perform deletes of any entities.
+ __doc__="""Details of operations that perform deletes of any
+ entities.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.DeleteOperationMetadata)
),
@@ -1052,6 +1055,7 @@
DESCRIPTOR=_DEPLOYMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
__doc__="""Details of DeployModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.DeployModelOperationMetadata)
),
@@ -1065,6 +1069,7 @@
DESCRIPTOR=_UNDEPLOYMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
__doc__="""Details of UndeployModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.UndeployModelOperationMetadata)
),
@@ -1078,6 +1083,7 @@
DESCRIPTOR=_CREATEMODELOPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
__doc__="""Details of CreateModel operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.CreateModelOperationMetadata)
),
@@ -1091,6 +1097,7 @@
DESCRIPTOR=_IMPORTDATAOPERATIONMETADATA,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
__doc__="""Details of ImportData operation.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.ImportDataOperationMetadata)
),
@@ -1238,8 +1245,8 @@
dict(
DESCRIPTOR=_EXPORTEVALUATEDEXAMPLESOPERATIONMETADATA_EXPORTEVALUATEDEXAMPLESOUTPUTINFO,
__module__="google.cloud.automl_v1beta1.proto.operations_pb2",
- __doc__="""Further describes the output of the evaluated examples export.
- Supplements
+ __doc__="""Further describes the output of the evaluated examples
+ export. Supplements
[ExportEvaluatedExamplesOutputConfig][google.cloud.automl.v1beta1.ExportEvaluatedExamplesOutputConfig].
diff --git a/google/cloud/automl_v1beta1/proto/table_spec_pb2.py b/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
index 9f5f386f..48aa9178 100644
--- a/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/table_spec_pb2.py
@@ -196,13 +196,13 @@
dict(
DESCRIPTOR=_TABLESPEC,
__module__="google.cloud.automl_v1beta1.proto.table_spec_pb2",
- __doc__="""A specification of a relational table. The table's schema is represented
- via its child column specs. It is pre-populated as part of ImportData by
- schema inference algorithm, the version of which is a required parameter
- of ImportData InputConfig. Note: While working with a table, at times
- the schema may be inconsistent with the data in the table (e.g. string
- in a FLOAT64 column). The consistency validation is done upon creation
- of a model. Used by: \* Tables
+ __doc__="""A specification of a relational table. The table's schema
+ is represented via its child column specs. It is pre-populated as part
+ of ImportData by schema inference algorithm, the version of which is a
+ required parameter of ImportData InputConfig. Note: While working with a
+ table, at times the schema may be inconsistent with the data in the
+ table (e.g. string in a FLOAT64 column). The consistency validation is
+ done upon creation of a model. Used by: \* Tables
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/tables_pb2.py b/google/cloud/automl_v1beta1/proto/tables_pb2.py
index dbbbf586..4659aa8d 100644
--- a/google/cloud/automl_v1beta1/proto/tables_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/tables_pb2.py
@@ -892,8 +892,8 @@
dict(
DESCRIPTOR=_TABLESMODELCOLUMNINFO,
__module__="google.cloud.automl_v1beta1.proto.tables_pb2",
- __doc__="""An information specific to given column and Tables Model, in context of
- the Model and the predictions created by it.
+ __doc__="""An information specific to given column and Tables Model,
+ in context of the Model and the predictions created by it.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/temporal_pb2.py b/google/cloud/automl_v1beta1/proto/temporal_pb2.py
index 99fa8d66..a8e53db8 100644
--- a/google/cloud/automl_v1beta1/proto/temporal_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/temporal_pb2.py
@@ -107,8 +107,8 @@
dict(
DESCRIPTOR=_TIMESEGMENT,
__module__="google.cloud.automl_v1beta1.proto.temporal_pb2",
- __doc__="""A time period inside of an example that has a time dimension (e.g.
- video).
+ __doc__="""A time period inside of an example that has a time
+ dimension (e.g. video).
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/text_pb2.py b/google/cloud/automl_v1beta1/proto/text_pb2.py
index cfab112e..ea8fef3f 100644
--- a/google/cloud/automl_v1beta1/proto/text_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/text_pb2.py
@@ -288,6 +288,7 @@
DESCRIPTOR=_TEXTEXTRACTIONDATASETMETADATA,
__module__="google.cloud.automl_v1beta1.proto.text_pb2",
__doc__="""Dataset metadata that is specific to text extraction
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.TextExtractionDatasetMetadata)
),
@@ -301,6 +302,7 @@
DESCRIPTOR=_TEXTEXTRACTIONMODELMETADATA,
__module__="google.cloud.automl_v1beta1.proto.text_pb2",
__doc__="""Model metadata that is specific to text extraction.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.TextExtractionModelMetadata)
),
@@ -338,6 +340,7 @@
DESCRIPTOR=_TEXTSENTIMENTMODELMETADATA,
__module__="google.cloud.automl_v1beta1.proto.text_pb2",
__doc__="""Model metadata that is specific to text sentiment.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.TextSentimentModelMetadata)
),
diff --git a/google/cloud/automl_v1beta1/proto/text_segment_pb2.py b/google/cloud/automl_v1beta1/proto/text_segment_pb2.py
index 79774587..e896211e 100644
--- a/google/cloud/automl_v1beta1/proto/text_segment_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/text_segment_pb2.py
@@ -115,8 +115,8 @@
dict(
DESCRIPTOR=_TEXTSEGMENT,
__module__="google.cloud.automl_v1beta1.proto.text_segment_pb2",
- __doc__="""A contiguous part of a text (string), assuming it has an UTF-8 NFC
- encoding.
+ __doc__="""A contiguous part of a text (string), assuming it has an
+ UTF-8 NFC encoding.
Attributes:
diff --git a/google/cloud/automl_v1beta1/proto/video_pb2.py b/google/cloud/automl_v1beta1/proto/video_pb2.py
index 156c58b3..1481681a 100644
--- a/google/cloud/automl_v1beta1/proto/video_pb2.py
+++ b/google/cloud/automl_v1beta1/proto/video_pb2.py
@@ -137,8 +137,9 @@
dict(
DESCRIPTOR=_VIDEOCLASSIFICATIONDATASETMETADATA,
__module__="google.cloud.automl_v1beta1.proto.video_pb2",
- __doc__="""Dataset metadata specific to video classification. All Video
- Classification datasets are treated as multi label.
+ __doc__="""Dataset metadata specific to video classification. All
+ Video Classification datasets are treated as multi label.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.VideoClassificationDatasetMetadata)
),
@@ -152,6 +153,7 @@
DESCRIPTOR=_VIDEOOBJECTTRACKINGDATASETMETADATA,
__module__="google.cloud.automl_v1beta1.proto.video_pb2",
__doc__="""Dataset metadata specific to video object tracking.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.VideoObjectTrackingDatasetMetadata)
),
@@ -165,6 +167,7 @@
DESCRIPTOR=_VIDEOCLASSIFICATIONMODELMETADATA,
__module__="google.cloud.automl_v1beta1.proto.video_pb2",
__doc__="""Model metadata specific to video classification.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.VideoClassificationModelMetadata)
),
@@ -178,6 +181,7 @@
DESCRIPTOR=_VIDEOOBJECTTRACKINGMODELMETADATA,
__module__="google.cloud.automl_v1beta1.proto.video_pb2",
__doc__="""Model metadata specific to video object tracking.
+
""",
# @@protoc_insertion_point(class_scope:google.cloud.automl.v1beta1.VideoObjectTrackingModelMetadata)
),
diff --git a/google/cloud/automl_v1beta1/tables/tables_client.py b/google/cloud/automl_v1beta1/tables/tables_client.py
index ab4c3d48..32137db2 100644
--- a/google/cloud/automl_v1beta1/tables/tables_client.py
+++ b/google/cloud/automl_v1beta1/tables/tables_client.py
@@ -22,8 +22,10 @@
from google.api_core.gapic_v1 import client_info
from google.api_core import exceptions
from google.cloud.automl_v1beta1 import gapic
-from google.cloud.automl_v1beta1.proto import data_types_pb2
+from google.cloud.automl_v1beta1.proto import data_types_pb2, data_items_pb2
from google.cloud.automl_v1beta1.tables import gcs_client
+from google.protobuf import struct_pb2
+
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version
_LOGGER = logging.getLogger(__name__)
@@ -50,7 +52,7 @@ def __init__(
):
"""Constructor.
- Example:
+ Example for US region:
>>> from google.cloud import automl_v1beta1
>>>
>>> from google.oauth2 import service_account
@@ -60,6 +62,17 @@ def __init__(
... project='my-project', region='us-central1')
...
+ Example for EU region:
+ >>> from google.cloud import automl_v1beta1
+ >>>
+ >>> from google.oauth2 import service_account
+ >>>
+ >>> client_options = {'api_endpoint': 'eu-automl.googleapis.com:443'}
+ >>> client = automl_v1beta1.TablesClient(
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
+ ... project='my-project', region='eu', client_options=client_options)
+ ...
+
Args:
project (Optional[str]): The project ID of the GCP project all
future calls will default to. Most methods take `project` as an
@@ -104,6 +117,7 @@ def __init__(
else:
client_info_.user_agent = user_agent
client_info_.gapic_version = version
+ kwargs.pop("client_info", None)
if client is None:
self.auto_ml_client = gapic.auto_ml_client.AutoMlClient(
@@ -390,21 +404,39 @@ def __column_spec_name_from_args(
return column_spec_name
- def __type_code_to_value_type(self, type_code, value):
+ def __data_type_to_proto_value(self, data_type, value):
+ type_code = data_type.type_code
if value is None:
- return {"null_value": 0}
+ return struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE)
elif type_code == data_types_pb2.FLOAT64:
- return {"number_value": value}
- elif type_code == data_types_pb2.TIMESTAMP:
- return {"string_value": value}
- elif type_code == data_types_pb2.STRING:
- return {"string_value": value}
+ return struct_pb2.Value(number_value=value)
+ elif (
+ type_code == data_types_pb2.TIMESTAMP
+ or type_code == data_types_pb2.STRING
+ or type_code == data_types_pb2.CATEGORY
+ ):
+ return struct_pb2.Value(string_value=value)
elif type_code == data_types_pb2.ARRAY:
- return {"list_value": value}
+ if isinstance(value, struct_pb2.ListValue):
+ # in case the user passed in a ListValue.
+ return struct_pb2.Value(list_value=value)
+ array = []
+ for item in value:
+ array.append(
+ self.__data_type_to_proto_value(data_type.list_element_type, item)
+ )
+ return struct_pb2.Value(list_value=struct_pb2.ListValue(values=array))
elif type_code == data_types_pb2.STRUCT:
- return {"struct_value": value}
- elif type_code == data_types_pb2.CATEGORY:
- return {"string_value": value}
+ if isinstance(value, struct_pb2.Struct):
+ # in case the user passed in a Struct.
+ return struct_pb2.Value(struct_value=value)
+ struct_value = struct_pb2.Struct()
+ for k, v in value.items():
+ field_value = self.__data_type_to_proto_value(
+ data_type.struct_type.fields[k], v
+ )
+ struct_value.fields[k].CopyFrom(field_value)
+ return struct_pb2.Value(struct_value=struct_value)
else:
raise ValueError("Unknown type_code: {}".format(type_code))
@@ -435,7 +467,7 @@ def list_datasets(self, project=None, region=None, **kwargs):
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> ds = client.list_datasets()
@@ -488,7 +520,7 @@ def get_dataset(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.get_dataset(dataset_display_name='my_dataset')
@@ -550,7 +582,7 @@ def create_dataset(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.create_dataset(dataset_display_name='my_dataset')
@@ -602,7 +634,7 @@ def delete_dataset(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> op = client.delete_dataset(dataset_display_name='my_dataset')
@@ -682,7 +714,7 @@ def import_data(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.create_dataset(dataset_display_name='my_dataset')
@@ -803,7 +835,7 @@ def export_data(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.create_dataset(dataset_display_name='my_dataset')
@@ -889,7 +921,7 @@ def get_table_spec(self, table_spec_name, project=None, region=None, **kwargs):
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.get_table_spec('my_table_spec')
@@ -937,7 +969,7 @@ def list_table_specs(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> for s in client.list_table_specs(dataset_display_name='my_dataset')
@@ -1001,7 +1033,7 @@ def get_column_spec(self, column_spec_name, project=None, region=None, **kwargs)
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.get_column_spec('my_column_spec')
@@ -1051,7 +1083,7 @@ def list_column_specs(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> for s in client.list_column_specs(dataset_display_name='my_dataset')
@@ -1150,7 +1182,7 @@ def update_column_spec(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.update_column_specs(dataset_display_name='my_dataset',
@@ -1282,7 +1314,7 @@ def set_target_column(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.set_target_column(dataset_display_name='my_dataset',
@@ -1399,7 +1431,7 @@ def set_time_column(
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.set_time_column(dataset_display_name='my_dataset',
@@ -1512,7 +1544,7 @@ def clear_time_column(
>>> from google.cloud import automl_v1beta1
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.clear_time_column(dataset_display_name='my_dataset')
@@ -1597,7 +1629,7 @@ def set_weight_column(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.set_weight_column(dataset_display_name='my_dataset',
@@ -1710,7 +1742,7 @@ def clear_weight_column(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.clear_weight_column(dataset_display_name='my_dataset')
@@ -1794,7 +1826,7 @@ def set_test_train_column(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.set_test_train_column(dataset_display_name='my_dataset',
@@ -1908,7 +1940,7 @@ def clear_test_train_column(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.clear_test_train_column(dataset_display_name='my_dataset')
@@ -1979,7 +2011,7 @@ def list_models(self, project=None, region=None, **kwargs):
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> ms = client.list_models()
@@ -2033,7 +2065,7 @@ def list_model_evaluations(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> ms = client.list_model_evaluations(model_display_name='my_model')
@@ -2120,7 +2152,7 @@ def create_model(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> m = client.create_model(
@@ -2278,7 +2310,7 @@ def delete_model(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> op = client.delete_model(model_display_name='my_model')
@@ -2348,7 +2380,7 @@ def get_model_evaluation(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.get_model_evaluation('my_model_evaluation')
@@ -2395,7 +2427,7 @@ def get_model(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> d = client.get_model(model_display_name='my_model')
@@ -2462,7 +2494,7 @@ def deploy_model(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> op = client.deploy_model(model_display_name='my_model')
@@ -2534,7 +2566,7 @@ def undeploy_model(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> op = client.undeploy_model(model_display_name='my_model')
@@ -2610,7 +2642,7 @@ def predict(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.predict(inputs={'Age': 30, 'Income': 12, 'Category': 'A'}
@@ -2682,16 +2714,17 @@ def predict(
values = []
for i, c in zip(inputs, column_specs):
- value_type = self.__type_code_to_value_type(c.data_type.type_code, i)
+ value_type = self.__data_type_to_proto_value(c.data_type, i)
values.append(value_type)
- request = {"row": {"values": values}}
+ row = data_items_pb2.Row(values=values)
+ payload = data_items_pb2.ExamplePayload(row=row)
params = None
if feature_importance:
params = {"feature_importance": "true"}
- return self.prediction_client.predict(model.name, request, params, **kwargs)
+ return self.prediction_client.predict(model.name, payload, params, **kwargs)
def batch_predict(
self,
@@ -2718,7 +2751,7 @@ def batch_predict(
>>> from google.oauth2 import service_account
>>>
>>> client = automl_v1beta1.TablesClient(
- ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json')
+ ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json'),
... project='my-project', region='us-central1')
...
>>> client.batch_predict(
diff --git a/google/cloud/automl_v1beta1/types.py b/google/cloud/automl_v1beta1/types.py
index 80258924..2e9bdd77 100644
--- a/google/cloud/automl_v1beta1/types.py
+++ b/google/cloud/automl_v1beta1/types.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/noxfile.py b/noxfile.py
index 342fcdf4..116b26fb 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -23,7 +23,6 @@
import nox
-LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core"))
BLACK_VERSION = "black==19.3b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
@@ -38,7 +37,7 @@ def lint(session):
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
- session.install("flake8", BLACK_VERSION, *LOCAL_DEPS)
+ session.install("flake8", BLACK_VERSION)
session.run("black", "--check", *BLACK_PATHS)
session.run("flake8", "google", "tests")
@@ -67,8 +66,6 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
session.install("mock", "pytest", "pytest-cov")
- for local_dep in LOCAL_DEPS:
- session.install("-e", local_dep)
session.install("-e", ".[pandas,storage]")
# Run py.test against the unit tests.
@@ -86,7 +83,7 @@ def default(session):
)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7"])
+@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
def unit(session):
"""Run the unit test suite."""
default(session)
@@ -113,9 +110,8 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install("mock", "pytest")
- for local_dep in LOCAL_DEPS:
- session.install("-e", local_dep)
- session.install("-e", "../test_utils/")
+
+ session.install("-e", "test_utils")
session.install("-e", ".[pandas,storage]")
# Run py.test against the system tests.
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 00000000..4fa94931
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,5 @@
+{
+ "extends": [
+ "config:base", ":preserveSemverRanges"
+ ]
+}
diff --git a/setup.py b/setup.py
index eeabb7d6..7736f6ed 100644
--- a/setup.py
+++ b/setup.py
@@ -19,14 +19,14 @@
name = "google-cloud-automl"
description = "Cloud AutoML API client library"
-version = "0.9.0"
+version = "0.10.0"
release_status = "Development Status :: 3 - Alpha"
dependencies = [
"google-api-core[grpc] >= 1.14.0, < 2.0.0dev",
'enum34; python_version < "3.4"',
]
extras = {
- "pandas": ["pandas>=0.24.0"],
+ "pandas": ["pandas>=0.17.1"],
"storage": ["google-cloud-storage >= 1.18.0, < 2.0.0dev"],
}
@@ -52,7 +52,7 @@
author="Google LLC",
author_email="googleapis-packages@oogle.com",
license="Apache 2.0",
- url="https://github.com/GoogleCloudPlatform/google-cloud-python",
+ url="https://github.com/googleapis/python-automl",
classifiers=[
release_status,
"Intended Audience :: Developers",
diff --git a/synth.metadata b/synth.metadata
index 885dba20..79b7aa93 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,24 +1,24 @@
{
- "updateTime": "2019-11-16T13:13:10.249431Z",
+ "updateTime": "2020-01-31T18:17:08.114692Z",
"sources": [
{
"generator": {
"name": "artman",
- "version": "0.41.1",
- "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e"
+ "version": "0.44.4",
+ "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "c89394342a9ef70acaf73a6959e04b943fbc817b",
- "internalRef": "280761373"
+ "sha": "2717b8a1c762b26911b45ecc2e4ee01d98401b28",
+ "internalRef": "292555664"
}
},
{
"template": {
- "name": "python_library",
+ "name": "python_split_library",
"origin": "synthtool.gcp",
"version": "2019.10.17"
}
diff --git a/synth.py b/synth.py
index c46ab6f8..2009a1d0 100644
--- a/synth.py
+++ b/synth.py
@@ -49,7 +49,7 @@
f"class TablesClient(tables_client.TablesClient):"
f" __doc__ = tables_client.TablesClient.__doc__"
f"\n\nclass GcsClient(gcs_client.GcsClient):"
- f" __doc__ = gcs_client.GcsClient.__doc__"
+ f" __doc__ = gcs_client.GcsClient.__doc__",
)
s.replace(
@@ -111,25 +111,26 @@
s.replace("google/cloud/**/io_pb2.py", r":raw-latex:`\\t `", r"\\\\t")
# Remove html bits that can't be rendered correctly
-s.replace("google/cloud/automl_v1/**/io_pb2.py",
-r""".. raw:: html.+?
+s.replace(
+ "google/cloud/automl_v1/**/io_pb2.py",
+ r""".. raw:: html.+?
\""",
-r"", flags=re.DOTALL)
+ r"",
+ flags=re.DOTALL,
+)
# Remove raw-latex wrapping newline
-s.replace("google/cloud/automl_v1/**/io_pb2.py",
-r""":raw-latex:`\\n`""",
-r"``\\\\n``")
+s.replace("google/cloud/automl_v1/**/io_pb2.py", r""":raw-latex:`\\n`""", r"``\\\\n``")
# Make \n visible in JSONL samples
-s.replace("google/cloud/**/io_pb2.py",
-r"\}\\n",
-r"}\\\\n")
+s.replace("google/cloud/**/io_pb2.py", r"\}\\n", r"}\\\\n")
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(unit_cov_level=82, cov_level=83)
+templated_files = common.py_library(
+ unit_cov_level=82, cov_level=83, system_test_dependencies=["test_utils"]
+)
s.move(templated_files)
@@ -137,7 +138,7 @@
s.replace(
"noxfile.py",
"""session\.install\(['"]-e['"], ['"]\.['"]\)""",
- """session.install("-e", ".[pandas,storage]")"""
+ """session.install("-e", ".[pandas,storage]")""",
)
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/test_utils/credentials.json.enc b/test_utils/credentials.json.enc
new file mode 100644
index 00000000..f073c7e4
--- /dev/null
+++ b/test_utils/credentials.json.enc
@@ -0,0 +1,49 @@
+U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA
+UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU
+aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj
+HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV
+V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus
+J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8
+Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He
+/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv
+ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT
+6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq
+NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8
+j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF
+41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM
+IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g
+x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/
+vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy
+ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At
+CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD
+j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK
+jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z
+cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO
+LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso
+Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d
+XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/
+MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP
++dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4
+kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU
+5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr
+E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29
+D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT
+tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX
+XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6
+J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB
+jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM
+td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg
+twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC
+mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU
+aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6
+uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK
+n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ
+bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX
+ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H
+NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w
+1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE
+8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL
+qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv
+tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4
+iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l
+bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD
diff --git a/test_utils/scripts/circleci/get_tagged_package.py b/test_utils/scripts/circleci/get_tagged_package.py
new file mode 100644
index 00000000..c148b9dc
--- /dev/null
+++ b/test_utils/scripts/circleci/get_tagged_package.py
@@ -0,0 +1,64 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Helper to determine package from tag.
+Get the current package directory corresponding to the Circle Tag.
+"""
+
+from __future__ import print_function
+
+import os
+import re
+import sys
+
+
+TAG_RE = re.compile(r"""
+ ^
+    (?P<pkg>
+ (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed)
+ ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
+ $
+""", re.VERBOSE)
+TAG_ENV = 'CIRCLE_TAG'
+ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
+BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
+CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
+ROOT_DIR = os.path.realpath(
+ os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
+
+
+def main():
+ """Get the current package directory.
+ Prints the package directory out so callers can consume it.
+ """
+ if TAG_ENV not in os.environ:
+ print(ERROR_MSG, file=sys.stderr)
+ sys.exit(1)
+
+ tag_name = os.environ[TAG_ENV]
+ match = TAG_RE.match(tag_name)
+ if match is None:
+ print(BAD_TAG_MSG % (tag_name,), file=sys.stderr)
+ sys.exit(1)
+
+ pkg_name = match.group('pkg')
+ if pkg_name is None:
+ print(ROOT_DIR)
+ else:
+ pkg_dir = pkg_name.rstrip('-').replace('-', '_')
+ print(os.path.join(ROOT_DIR, pkg_dir))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/circleci/twine_upload.sh b/test_utils/scripts/circleci/twine_upload.sh
new file mode 100755
index 00000000..23a4738e
--- /dev/null
+++ b/test_utils/scripts/circleci/twine_upload.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+# If this is not a CircleCI tag, no-op.
+if [[ -z "$CIRCLE_TAG" ]]; then
+ echo "This is not a release tag. Doing nothing."
+ exit 0
+fi
+
+# H/T: http://stackoverflow.com/a/246128/1068170
+SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py"
+# Determine the package directory being deployed on this tag.
+PKG_DIR="$(python ${SCRIPT})"
+
+# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
+python3 -m pip install --upgrade twine wheel setuptools
+
+# Move into the package, build the distribution and upload.
+cd ${PKG_DIR}
+python3 setup.py sdist bdist_wheel
+twine upload dist/*
diff --git a/test_utils/scripts/get_target_packages.py b/test_utils/scripts/get_target_packages.py
new file mode 100644
index 00000000..1d51830c
--- /dev/null
+++ b/test_utils/scripts/get_target_packages.py
@@ -0,0 +1,268 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Print a list of packages which require testing."""
+
+import os
+import re
+import subprocess
+import warnings
+
+
+CURRENT_DIR = os.path.realpath(os.path.dirname(__file__))
+BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..'))
+GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python')
+CI = os.environ.get('CI', '')
+CI_BRANCH = os.environ.get('CIRCLE_BRANCH')
+CI_PR = os.environ.get('CIRCLE_PR_NUMBER')
+CIRCLE_TAG = os.environ.get('CIRCLE_TAG')
+head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD']
+).strip().decode('ascii').split()
+rev_parse = subprocess.check_output(
+ ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
+).strip().decode('ascii')
+MAJOR_DIV = '#' * 78
+MINOR_DIV = '#' + '-' * 77
+
+# NOTE: This reg-ex is copied from ``get_tagged_packages``.
+TAG_RE = re.compile(r"""
+ ^
+    (?P<pkg>
+ (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed)
+ ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
+ $
+""", re.VERBOSE)
+
+# This is the current set of dependencies by package.
+# As of this writing, the only "real" dependency is that of error_reporting
+# (on logging), the rest are just system test dependencies.
+PKG_DEPENDENCIES = {
+ 'logging': {'pubsub'},
+}
+
+
+def get_baseline():
+ """Return the baseline commit.
+
+ On a pull request, or on a branch, return the common parent revision
+ with the master branch.
+
+ Locally, return a value pulled from environment variables, or None if
+ the environment variables are not set.
+
+ On a push to master, return None. This will effectively cause everything
+ to be considered to be affected.
+ """
+
+ # If this is a pull request or branch, return the tip for master.
+ # We will test only packages which have changed since that point.
+ ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR])
+
+ if ci_non_master:
+
+ repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO)
+ subprocess.run(['git', 'remote', 'add', 'baseline', repo_url],
+ stderr=subprocess.DEVNULL)
+ subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL)
+
+ if CI_PR is None and CI_BRANCH is not None:
+ output = subprocess.check_output([
+ 'git', 'merge-base', '--fork-point',
+ 'baseline/master', CI_BRANCH])
+ return output.strip().decode('ascii')
+
+ return 'baseline/master'
+
+ # If environment variables are set identifying what the master tip is,
+ # use that.
+ if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''):
+ remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE']
+ branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master')
+ return '%s/%s' % (remote, branch)
+
+ # If we are not in CI and we got this far, issue a warning.
+ if not CI:
+ warnings.warn('No baseline could be determined; this means tests '
+ 'will run for every package. If this is local '
+ 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE '
+ 'environment variable.')
+
+ # That is all we can do; return None.
+ return None
+
+
+def get_changed_files():
+ """Return a list of files that have been changed since the baseline.
+
+ If there is no base, return None.
+ """
+ # Get the baseline, and fail quickly if there is no baseline.
+ baseline = get_baseline()
+ print('# Baseline commit: {}'.format(baseline))
+ if not baseline:
+ return None
+
+ # Return a list of altered files.
+ try:
+ return subprocess.check_output([
+ 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline),
+ ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+ except subprocess.CalledProcessError:
+ warnings.warn('Unable to perform git diff; falling back to assuming '
+ 'all packages have changed.')
+ return None
+
+
+def reverse_map(dict_of_sets):
+ """Reverse a map of one-to-many.
+
+ So the map::
+
+ {
+ 'A': {'B', 'C'},
+ 'B': {'C'},
+ }
+
+ becomes
+
+ {
+ 'B': {'A'},
+ 'C': {'A', 'B'},
+ }
+
+ Args:
+ dict_of_sets (dict[set]): A dictionary of sets, mapping
+ one value to many.
+
+ Returns:
+ dict[set]: The reversed map.
+ """
+ result = {}
+ for key, values in dict_of_sets.items():
+ for value in values:
+ result.setdefault(value, set()).add(key)
+
+ return result
+
+def get_changed_packages(file_list):
+ """Return a list of changed packages based on the provided file list.
+
+ If the file list is None, then all packages should be considered to be
+ altered.
+ """
+ # Determine a complete list of packages.
+ all_packages = set()
+ for file_ in os.listdir(BASE_DIR):
+ abs_file = os.path.realpath(os.path.join(BASE_DIR, file_))
+ nox_file = os.path.join(abs_file, 'nox.py')
+ if os.path.isdir(abs_file) and os.path.isfile(nox_file):
+ all_packages.add(file_)
+
+    # If there is no file list, send down the full package set.
+ if file_list is None:
+ return all_packages
+
+ # Create a set based on the list of changed files.
+ answer = set()
+ reverse_deps = reverse_map(PKG_DEPENDENCIES)
+ for file_ in file_list:
+ # Ignore root directory changes (setup.py, .gitignore, etc.).
+ if os.path.sep not in file_:
+ continue
+
+ # Ignore changes that are not in a package (usually this will be docs).
+ package = file_.split(os.path.sep, 1)[0]
+ if package not in all_packages:
+ continue
+
+ # If there is a change in core, short-circuit now and return
+ # everything.
+ if package in ('core',):
+ return all_packages
+
+ # Add the package, as well as any dependencies this package has.
+ # NOTE: For now, dependencies only go down one level.
+ answer.add(package)
+ answer = answer.union(reverse_deps.get(package, set()))
+
+ # We got this far without being short-circuited; return the final answer.
+ return answer
+
+
+def get_tagged_package():
+ """Return the package corresponding to the current tag.
+
+    If there is no tag, will return :data:`None`.
+ """
+ if CIRCLE_TAG is None:
+ return
+
+ match = TAG_RE.match(CIRCLE_TAG)
+ if match is None:
+ return
+
+ pkg_name = match.group('pkg')
+ if pkg_name == '':
+ # NOTE: This corresponds to the "umbrella" tag.
+ return
+
+ return pkg_name.rstrip('-').replace('-', '_')
+
+
+def get_target_packages():
+ """Return a list of target packages to be run in the current build.
+
+ If in a tag build, will run only the package(s) that are tagged, otherwise
+ will run the packages that have file changes in them (or packages that
+ depend on those).
+ """
+ tagged_package = get_tagged_package()
+ if tagged_package is None:
+ file_list = get_changed_files()
+ print(MAJOR_DIV)
+ print('# Changed files:')
+ print(MINOR_DIV)
+ for file_ in file_list or ():
+ print('# {}'.format(file_))
+ for package in sorted(get_changed_packages(file_list)):
+ yield package
+ else:
+ yield tagged_package
+
+
+def main():
+ print(MAJOR_DIV)
+ print('# Environment')
+ print(MINOR_DIV)
+ print('# CircleCI: {}'.format(CI))
+ print('# CircleCI branch: {}'.format(CI_BRANCH))
+ print('# CircleCI pr: {}'.format(CI_PR))
+ print('# CircleCI tag: {}'.format(CIRCLE_TAG))
+ print('# HEAD ref: {}'.format(head_hash))
+ print('# {}'.format(head_name))
+ print('# Git branch: {}'.format(rev_parse))
+ print(MAJOR_DIV)
+
+ packages = list(get_target_packages())
+
+ print(MAJOR_DIV)
+ print('# Target packages:')
+ print(MINOR_DIV)
+ for package in packages:
+ print(package)
+ print(MAJOR_DIV)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/get_target_packages_kokoro.py b/test_utils/scripts/get_target_packages_kokoro.py
new file mode 100644
index 00000000..27d3a0c9
--- /dev/null
+++ b/test_utils/scripts/get_target_packages_kokoro.py
@@ -0,0 +1,98 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Print a list of packages which require testing."""
+
+import pathlib
+import subprocess
+
+import ci_diff_helper
+import requests
+
+
+def print_environment(environment):
+ print("-> CI environment:")
+ print('Branch', environment.branch)
+ print('PR', environment.pr)
+ print('In PR', environment.in_pr)
+ print('Repo URL', environment.repo_url)
+ if environment.in_pr:
+ print('PR Base', environment.base)
+
+
+def get_base(environment):
+ if environment.in_pr:
+ return environment.base
+ else:
+ # If we're not in a PR, just calculate the changes between this commit
+ # and its parent.
+ return 'HEAD~1'
+
+
+def get_changed_files_from_base(base):
+ return subprocess.check_output([
+ 'git', 'diff', '--name-only', f'{base}..HEAD',
+ ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
+
+
+_URL_TEMPLATE = (
+ 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/'
+ '{}/files'
+)
+
+
+def get_changed_files_from_pr(pr):
+ url = _URL_TEMPLATE.format(pr)
+ while url is not None:
+ response = requests.get(url)
+ for info in response.json():
+ yield info['filename']
+ url = response.links.get('next', {}).get('url')
+
+
+def determine_changed_packages(changed_files):
+ packages = [
+ path.parent for path in pathlib.Path('.').glob('*/noxfile.py')
+ ]
+
+ changed_packages = set()
+ for file in changed_files:
+ file = pathlib.Path(file)
+ for package in packages:
+ if package in file.parents:
+ changed_packages.add(package)
+
+ return changed_packages
+
+
+def main():
+ environment = ci_diff_helper.get_config()
+ print_environment(environment)
+ base = get_base(environment)
+
+ if environment.in_pr:
+ changed_files = list(get_changed_files_from_pr(environment.pr))
+ else:
+ changed_files = get_changed_files_from_base(base)
+
+ packages = determine_changed_packages(changed_files)
+
+ print(f"Comparing against {base}.")
+ print("-> Changed packages:")
+
+ for package in packages:
+ print(package)
+
+
+main()
diff --git a/test_utils/scripts/run_emulator.py b/test_utils/scripts/run_emulator.py
new file mode 100644
index 00000000..287b0864
--- /dev/null
+++ b/test_utils/scripts/run_emulator.py
@@ -0,0 +1,199 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Run system tests locally with the emulator.
+
+First makes system calls to spawn the emulator and get the local environment
+variable needed for it. Then calls the system tests.
+"""
+
+
+import argparse
+import os
+import subprocess
+
+import psutil
+
+from google.cloud.environment_vars import BIGTABLE_EMULATOR
+from google.cloud.environment_vars import GCD_DATASET
+from google.cloud.environment_vars import GCD_HOST
+from google.cloud.environment_vars import PUBSUB_EMULATOR
+from run_system_test import run_module_tests
+
+
+BIGTABLE = 'bigtable'
+DATASTORE = 'datastore'
+PUBSUB = 'pubsub'
+PACKAGE_INFO = {
+ BIGTABLE: (BIGTABLE_EMULATOR,),
+ DATASTORE: (GCD_DATASET, GCD_HOST),
+ PUBSUB: (PUBSUB_EMULATOR,),
+}
+EXTRA = {
+ DATASTORE: ('--no-legacy',),
+}
+_DS_READY_LINE = '[datastore] Dev App Server is now running.\n'
+_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on '
+_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on '
+
+
+def get_parser():
+ """Get simple ``argparse`` parser to determine package.
+
+ :rtype: :class:`argparse.ArgumentParser`
+ :returns: The parser for this script.
+ """
+ parser = argparse.ArgumentParser(
+ description='Run google-cloud system tests against local emulator.')
+ parser.add_argument('--package', dest='package',
+ choices=sorted(PACKAGE_INFO.keys()),
+ default=DATASTORE, help='Package to be tested.')
+ return parser
+
+
+def get_start_command(package):
+ """Get command line arguments for starting emulator.
+
+ :type package: str
+ :param package: The package to start an emulator for.
+
+ :rtype: tuple
+ :returns: The arguments to be used, in a tuple.
+ """
+ result = ('gcloud', 'beta', 'emulators', package, 'start')
+ extra = EXTRA.get(package, ())
+ return result + extra
+
+
+def get_env_init_command(package):
+ """Get command line arguments for getting emulator env. info.
+
+ :type package: str
+ :param package: The package to get environment info for.
+
+ :rtype: tuple
+ :returns: The arguments to be used, in a tuple.
+ """
+ result = ('gcloud', 'beta', 'emulators', package, 'env-init')
+ extra = EXTRA.get(package, ())
+ return result + extra
+
+
+def datastore_wait_ready(popen):
+ """Wait until the datastore emulator is ready to use.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+ """
+ emulator_ready = False
+ while not emulator_ready:
+ emulator_ready = popen.stderr.readline() == _DS_READY_LINE
+
+
+def wait_ready_prefix(popen, prefix):
+    """Wait until the process encounters a line with matching prefix.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+
+ :type prefix: str
+ :param prefix: The prefix to match
+ """
+ emulator_ready = False
+ while not emulator_ready:
+ emulator_ready = popen.stderr.readline().startswith(prefix)
+
+
+def wait_ready(package, popen):
+ """Wait until the emulator is ready to use.
+
+ :type package: str
+ :param package: The package to check if ready.
+
+ :type popen: :class:`subprocess.Popen`
+ :param popen: An open subprocess to interact with.
+
+ :raises: :class:`KeyError` if the ``package`` is not among
+ ``datastore``, ``pubsub`` or ``bigtable``.
+ """
+ if package == DATASTORE:
+ datastore_wait_ready(popen)
+ elif package == PUBSUB:
+ wait_ready_prefix(popen, _PS_READY_LINE_PREFIX)
+ elif package == BIGTABLE:
+ wait_ready_prefix(popen, _BT_READY_LINE_PREFIX)
+ else:
+ raise KeyError('Package not supported', package)
+
+
+def cleanup(pid):
+ """Cleanup a process (including all of its children).
+
+ :type pid: int
+ :param pid: Process ID.
+ """
+ proc = psutil.Process(pid)
+ for child_proc in proc.children(recursive=True):
+ try:
+ child_proc.kill()
+ child_proc.terminate()
+ except psutil.NoSuchProcess:
+ pass
+ proc.terminate()
+ proc.kill()
+
+
+def run_tests_in_emulator(package):
+ """Spawn an emulator instance and run the system tests.
+
+ :type package: str
+ :param package: The package to run system tests against.
+ """
+ # Make sure this package has environment vars to replace.
+ env_vars = PACKAGE_INFO[package]
+
+ start_command = get_start_command(package)
+ # Ignore stdin and stdout, don't pollute the user's output with them.
+ proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ try:
+ wait_ready(package, proc_start)
+ env_init_command = get_env_init_command(package)
+ proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ env_status = proc_env.wait()
+ if env_status != 0:
+ raise RuntimeError(env_status, proc_env.stderr.read())
+ env_lines = proc_env.stdout.read().strip().split('\n')
+ # Set environment variables before running the system tests.
+ for env_var in env_vars:
+ line_prefix = 'export ' + env_var + '='
+ value, = [line.split(line_prefix, 1)[1] for line in env_lines
+ if line.startswith(line_prefix)]
+ os.environ[env_var] = value
+ run_module_tests(package,
+ ignore_requirements=True)
+ finally:
+ cleanup(proc_start.pid)
+
+
+def main():
+ """Main method to run this script."""
+ parser = get_parser()
+ args = parser.parse_args()
+ run_tests_in_emulator(args.package)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/test_utils/scripts/update_docs.sh b/test_utils/scripts/update_docs.sh
new file mode 100755
index 00000000..8cbab9f0
--- /dev/null
+++ b/test_utils/scripts/update_docs.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ev
+
+GH_OWNER='GoogleCloudPlatform'
+GH_PROJECT_NAME='google-cloud-python'
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# Function to build the docs.
+function build_docs {
+ rm -rf docs/_build/
+ rm -f docs/bigquery/generated/*.rst
+ # -W -> warnings as errors
+ # -T -> show full traceback on exception
+ # -N -> no color
+ sphinx-build \
+ -W -T -N \
+ -b html \
+ -d docs/_build/doctrees \
+ docs/ \
+ docs/_build/html/
+ return $?
+}
+
+# Only update docs if we are on CircleCI.
+if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then
+ echo "Building new docs on a merged commit."
+elif [[ "$1" == "kokoro" ]]; then
+ echo "Building and publishing docs on Kokoro."
+elif [[ -n "${CIRCLE_TAG}" ]]; then
+ echo "Building new docs on a tag (but will not deploy)."
+ build_docs
+ exit $?
+else
+ echo "Not on master nor a release tag."
+ echo "Building new docs for testing purposes, but not deploying."
+ build_docs
+ exit $?
+fi
+
+# Adding GitHub pages branch. `git submodule add` checks it
+# out at HEAD.
+GH_PAGES_DIR='ghpages'
+git submodule add -q -b gh-pages \
+ "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR}
+
+# Determine if we are building a new tag or are building docs
+# for master. Then build new docs in docs/_build from master.
+if [[ -n "${CIRCLE_TAG}" ]]; then
+ # Sphinx will use the package version by default.
+ build_docs
+else
+ SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs
+fi
+
+# Update gh-pages with the created docs.
+cd ${GH_PAGES_DIR}
+git rm -fr latest/
+cp -R ../docs/_build/html/ latest/
+
+# Update the files to push to gh-pages.
+git add .
+git status
+
+# If there are no changes, just exit cleanly.
+if [[ -z "$(git status --porcelain)" ]]; then
+ echo "Nothing to commit. Exiting without pushing changes."
+ exit
+fi
+
+# Commit to gh-pages branch to apply changes.
+git config --global user.email "dpebot@google.com"
+git config --global user.name "dpebot"
+git commit -m "Update docs after merge to master."
+
+# NOTE: This may fail if two docs updates (on merges to master)
+# happen in close proximity.
+git push -q origin HEAD:gh-pages
diff --git a/test_utils/setup.py b/test_utils/setup.py
new file mode 100644
index 00000000..8e9222a7
--- /dev/null
+++ b/test_utils/setup.py
@@ -0,0 +1,64 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from setuptools import find_packages
+from setuptools import setup
+
+
+PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
+
+
+# NOTE: This is duplicated throughout and we should try to
+# consolidate.
+SETUP_BASE = {
+ 'author': 'Google Cloud Platform',
+ 'author_email': 'googleapis-publisher@google.com',
+ 'scripts': [],
+ 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python',
+ 'license': 'Apache 2.0',
+ 'platforms': 'Posix; MacOS X; Windows',
+ 'include_package_data': True,
+ 'zip_safe': False,
+ 'classifiers': [
+ 'Development Status :: 4 - Beta',
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: Apache Software License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.5',
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
+ 'Topic :: Internet',
+ ],
+}
+
+
+REQUIREMENTS = [
+ 'google-auth >= 0.4.0',
+ 'six',
+]
+
+setup(
+ name='google-cloud-testutils',
+ version='0.24.0',
+ description='System test utilities for google-cloud-python',
+ packages=find_packages(),
+ install_requires=REQUIREMENTS,
+ python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
+ **SETUP_BASE
+)
diff --git a/test_utils/test_utils/__init__.py b/test_utils/test_utils/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/test_utils/test_utils/imports.py b/test_utils/test_utils/imports.py
new file mode 100644
index 00000000..5991af7f
--- /dev/null
+++ b/test_utils/test_utils/imports.py
@@ -0,0 +1,38 @@
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import six
+
+
+def maybe_fail_import(predicate):
+ """Create and return a patcher that conditionally makes an import fail.
+
+ Args:
+ predicate (Callable[[...], bool]): A callable that, if it returns `True`,
+ triggers an `ImportError`. It must accept the same arguments as the
+ built-in `__import__` function.
+ https://docs.python.org/3/library/functions.html#__import__
+
+ Returns:
+ A mock patcher object that can be used to enable patched import behavior.
+ """
+ orig_import = six.moves.builtins.__import__
+
+ def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
+ if predicate(name, globals, locals, fromlist, level):
+ raise ImportError
+ return orig_import(name, globals, locals, fromlist, level)
+
+ return mock.patch.object(six.moves.builtins, "__import__", new=custom_import)
diff --git a/test_utils/test_utils/retry.py b/test_utils/test_utils/retry.py
new file mode 100644
index 00000000..e61c001a
--- /dev/null
+++ b/test_utils/test_utils/retry.py
@@ -0,0 +1,207 @@
+# Copyright 2016 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from functools import wraps
+
+import six
+
+MAX_TRIES = 4
+DELAY = 1
+BACKOFF = 2
+
+
+def _retry_all(_):
+    """Retry all caught exceptions."""
+    # Default predicate for RetryErrors: treat every exception as retryable.
+    return True
+
+
+class BackoffFailed(Exception):
+    """Retry w/ backoffs did not complete successfully."""
+    # Raised by RetryResult and RetryInstanceState once max_tries attempts
+    # have been made without the predicate ever returning True.
+
+
+class RetryBase(object):
+    """Base for retrying calling a decorated function w/ exponential backoff.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier e.g. value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        self.max_tries = max_tries
+        self.delay = delay
+        self.backoff = backoff
+        # six.print_ is the print function, so when no logger is supplied the
+        # retry messages are simply printed instead of logged at WARNING.
+        self.logger = logger.warning if logger else six.print_
+
+
+class RetryErrors(RetryBase):
+    """Decorator for retrying given exceptions in testing.
+
+    :type exception: Exception or tuple of Exceptions
+    :param exception: The exception to check or may be a tuple of
+                      exceptions to check.
+
+    :type error_predicate: function, takes caught exception, returns bool
+    :param error_predicate: Predicate evaluating whether to retry after a
+                            caught exception.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier e.g. value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, exception, error_predicate=_retry_all,
+                 max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        super(RetryErrors, self).__init__(max_tries, delay, backoff, logger)
+        self.exception = exception
+        self.error_predicate = error_predicate
+
+    def __call__(self, to_wrap):
+        @wraps(to_wrap)
+        def wrapped_function(*args, **kwargs):
+            tries = 0
+            while tries < self.max_tries:
+                try:
+                    return to_wrap(*args, **kwargs)
+                except self.exception as caught_exception:
+
+                    # Non-retryable exceptions propagate immediately.
+                    if not self.error_predicate(caught_exception):
+                        raise
+
+                    # Exponential backoff: delay, delay*backoff, delay*backoff**2, ...
+                    delay = self.delay * self.backoff**tries
+                    msg = ("%s, Trying again in %d seconds..." %
+                           (caught_exception, delay))
+                    self.logger(msg)
+
+                    time.sleep(delay)
+                    tries += 1
+            # NOTE(review): this makes one final, uncaught attempt after the
+            # loop — max_tries + 1 total calls when every attempt fails, with
+            # the last exception propagating. Confirm the off-by-one is intended.
+            return to_wrap(*args, **kwargs)
+
+        return wrapped_function
+
+
+class RetryResult(RetryBase):
+    """Decorator for retrying based on non-error result.
+
+    :type result_predicate: function, takes result, returns bool
+    :param result_predicate: Predicate evaluating whether to retry after a
+                             result is returned.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier e.g. value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, result_predicate,
+                 max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        super(RetryResult, self).__init__(max_tries, delay, backoff, logger)
+        self.result_predicate = result_predicate
+
+    def __call__(self, to_wrap):
+        @wraps(to_wrap)
+        def wrapped_function(*args, **kwargs):
+            tries = 0
+            while tries < self.max_tries:
+                result = to_wrap(*args, **kwargs)
+                # Stop retrying as soon as the predicate accepts the result.
+                if self.result_predicate(result):
+                    return result
+
+                # Exponential backoff between attempts.
+                delay = self.delay * self.backoff**tries
+                msg = "%s. Trying again in %d seconds..." % (
+                    self.result_predicate.__name__, delay,)
+                self.logger(msg)
+
+                time.sleep(delay)
+                tries += 1
+            # Exactly max_tries attempts were made without an accepted result.
+            raise BackoffFailed()
+
+        return wrapped_function
+
+
+class RetryInstanceState(RetryBase):
+    """Decorator for retrying based on instance state.
+
+    :type instance_predicate: function, takes instance, returns bool
+    :param instance_predicate: Predicate evaluating whether to retry after an
+                               API-invoking method is called.
+
+    :type max_tries: int
+    :param max_tries: Number of times to try (not retry) before giving up.
+
+    :type delay: int
+    :param delay: Initial delay between retries in seconds.
+
+    :type backoff: int
+    :param backoff: Backoff multiplier e.g. value of 2 will double the
+                    delay each retry.
+
+    :type logger: logging.Logger instance
+    :param logger: Logger to use. If None, print.
+    """
+    def __init__(self, instance_predicate,
+                 max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
+                 logger=None):
+        super(RetryInstanceState, self).__init__(
+            max_tries, delay, backoff, logger)
+        self.instance_predicate = instance_predicate
+
+    def __call__(self, to_wrap):
+        # __self__ exists only on bound methods, so decorating a plain
+        # function raises AttributeError here, at decoration time.
+        instance = to_wrap.__self__  # only instance methods allowed
+
+        @wraps(to_wrap)
+        def wrapped_function(*args, **kwargs):
+            tries = 0
+            while tries < self.max_tries:
+                result = to_wrap(*args, **kwargs)
+                # The predicate inspects the owning instance, not the result.
+                if self.instance_predicate(instance):
+                    return result
+
+                # Exponential backoff between attempts.
+                delay = self.delay * self.backoff**tries
+                msg = "%s. Trying again in %d seconds..." % (
+                    self.instance_predicate.__name__, delay,)
+                self.logger(msg)
+
+                time.sleep(delay)
+                tries += 1
+            # Exactly max_tries attempts were made without the state check passing.
+            raise BackoffFailed()
+
+        return wrapped_function
diff --git a/test_utils/test_utils/system.py b/test_utils/test_utils/system.py
new file mode 100644
index 00000000..590dc62a
--- /dev/null
+++ b/test_utils/test_utils/system.py
@@ -0,0 +1,81 @@
+# Copyright 2014 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import os
+import sys
+import time
+
+import google.auth.credentials
+from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS
+
+
+# From shell environ. May be None.
+CREDENTIALS = os.getenv(TEST_CREDENTIALS)
+
+ENVIRON_ERROR_MSG = """\
+To run the system tests, you need to set some environment variables.
+Please check the CONTRIBUTING guide for instructions.
+"""
+
+
+class EmulatorCreds(google.auth.credentials.Credentials):
+    """A mock credential object.
+
+    Used to avoid unnecessary token refreshing or reliance on the network
+    while an emulator is running.
+    """
+
+    def __init__(self):  # pylint: disable=super-init-not-called
+        # Placeholder token; `valid` is always True so it is never refreshed.
+        self.token = b'seekrit'
+        self.expiry = None
+
+    @property
+    def valid(self):
+        """Would-be validity check of the credentials.
+
+        Always is :data:`True`.
+        """
+        return True
+
+    def refresh(self, unused_request):  # pylint: disable=unused-argument
+        """Off-limits implementation for abstract method."""
+        # Refresh should be unreachable because `valid` never returns False.
+        raise RuntimeError('Should never be refreshed.')
+
+
+def check_environ():
+    """Validate the credentials environment variable or abort the process.
+
+    Prints ENVIRON_ERROR_MSG plus a specific reason to stderr and exits with
+    status 1 when the credentials env var is unset or points to a non-file.
+    """
+    err_msg = None
+    if CREDENTIALS is None:
+        err_msg = '\nMissing variables: ' + TEST_CREDENTIALS
+    elif not os.path.isfile(CREDENTIALS):
+        err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS,
+                                                       CREDENTIALS)
+
+    if err_msg is not None:
+        msg = ENVIRON_ERROR_MSG + err_msg
+        print(msg, file=sys.stderr)
+        # Hard exit: system tests cannot run without valid credentials.
+        sys.exit(1)
+
+
+def unique_resource_id(delimiter='_'):
+    """A unique identifier for a resource.
+
+    Intended to help locate resources created in particular
+    testing environments and at particular times.
+    """
+    build_id = os.getenv('CIRCLE_BUILD_NUM', '')
+    if build_id == '':
+        # No CI build number: use millisecond resolution for uniqueness.
+        return '%s%d' % (delimiter, 1000 * time.time())
+    else:
+        # With a build number, second resolution suffices (%d truncates).
+        return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time())
diff --git a/test_utils/test_utils/vpcsc_config.py b/test_utils/test_utils/vpcsc_config.py
new file mode 100644
index 00000000..36b15d6b
--- /dev/null
+++ b/test_utils/test_utils/vpcsc_config.py
@@ -0,0 +1,118 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2019 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+import pytest
+
+
+INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC"
+PROJECT_INSIDE_ENVVAR = "PROJECT_ID"
+PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT"
+BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET"
+
+
+class VPCSCTestConfig(object):
+    """System test utility for VPCSC detection.
+
+    See: https://cloud.google.com/vpc-service-controls/docs/
+    """
+
+    @property
+    def inside_vpcsc(self):
+        """Test whether the test environment is configured to run inside VPCSC.
+
+        Returns:
+            bool:
+                true if the environment is configured to run inside VPCSC,
+                else false.
+        """
+        # Presence of the variable is the signal; its value is ignored.
+        return INSIDE_VPCSC_ENVVAR in os.environ
+
+    @property
+    def project_inside(self):
+        """Project ID for testing outside access.
+
+        Returns:
+            str: project ID used for testing outside access; None if undefined.
+        """
+        # NOTE(review): docstring says "outside access" for the *inside*
+        # project (and vice versa below) — the wording looks swapped; verify.
+        return os.environ.get(PROJECT_INSIDE_ENVVAR, None)
+
+    @property
+    def project_outside(self):
+        """Project ID for testing inside access.
+
+        Returns:
+            str: project ID used for testing inside access; None if undefined.
+        """
+        return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None)
+
+    @property
+    def bucket_outside(self):
+        """GCS bucket for testing inside access.
+
+        Returns:
+            str: bucket ID used for testing inside access; None if undefined.
+        """
+        return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None)
+
+    def skip_if_inside_vpcsc(self, testcase):
+        """Test decorator: skip if running inside VPCSC."""
+        reason = (
+            "Running inside VPCSC. "
+            "Unset the {} environment variable to enable this test."
+        ).format(INSIDE_VPCSC_ENVVAR)
+        skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_inside_vpcsc(self, testcase):
+        """Test decorator: skip if running outside VPCSC."""
+        reason = (
+            "Running outside VPCSC. "
+            "Set the {} environment variable to enable this test."
+        ).format(INSIDE_VPCSC_ENVVAR)
+        skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_inside_project(self, testcase):
+        """Test decorator: skip if inside project env var not set."""
+        reason = (
+            "Project ID for running inside VPCSC not set. "
+            "Set the {} environment variable to enable this test."
+        ).format(PROJECT_INSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.project_inside is None, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_outside_project(self, testcase):
+        """Test decorator: skip if outside project env var not set."""
+        reason = (
+            "Project ID for running outside VPCSC not set. "
+            "Set the {} environment variable to enable this test."
+        ).format(PROJECT_OUTSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.project_outside is None, reason=reason)
+        return skip(testcase)
+
+    def skip_unless_outside_bucket(self, testcase):
+        """Test decorator: skip if outside bucket env var not set."""
+        reason = (
+            "Bucket ID for running outside VPCSC not set. "
+            "Set the {} environment variable to enable this test."
+        ).format(BUCKET_OUTSIDE_ENVVAR)
+        skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason)
+        return skip(testcase)
+
+
+# Module-level singleton shared by system tests for the skip decorators above.
+vpcsc_config = VPCSCTestConfig()
diff --git a/tests/unit/gapic/v1/test_auto_ml_client_v1.py b/tests/unit/gapic/v1/test_auto_ml_client_v1.py
index 22864e8e..7a4558d2 100644
--- a/tests/unit/gapic/v1/test_auto_ml_client_v1.py
+++ b/tests/unit/gapic/v1/test_auto_ml_client_v1.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/v1/test_prediction_service_client_v1.py b/tests/unit/gapic/v1/test_prediction_service_client_v1.py
index 7b7ff6d9..cd75ea01 100644
--- a/tests/unit/gapic/v1/test_prediction_service_client_v1.py
+++ b/tests/unit/gapic/v1/test_prediction_service_client_v1.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/v1beta1/test_auto_ml_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_auto_ml_client_v1beta1.py
index e3a5ce12..702a3190 100644
--- a/tests/unit/gapic/v1beta1/test_auto_ml_client_v1beta1.py
+++ b/tests/unit/gapic/v1beta1/test_auto_ml_client_v1beta1.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/v1beta1/test_prediction_service_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_prediction_service_client_v1beta1.py
index e3ab187f..9b510f3a 100644
--- a/tests/unit/gapic/v1beta1/test_prediction_service_client_v1beta1.py
+++ b/tests/unit/gapic/v1beta1/test_prediction_service_client_v1beta1.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# Copyright 2019 Google LLC
+# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
diff --git a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py
index 3f2b6d3d..3566846d 100644
--- a/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py
+++ b/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py
@@ -23,7 +23,8 @@
from google.api_core import exceptions
from google.auth.credentials import AnonymousCredentials
from google.cloud import automl_v1beta1
-from google.cloud.automl_v1beta1.proto import data_types_pb2
+from google.cloud.automl_v1beta1.proto import data_types_pb2, data_items_pb2
+from google.protobuf import struct_pb2
PROJECT = "project"
REGION = "region"
@@ -1116,9 +1117,10 @@ def test_predict_from_array(self):
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict(["1"], model_name="my_model")
- client.prediction_client.predict.assert_called_with(
- "my_model", {"row": {"values": [{"string_value": "1"}]}}, None
+ payload = data_items_pb2.ExamplePayload(
+ row=data_items_pb2.Row(values=[struct_pb2.Value(string_value="1")])
)
+ client.prediction_client.predict.assert_called_with("my_model", payload, None)
def test_predict_from_dict(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
@@ -1131,11 +1133,15 @@ def test_predict_from_dict(self):
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1", "b": "2"}, model_name="my_model")
- client.prediction_client.predict.assert_called_with(
- "my_model",
- {"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
- None,
+ payload = data_items_pb2.ExamplePayload(
+ row=data_items_pb2.Row(
+ values=[
+ struct_pb2.Value(string_value="1"),
+ struct_pb2.Value(string_value="2"),
+ ]
+ )
)
+ client.prediction_client.predict.assert_called_with("my_model", payload, None)
def test_predict_from_dict_with_feature_importance(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
@@ -1150,10 +1156,16 @@ def test_predict_from_dict_with_feature_importance(self):
client.predict(
{"a": "1", "b": "2"}, model_name="my_model", feature_importance=True
)
+ payload = data_items_pb2.ExamplePayload(
+ row=data_items_pb2.Row(
+ values=[
+ struct_pb2.Value(string_value="1"),
+ struct_pb2.Value(string_value="2"),
+ ]
+ )
+ )
client.prediction_client.predict.assert_called_with(
- "my_model",
- {"row": {"values": [{"string_value": "1"}, {"string_value": "2"}]}},
- {"feature_importance": "true"},
+ "my_model", payload, {"feature_importance": "true"}
)
def test_predict_from_dict_missing(self):
@@ -1167,18 +1179,32 @@ def test_predict_from_dict_missing(self):
model.configure_mock(tables_model_metadata=model_metadata, name="my_model")
client = self.tables_client({"get_model.return_value": model}, {})
client.predict({"a": "1"}, model_name="my_model")
- client.prediction_client.predict.assert_called_with(
- "my_model",
- {"row": {"values": [{"string_value": "1"}, {"null_value": 0}]}},
- None,
+ payload = data_items_pb2.ExamplePayload(
+ row=data_items_pb2.Row(
+ values=[
+ struct_pb2.Value(string_value="1"),
+ struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE),
+ ]
+ )
)
+ client.prediction_client.predict.assert_called_with("my_model", payload, None)
def test_predict_all_types(self):
float_type = mock.Mock(type_code=data_types_pb2.FLOAT64)
timestamp_type = mock.Mock(type_code=data_types_pb2.TIMESTAMP)
string_type = mock.Mock(type_code=data_types_pb2.STRING)
- array_type = mock.Mock(type_code=data_types_pb2.ARRAY)
- struct_type = mock.Mock(type_code=data_types_pb2.STRUCT)
+ array_type = mock.Mock(
+ type_code=data_types_pb2.ARRAY,
+ list_element_type=mock.Mock(type_code=data_types_pb2.FLOAT64),
+ )
+ struct = data_types_pb2.StructType()
+ struct.fields["a"].CopyFrom(
+ data_types_pb2.DataType(type_code=data_types_pb2.CATEGORY)
+ )
+ struct.fields["b"].CopyFrom(
+ data_types_pb2.DataType(type_code=data_types_pb2.CATEGORY)
+ )
+ struct_type = mock.Mock(type_code=data_types_pb2.STRUCT, struct_type=struct)
category_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
column_spec_float = mock.Mock(display_name="float", data_type=float_type)
column_spec_timestamp = mock.Mock(
@@ -1211,29 +1237,33 @@ def test_predict_all_types(self):
"timestamp": "EST",
"string": "text",
"array": [1],
- "struct": {"a": "b"},
+ "struct": {"a": "label_a", "b": "label_b"},
"category": "a",
"null": None,
},
model_name="my_model",
)
- client.prediction_client.predict.assert_called_with(
- "my_model",
- {
- "row": {
- "values": [
- {"number_value": 1.0},
- {"string_value": "EST"},
- {"string_value": "text"},
- {"list_value": [1]},
- {"struct_value": {"a": "b"}},
- {"string_value": "a"},
- {"null_value": 0},
- ]
- }
- },
- None,
+ struct = struct_pb2.Struct()
+ struct.fields["a"].CopyFrom(struct_pb2.Value(string_value="label_a"))
+ struct.fields["b"].CopyFrom(struct_pb2.Value(string_value="label_b"))
+ payload = data_items_pb2.ExamplePayload(
+ row=data_items_pb2.Row(
+ values=[
+ struct_pb2.Value(number_value=1.0),
+ struct_pb2.Value(string_value="EST"),
+ struct_pb2.Value(string_value="text"),
+ struct_pb2.Value(
+ list_value=struct_pb2.ListValue(
+ values=[struct_pb2.Value(number_value=1.0)]
+ )
+ ),
+ struct_pb2.Value(struct_value=struct),
+ struct_pb2.Value(string_value="a"),
+ struct_pb2.Value(null_value=struct_pb2.NullValue.NULL_VALUE),
+ ]
+ )
)
+ client.prediction_client.predict.assert_called_with("my_model", payload, None)
def test_predict_from_array_missing(self):
data_type = mock.Mock(type_code=data_types_pb2.CATEGORY)
@@ -1424,3 +1454,14 @@ def test_prediction_client_credentials(self):
_, prediction_client_kwargs = MockPredictionClient.call_args
assert "credentials" in prediction_client_kwargs
assert prediction_client_kwargs["credentials"] == credentials_mock
+
+    def test_prediction_client_client_info(self):
+        """TablesClient must forward `client_info` to its prediction client."""
+        client_info_mock = mock.Mock()
+        patch_prediction_client = mock.patch(
+            "google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient"
+        )
+        with patch_prediction_client as MockPredictionClient:
+            # NOTE: `client` is intentionally unused — the assertions inspect
+            # the kwargs the constructor passed to the mocked prediction client.
+            client = automl_v1beta1.TablesClient(client_info=client_info_mock)
+            _, prediction_client_kwargs = MockPredictionClient.call_args
+            assert "client_info" in prediction_client_kwargs
+            assert prediction_client_kwargs["client_info"] == client_info_mock