diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 9ee60f7e..7d98291c 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
+ digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
index df80b945..88452d4a 100644
--- a/.github/.OwlBot.yaml
+++ b/.github/.OwlBot.yaml
@@ -13,7 +13,7 @@
# limitations under the License.
docker:
- image: gcr.io/repo-automation-bots/owlbot-python:latest
+ image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
deep-remove-regex:
- /owl-bot-staging
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index af599353..0ddb512d 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -4,6 +4,8 @@ branchProtectionRules:
# Identifies the protection rule pattern. Name of the branch to be protected.
# Defaults to `master`
- pattern: master
+ requiresCodeOwnerReviews: true
+ requiresStrictStatusChecks: true
requiredStatusCheckContexts:
- 'Kokoro'
- 'cla/google'
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 3b4c35c7..739ab8d9 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox
python3 -m nox --version
# If this is a continuous build, send the test log to the FlakyBot.
-# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
cleanup() {
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
index f3f92a93..a9caf408 100644
--- a/.kokoro/samples/lint/common.cfg
+++ b/.kokoro/samples/lint/common.cfg
@@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-language/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/common.cfg b/.kokoro/samples/python3.10/common.cfg
new file mode 100644
index 00000000..27547f0a
--- /dev/null
+++ b/.kokoro/samples/python3.10/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.10"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-310"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-language/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/continuous.cfg b/.kokoro/samples/python3.10/continuous.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.10/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg
new file mode 100644
index 00000000..676f8aa8
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-language/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.10/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg
new file mode 100644
index 00000000..71cd1e59
--- /dev/null
+++ b/.kokoro/samples/python3.10/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg
new file mode 100644
index 00000000..a1c8d975
--- /dev/null
+++ b/.kokoro/samples/python3.10/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index aa92ff73..3d32e6ed 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-language/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.6/periodic.cfg
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index aa64611c..71cf69f9 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-language/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.7/periodic.cfg
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 8fd22150..086547f2 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-language/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.8/periodic.cfg
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
index 38c3cc08..f1b3c2c0 100644
--- a/.kokoro/samples/python3.9/common.cfg
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-language/.kokoro/trampoline.sh"
\ No newline at end of file
+build_file: "python-language/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
index 50fec964..71cd1e59 100644
--- a/.kokoro/samples/python3.9/periodic.cfg
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -3,4 +3,4 @@
env_vars: {
key: "INSTALL_LIBRARY_FROM_SOURCE"
value: "False"
-}
\ No newline at end of file
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index 2a7db027..ba3a707b 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -23,6 +23,4 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-language
-
exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 311a8d54..8a324c9c 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do
EXIT=$?
# If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
$KOKORO_GFILE_DIR/linux_amd64/flakybot
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 801c16f4..11c042d3 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -24,8 +24,6 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-cd github/python-language
-
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
# preserving the test runner implementation.
diff --git a/.trampolinerc b/.trampolinerc
index 383b6ec8..0eee72ab 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -16,15 +16,26 @@
# Add required env vars here.
required_envvars+=(
- "STAGING_BUCKET"
- "V2_STAGING_BUCKET"
)
# Add env vars which are passed down into the container here.
pass_down_envvars+=(
+ "NOX_SESSION"
+ ###############
+ # Docs builds
+ ###############
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
- "NOX_SESSION"
+ ##################
+ # Samples builds
+ ##################
+ "INSTALL_LIBRARY_FROM_SOURCE"
+ "RUN_TESTS_SESSION"
+ "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ # Target directories.
+ "RUN_TESTS_DIRS"
+ # The nox session to run.
+ "RUN_TESTS_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0214571a..646b0782 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,13 @@
[1]: https://pypi.org/project/google-cloud-language/#history
+## [2.3.0](https://www.github.com/googleapis/python-language/compare/v2.2.2...v2.3.0) (2021-10-09)
+
+
+### Features
+
+* add context manager support in client ([#203](https://www.github.com/googleapis/python-language/issues/203)) ([91d48a8](https://www.github.com/googleapis/python-language/commit/91d48a8fee63b8279b235b70921d018206084b50))
+
### [2.2.2](https://www.github.com/googleapis/python-language/compare/v2.2.1...v2.2.2) (2021-07-28)
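
The context manager support called out in the 2.3.0 entry above can be exercised roughly as follows. This is a minimal usage sketch, not part of the diff; it assumes a standard google-cloud-language install and application default credentials, and that `Document.Type.PLAIN_TEXT` is available as in the generated v1 types.

    from google.cloud import language_v1

    # Exiting the ``with`` block closes the client's transport (new in 2.3.0).
    with language_v1.LanguageServiceClient() as client:
        document = language_v1.Document(
            content="OwlBot makes housekeeping PRs.",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        response = client.analyze_sentiment(request={"document": document})
        print(response.document_sentiment.score)
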
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 755aea73..adf8f81c 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -22,7 +22,7 @@ In order to add a feature:
documentation.
- The feature must work fully on the following CPython versions:
- 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows.
+ 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows.
- The feature must not add unnecessary dependencies (where
"unnecessary" is of course subjective, but new dependencies should
@@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout:
# Configure remotes such that you can pull changes from the googleapis/python-language
# repository into your local repository.
$ git remote add upstream git@github.com:googleapis/python-language.git
- # fetch and merge changes from upstream into master
+ # fetch and merge changes from upstream into main
$ git fetch upstream
- $ git merge upstream/master
+ $ git merge upstream/main
Now your local repo is set up such that you will push changes to your GitHub
repo, from which you can submit a pull request.
@@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests.
- To run a single unit test::
- $ nox -s unit-3.9 -- -k
+ $ nox -s unit-3.10 -- -k
.. note::
@@ -110,12 +110,12 @@ Coding Style
variables::
export GOOGLE_CLOUD_TESTING_REMOTE="upstream"
- export GOOGLE_CLOUD_TESTING_BRANCH="master"
+ export GOOGLE_CLOUD_TESTING_BRANCH="main"
By doing this, you are specifying the location of the most up-to-date
- version of ``python-language``. The the suggested remote name ``upstream``
- should point to the official ``googleapis`` checkout and the
- the branch should be the main branch on that remote (``master``).
+ version of ``python-language``. The
+ remote name ``upstream`` should point to the official ``googleapis``
+ checkout and the branch should be the default branch on that remote (``main``).
- This repository contains configuration for the
`pre-commit `__ tool, which automates checking
@@ -209,7 +209,7 @@ The `description on PyPI`_ for the project comes directly from the
``README``. Due to the reStructuredText (``rst``) parser used by
PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst``
instead of
-``https://github.com/googleapis/python-language/blob/master/CONTRIBUTING.rst``)
+``https://github.com/googleapis/python-language/blob/main/CONTRIBUTING.rst``)
may cause problems creating links or rendering the description.
.. _description on PyPI: https://pypi.org/project/google-cloud-language
@@ -225,16 +225,18 @@ We support:
- `Python 3.7`_
- `Python 3.8`_
- `Python 3.9`_
+- `Python 3.10`_
.. _Python 3.6: https://docs.python.org/3.6/
.. _Python 3.7: https://docs.python.org/3.7/
.. _Python 3.8: https://docs.python.org/3.8/
.. _Python 3.9: https://docs.python.org/3.9/
+.. _Python 3.10: https://docs.python.org/3.10/
Supported versions can be found in our ``noxfile.py`` `config`_.
-.. _config: https://github.com/googleapis/python-language/blob/master/noxfile.py
+.. _config: https://github.com/googleapis/python-language/blob/main/noxfile.py
We also explicitly decided to support Python 3 beginning with version 3.6.
diff --git a/README.rst b/README.rst
index 96dd41fc..958246b4 100644
--- a/README.rst
+++ b/README.rst
@@ -17,7 +17,7 @@ with your document storage on Google Cloud Storage.
- `Product Documentation`_
.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg
- :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability
+ :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability
.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-language.svg
:target: https://pypi.org/project/google-cloud-language/
.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-language.svg
diff --git a/docs/conf.py b/docs/conf.py
index 30faa8d1..e780d1a1 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -76,8 +76,8 @@
# The encoding of source files.
# source_encoding = 'utf-8-sig'
-# The master toctree document.
-master_doc = "index"
+# The root toctree document.
+root_doc = "index"
# General information about the project.
project = "google-cloud-language"
@@ -110,6 +110,7 @@
# directories to ignore when looking for source files.
exclude_patterns = [
"_build",
+ "**/.nox/**/*",
"samples/AUTHORING_GUIDE.md",
"samples/CONTRIBUTING.md",
"samples/snippets/README.rst",
@@ -279,7 +280,7 @@
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-language.tex",
"google-cloud-language Documentation",
author,
@@ -314,7 +315,7 @@
# (source start file, name, description, authors, manual section).
man_pages = [
(
- master_doc,
+ root_doc,
"google-cloud-language",
"google-cloud-language Documentation",
[author],
@@ -333,7 +334,7 @@
# dir menu entry, description, category)
texinfo_documents = [
(
- master_doc,
+ root_doc,
"google-cloud-language",
"google-cloud-language Documentation",
author,
diff --git a/google/cloud/language_v1/services/language_service/async_client.py b/google/cloud/language_v1/services/language_service/async_client.py
index eb800802..86d98569 100644
--- a/google/cloud/language_v1/services/language_service/async_client.py
+++ b/google/cloud/language_v1/services/language_service/async_client.py
@@ -661,6 +661,12 @@ async def annotate_text(
# Done; return the response.
return response
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ await self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
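
For the async client, the `__aenter__`/`__aexit__` hooks added above enable the usual async context-manager pattern. A hedged sketch, not part of the diff, assuming default credentials:

    import asyncio

    from google.cloud import language_v1


    async def main() -> None:
        # __aexit__ awaits transport.close(), mirroring the sync client's __exit__.
        async with language_v1.LanguageServiceAsyncClient() as client:
            document = language_v1.Document(
                content="Async clients can now be used as context managers.",
                type_=language_v1.Document.Type.PLAIN_TEXT,
            )
            response = await client.analyze_sentiment(request={"document": document})
            print(response.document_sentiment.score)


    asyncio.run(main())
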
diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py
index eb724c09..7d4846ec 100644
--- a/google/cloud/language_v1/services/language_service/client.py
+++ b/google/cloud/language_v1/services/language_service/client.py
@@ -17,7 +17,7 @@
from distutils import util
import os
import re
-from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
@@ -329,15 +329,12 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
- always_use_jwt_access=(
- Transport == type(self).get_transport_class("grpc")
- or Transport == type(self).get_transport_class("grpc_asyncio")
- ),
+ always_use_jwt_access=True,
)
def analyze_sentiment(
self,
- request: language_service.AnalyzeSentimentRequest = None,
+ request: Union[language_service.AnalyzeSentimentRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -348,7 +345,7 @@ def analyze_sentiment(
r"""Analyzes the sentiment of the provided text.
Args:
- request (google.cloud.language_v1.types.AnalyzeSentimentRequest):
+ request (Union[google.cloud.language_v1.types.AnalyzeSentimentRequest, dict]):
The request object. The sentiment analysis request
message.
document (google.cloud.language_v1.types.Document):
@@ -410,7 +407,7 @@ def analyze_sentiment(
def analyze_entities(
self,
- request: language_service.AnalyzeEntitiesRequest = None,
+ request: Union[language_service.AnalyzeEntitiesRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -424,7 +421,7 @@ def analyze_entities(
properties.
Args:
- request (google.cloud.language_v1.types.AnalyzeEntitiesRequest):
+ request (Union[google.cloud.language_v1.types.AnalyzeEntitiesRequest, dict]):
The request object. The entity analysis request message.
document (google.cloud.language_v1.types.Document):
Input document.
@@ -483,7 +480,7 @@ def analyze_entities(
def analyze_entity_sentiment(
self,
- request: language_service.AnalyzeEntitySentimentRequest = None,
+ request: Union[language_service.AnalyzeEntitySentimentRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -497,7 +494,7 @@ def analyze_entity_sentiment(
and its mentions.
Args:
- request (google.cloud.language_v1.types.AnalyzeEntitySentimentRequest):
+ request (Union[google.cloud.language_v1.types.AnalyzeEntitySentimentRequest, dict]):
The request object. The entity-level sentiment analysis
request message.
document (google.cloud.language_v1.types.Document):
@@ -559,7 +556,7 @@ def analyze_entity_sentiment(
def analyze_syntax(
self,
- request: language_service.AnalyzeSyntaxRequest = None,
+ request: Union[language_service.AnalyzeSyntaxRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -572,7 +569,7 @@ def analyze_syntax(
tags, dependency trees, and other properties.
Args:
- request (google.cloud.language_v1.types.AnalyzeSyntaxRequest):
+ request (Union[google.cloud.language_v1.types.AnalyzeSyntaxRequest, dict]):
The request object. The syntax analysis request message.
document (google.cloud.language_v1.types.Document):
Input document.
@@ -631,7 +628,7 @@ def analyze_syntax(
def classify_text(
self,
- request: language_service.ClassifyTextRequest = None,
+ request: Union[language_service.ClassifyTextRequest, dict] = None,
*,
document: language_service.Document = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -641,7 +638,7 @@ def classify_text(
r"""Classifies a document into categories.
Args:
- request (google.cloud.language_v1.types.ClassifyTextRequest):
+ request (Union[google.cloud.language_v1.types.ClassifyTextRequest, dict]):
The request object. The document classification request
message.
document (google.cloud.language_v1.types.Document):
@@ -694,7 +691,7 @@ def classify_text(
def annotate_text(
self,
- request: language_service.AnnotateTextRequest = None,
+ request: Union[language_service.AnnotateTextRequest, dict] = None,
*,
document: language_service.Document = None,
features: language_service.AnnotateTextRequest.Features = None,
@@ -708,7 +705,7 @@ def annotate_text(
analyzeSyntax provide in one call.
Args:
- request (google.cloud.language_v1.types.AnnotateTextRequest):
+ request (Union[google.cloud.language_v1.types.AnnotateTextRequest, dict]):
The request object. The request message for the text
annotation API, which can perform multiple analysis
types (sentiment, entities, and syntax) in one call.
@@ -776,6 +773,19 @@ def annotate_text(
# Done; return the response.
return response
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """
+ self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
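
The signature changes above widen each `request` parameter to `Union[<RequestType>, dict]`, so a plain dict with the same field names is now accepted alongside the generated request type. A small sketch of the two equivalent call styles (illustrative only, assuming default credentials):

    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="Hello from the generated client.",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )

    # Typed request object (previous behaviour, still supported).
    typed = client.analyze_entities(
        request=language_v1.AnalyzeEntitiesRequest(document=document)
    )

    # Plain dict with the same field names, coerced to the request type internally.
    as_dict = client.analyze_entities(request={"document": document})
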
diff --git a/google/cloud/language_v1/services/language_service/transports/base.py b/google/cloud/language_v1/services/language_service/transports/base.py
index 42537ea4..0e4917a5 100644
--- a/google/cloud/language_v1/services/language_service/transports/base.py
+++ b/google/cloud/language_v1/services/language_service/transports/base.py
@@ -118,7 +118,7 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
- # If the credentials is service account credentials, then always try to use self signed JWT.
+ # If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
@@ -247,6 +247,15 @@ def _prep_wrapped_messages(self, client_info):
),
}
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
@property
def analyze_sentiment(
self,
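
Outside of a ``with`` block, the same cleanup can be done explicitly through the new `close()` hook, subject to the caveat in the docstring above (only safe when the transport is not shared). A sketch, not part of the diff:

    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    try:
        document = language_v1.Document(
            content="Explicit cleanup example.",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        client.analyze_sentiment(request={"document": document})
    finally:
        # Equivalent to what __exit__ now does; do NOT call this if the
        # transport/channel is shared with other client instances.
        client.transport.close()
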
diff --git a/google/cloud/language_v1/services/language_service/transports/grpc.py b/google/cloud/language_v1/services/language_service/transports/grpc.py
index a059ea93..4a85b2e2 100644
--- a/google/cloud/language_v1/services/language_service/transports/grpc.py
+++ b/google/cloud/language_v1/services/language_service/transports/grpc.py
@@ -81,16 +81,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -407,5 +407,8 @@ def annotate_text(
)
return self._stubs["annotate_text"]
+ def close(self):
+ self.grpc_channel.close()
+
__all__ = ("LanguageServiceGrpcTransport",)
diff --git a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py
index 4f849e85..46294075 100644
--- a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py
+++ b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py
@@ -128,16 +128,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -413,5 +413,8 @@ def annotate_text(
)
return self._stubs["annotate_text"]
+ def close(self):
+ return self.grpc_channel.close()
+
__all__ = ("LanguageServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/language_v1/types/language_service.py b/google/cloud/language_v1/types/language_service.py
index 1138d63e..e4ee8e53 100644
--- a/google/cloud/language_v1/types/language_service.py
+++ b/google/cloud/language_v1/types/language_service.py
@@ -101,6 +101,7 @@ class Type(proto.Enum):
class Sentence(proto.Message):
r"""Represents a sentence in the input document.
+
Attributes:
text (google.cloud.language_v1.types.TextSpan):
The sentence text.
@@ -182,6 +183,7 @@ class Type(proto.Enum):
class Token(proto.Message):
r"""Represents the smallest syntactic building block of the text.
+
Attributes:
text (google.cloud.language_v1.types.TextSpan):
The token text.
@@ -538,6 +540,7 @@ class Type(proto.Enum):
class TextSpan(proto.Message):
r"""Represents an output piece of text.
+
Attributes:
content (str):
The content of the output text.
@@ -554,6 +557,7 @@ class TextSpan(proto.Message):
class ClassificationCategory(proto.Message):
r"""Represents a category returned from the text classifier.
+
Attributes:
name (str):
The name of the category representing the document, from the
@@ -571,6 +575,7 @@ class ClassificationCategory(proto.Message):
class AnalyzeSentimentRequest(proto.Message):
r"""The sentiment analysis request message.
+
Attributes:
document (google.cloud.language_v1.types.Document):
Input document.
@@ -585,6 +590,7 @@ class AnalyzeSentimentRequest(proto.Message):
class AnalyzeSentimentResponse(proto.Message):
r"""The sentiment analysis response message.
+
Attributes:
document_sentiment (google.cloud.language_v1.types.Sentiment):
The overall sentiment of the input document.
@@ -606,6 +612,7 @@ class AnalyzeSentimentResponse(proto.Message):
class AnalyzeEntitySentimentRequest(proto.Message):
r"""The entity-level sentiment analysis request message.
+
Attributes:
document (google.cloud.language_v1.types.Document):
Input document.
@@ -620,6 +627,7 @@ class AnalyzeEntitySentimentRequest(proto.Message):
class AnalyzeEntitySentimentResponse(proto.Message):
r"""The entity-level sentiment analysis response message.
+
Attributes:
entities (Sequence[google.cloud.language_v1.types.Entity]):
The recognized entities in the input document
@@ -638,6 +646,7 @@ class AnalyzeEntitySentimentResponse(proto.Message):
class AnalyzeEntitiesRequest(proto.Message):
r"""The entity analysis request message.
+
Attributes:
document (google.cloud.language_v1.types.Document):
Input document.
@@ -652,6 +661,7 @@ class AnalyzeEntitiesRequest(proto.Message):
class AnalyzeEntitiesResponse(proto.Message):
r"""The entity analysis response message.
+
Attributes:
entities (Sequence[google.cloud.language_v1.types.Entity]):
The recognized entities in the input
@@ -670,6 +680,7 @@ class AnalyzeEntitiesResponse(proto.Message):
class AnalyzeSyntaxRequest(proto.Message):
r"""The syntax analysis request message.
+
Attributes:
document (google.cloud.language_v1.types.Document):
Input document.
@@ -684,6 +695,7 @@ class AnalyzeSyntaxRequest(proto.Message):
class AnalyzeSyntaxResponse(proto.Message):
r"""The syntax analysis response message.
+
Attributes:
sentences (Sequence[google.cloud.language_v1.types.Sentence]):
Sentences in the input document.
@@ -705,6 +717,7 @@ class AnalyzeSyntaxResponse(proto.Message):
class ClassifyTextRequest(proto.Message):
r"""The document classification request message.
+
Attributes:
document (google.cloud.language_v1.types.Document):
Input document.
@@ -715,6 +728,7 @@ class ClassifyTextRequest(proto.Message):
class ClassifyTextResponse(proto.Message):
r"""The document classification response message.
+
Attributes:
categories (Sequence[google.cloud.language_v1.types.ClassificationCategory]):
Categories representing the input document.
@@ -772,6 +786,7 @@ class Features(proto.Message):
class AnnotateTextResponse(proto.Message):
r"""The text annotations response message.
+
Attributes:
sentences (Sequence[google.cloud.language_v1.types.Sentence]):
Sentences in the input document. Populated if the user
diff --git a/google/cloud/language_v1beta2/services/language_service/async_client.py b/google/cloud/language_v1beta2/services/language_service/async_client.py
index 711bc55c..c95d4083 100644
--- a/google/cloud/language_v1beta2/services/language_service/async_client.py
+++ b/google/cloud/language_v1beta2/services/language_service/async_client.py
@@ -662,6 +662,12 @@ async def annotate_text(
# Done; return the response.
return response
+ async def __aenter__(self):
+ return self
+
+ async def __aexit__(self, exc_type, exc, tb):
+ await self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py
index 70fc8afc..3ed65bb6 100644
--- a/google/cloud/language_v1beta2/services/language_service/client.py
+++ b/google/cloud/language_v1beta2/services/language_service/client.py
@@ -17,7 +17,7 @@
from distutils import util
import os
import re
-from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
@@ -329,15 +329,12 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
- always_use_jwt_access=(
- Transport == type(self).get_transport_class("grpc")
- or Transport == type(self).get_transport_class("grpc_asyncio")
- ),
+ always_use_jwt_access=True,
)
def analyze_sentiment(
self,
- request: language_service.AnalyzeSentimentRequest = None,
+ request: Union[language_service.AnalyzeSentimentRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -348,7 +345,7 @@ def analyze_sentiment(
r"""Analyzes the sentiment of the provided text.
Args:
- request (google.cloud.language_v1beta2.types.AnalyzeSentimentRequest):
+ request (Union[google.cloud.language_v1beta2.types.AnalyzeSentimentRequest, dict]):
The request object. The sentiment analysis request
message.
document (google.cloud.language_v1beta2.types.Document):
@@ -411,7 +408,7 @@ def analyze_sentiment(
def analyze_entities(
self,
- request: language_service.AnalyzeEntitiesRequest = None,
+ request: Union[language_service.AnalyzeEntitiesRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -425,7 +422,7 @@ def analyze_entities(
properties.
Args:
- request (google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest):
+ request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitiesRequest, dict]):
The request object. The entity analysis request message.
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -484,7 +481,7 @@ def analyze_entities(
def analyze_entity_sentiment(
self,
- request: language_service.AnalyzeEntitySentimentRequest = None,
+ request: Union[language_service.AnalyzeEntitySentimentRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -498,7 +495,7 @@ def analyze_entity_sentiment(
and its mentions.
Args:
- request (google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest):
+ request (Union[google.cloud.language_v1beta2.types.AnalyzeEntitySentimentRequest, dict]):
The request object. The entity-level sentiment analysis
request message.
document (google.cloud.language_v1beta2.types.Document):
@@ -560,7 +557,7 @@ def analyze_entity_sentiment(
def analyze_syntax(
self,
- request: language_service.AnalyzeSyntaxRequest = None,
+ request: Union[language_service.AnalyzeSyntaxRequest, dict] = None,
*,
document: language_service.Document = None,
encoding_type: language_service.EncodingType = None,
@@ -573,7 +570,7 @@ def analyze_syntax(
tags, dependency trees, and other properties.
Args:
- request (google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest):
+ request (Union[google.cloud.language_v1beta2.types.AnalyzeSyntaxRequest, dict]):
The request object. The syntax analysis request message.
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -632,7 +629,7 @@ def analyze_syntax(
def classify_text(
self,
- request: language_service.ClassifyTextRequest = None,
+ request: Union[language_service.ClassifyTextRequest, dict] = None,
*,
document: language_service.Document = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
@@ -642,7 +639,7 @@ def classify_text(
r"""Classifies a document into categories.
Args:
- request (google.cloud.language_v1beta2.types.ClassifyTextRequest):
+ request (Union[google.cloud.language_v1beta2.types.ClassifyTextRequest, dict]):
The request object. The document classification request
message.
document (google.cloud.language_v1beta2.types.Document):
@@ -695,7 +692,7 @@ def classify_text(
def annotate_text(
self,
- request: language_service.AnnotateTextRequest = None,
+ request: Union[language_service.AnnotateTextRequest, dict] = None,
*,
document: language_service.Document = None,
features: language_service.AnnotateTextRequest.Features = None,
@@ -709,7 +706,7 @@ def annotate_text(
call.
Args:
- request (google.cloud.language_v1beta2.types.AnnotateTextRequest):
+ request (Union[google.cloud.language_v1beta2.types.AnnotateTextRequest, dict]):
The request object. The request message for the text
annotation API, which can perform multiple analysis
types (sentiment, entities, and syntax) in one call.
@@ -777,6 +774,19 @@ def annotate_text(
# Done; return the response.
return response
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """
+ self.transport.close()
+
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
diff --git a/google/cloud/language_v1beta2/services/language_service/transports/base.py b/google/cloud/language_v1beta2/services/language_service/transports/base.py
index 38f98b34..59be86ca 100644
--- a/google/cloud/language_v1beta2/services/language_service/transports/base.py
+++ b/google/cloud/language_v1beta2/services/language_service/transports/base.py
@@ -118,7 +118,7 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
- # If the credentials is service account credentials, then always try to use self signed JWT.
+ # If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
@@ -247,6 +247,15 @@ def _prep_wrapped_messages(self, client_info):
),
}
+ def close(self):
+ """Closes resources associated with the transport.
+
+ .. warning::
+ Only call this method if the transport is NOT shared
+ with other clients - this may cause errors in other clients!
+ """
+ raise NotImplementedError()
+
@property
def analyze_sentiment(
self,
diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py
index ad25eaa4..7f7c7b2d 100644
--- a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py
+++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py
@@ -81,16 +81,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -407,5 +407,8 @@ def annotate_text(
)
return self._stubs["annotate_text"]
+ def close(self):
+ self.grpc_channel.close()
+
__all__ = ("LanguageServiceGrpcTransport",)
diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py
index a141b4cb..2c48f9bc 100644
--- a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py
+++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py
@@ -128,16 +128,16 @@ def __init__(
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
- ``client_cert_source`` or applicatin default SSL credentials.
+ ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
- for grpc channel. It is ignored if ``channel`` is provided.
+ for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
- both in PEM format. It is used to configure mutual TLS channel. It is
+ both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
@@ -413,5 +413,8 @@ def annotate_text(
)
return self._stubs["annotate_text"]
+ def close(self):
+ return self.grpc_channel.close()
+
__all__ = ("LanguageServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/language_v1beta2/types/language_service.py b/google/cloud/language_v1beta2/types/language_service.py
index 631b8fad..0aabe4a9 100644
--- a/google/cloud/language_v1beta2/types/language_service.py
+++ b/google/cloud/language_v1beta2/types/language_service.py
@@ -101,6 +101,7 @@ class Type(proto.Enum):
class Sentence(proto.Message):
r"""Represents a sentence in the input document.
+
Attributes:
text (google.cloud.language_v1beta2.types.TextSpan):
The sentence text.
@@ -182,6 +183,7 @@ class Type(proto.Enum):
class Token(proto.Message):
r"""Represents the smallest syntactic building block of the text.
+
Attributes:
text (google.cloud.language_v1beta2.types.TextSpan):
The token text.
@@ -221,6 +223,7 @@ class Sentiment(proto.Message):
class PartOfSpeech(proto.Message):
r"""Represents part of speech information for a token.
+
Attributes:
tag (google.cloud.language_v1beta2.types.PartOfSpeech.Tag):
The part of speech tag.
@@ -401,6 +404,7 @@ class Voice(proto.Enum):
class DependencyEdge(proto.Message):
r"""Represents dependency parse tree information for a token.
+
Attributes:
head_token_index (int):
Represents the head of this token in the dependency tree.
@@ -533,6 +537,7 @@ class Type(proto.Enum):
class TextSpan(proto.Message):
r"""Represents an output piece of text.
+
Attributes:
content (str):
The content of the output text.
@@ -549,6 +554,7 @@ class TextSpan(proto.Message):
class ClassificationCategory(proto.Message):
r"""Represents a category returned from the text classifier.
+
Attributes:
name (str):
The name of the category representing the document, from the
@@ -566,6 +572,7 @@ class ClassificationCategory(proto.Message):
class AnalyzeSentimentRequest(proto.Message):
r"""The sentiment analysis request message.
+
Attributes:
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -581,6 +588,7 @@ class AnalyzeSentimentRequest(proto.Message):
class AnalyzeSentimentResponse(proto.Message):
r"""The sentiment analysis response message.
+
Attributes:
document_sentiment (google.cloud.language_v1beta2.types.Sentiment):
The overall sentiment of the input document.
@@ -602,6 +610,7 @@ class AnalyzeSentimentResponse(proto.Message):
class AnalyzeEntitySentimentRequest(proto.Message):
r"""The entity-level sentiment analysis request message.
+
Attributes:
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -616,6 +625,7 @@ class AnalyzeEntitySentimentRequest(proto.Message):
class AnalyzeEntitySentimentResponse(proto.Message):
r"""The entity-level sentiment analysis response message.
+
Attributes:
entities (Sequence[google.cloud.language_v1beta2.types.Entity]):
The recognized entities in the input document
@@ -634,6 +644,7 @@ class AnalyzeEntitySentimentResponse(proto.Message):
class AnalyzeEntitiesRequest(proto.Message):
r"""The entity analysis request message.
+
Attributes:
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -648,6 +659,7 @@ class AnalyzeEntitiesRequest(proto.Message):
class AnalyzeEntitiesResponse(proto.Message):
r"""The entity analysis response message.
+
Attributes:
entities (Sequence[google.cloud.language_v1beta2.types.Entity]):
The recognized entities in the input
@@ -666,6 +678,7 @@ class AnalyzeEntitiesResponse(proto.Message):
class AnalyzeSyntaxRequest(proto.Message):
r"""The syntax analysis request message.
+
Attributes:
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -680,6 +693,7 @@ class AnalyzeSyntaxRequest(proto.Message):
class AnalyzeSyntaxResponse(proto.Message):
r"""The syntax analysis response message.
+
Attributes:
sentences (Sequence[google.cloud.language_v1beta2.types.Sentence]):
Sentences in the input document.
@@ -701,6 +715,7 @@ class AnalyzeSyntaxResponse(proto.Message):
class ClassifyTextRequest(proto.Message):
r"""The document classification request message.
+
Attributes:
document (google.cloud.language_v1beta2.types.Document):
Required. Input document.
@@ -711,6 +726,7 @@ class ClassifyTextRequest(proto.Message):
class ClassifyTextResponse(proto.Message):
r"""The document classification response message.
+
Attributes:
categories (Sequence[google.cloud.language_v1beta2.types.ClassificationCategory]):
Categories representing the input document.
@@ -771,6 +787,7 @@ class Features(proto.Message):
class AnnotateTextResponse(proto.Message):
r"""The text annotations response message.
+
Attributes:
sentences (Sequence[google.cloud.language_v1beta2.types.Sentence]):
Sentences in the input document. Populated if the user
diff --git a/noxfile.py b/noxfile.py
index 03aa2f58..672b28d6 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -29,7 +29,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
@@ -84,9 +84,15 @@ def default(session):
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
-
- session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+ session.install(
+ "mock",
+ "asyncmock",
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+ "-c",
+ constraints_path,
+ )
session.install("-e", ".", "-c", constraints_path)
diff --git a/renovate.json b/renovate.json
index c0489556..c21036d3 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,6 +1,9 @@
{
"extends": [
- "config:base", ":preserveSemverRanges"
+ "config:base",
+ "group:all",
+ ":preserveSemverRanges",
+ ":disableDependencyDashboard"
],
"ignorePaths": [".pre-commit-config.yaml"],
"pip_requirements": {
diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py
index 6a8ccdae..93a9122c 100644
--- a/samples/snippets/api/noxfile.py
+++ b/samples/snippets/api/noxfile.py
@@ -39,17 +39,15 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"],
-
+ "ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- 'enforce_type_hints': False,
-
+ "enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
@@ -57,13 +55,13 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- 'envs': {},
+ "envs": {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append('.')
+ sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG['gcloud_project_env']
+ env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG['envs'])
+ ret.update(TEST_CONFIG["envs"])
return ret
# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+# All versions used to test samples.
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
#
# Style Checks
#
@@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:
@nox.session
def lint(session: nox.sessions.Session) -> None:
- if not TEST_CONFIG['enforce_type_hints']:
+ if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
@@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- "."
+ ".",
]
session.run("flake8", *args)
+
+
#
# Black
#
@@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None:
session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
@@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
- env=get_pytest_env_vars()
+ env=get_pytest_env_vars(),
)
@@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip("SKIPPED: {} tests are disabled for this sample.".format(
- session.python
- ))
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
#
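
The `INSTALL_LIBRARY_FROM_SOURCE` change in this noxfile fixes a string-truthiness pitfall: `bool()` of any non-empty string is `True`, so the old form treated the value `"False"` as enabled. An illustrative sketch, not part of the diff:

    import os

    os.environ["INSTALL_LIBRARY_FROM_SOURCE"] = "False"

    # Old form: non-empty strings are truthy, so "False" still evaluated to True.
    old = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))

    # New form: only the literal strings "True"/"true" opt in.
    new = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true")

    print(old, new)  # True False
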
diff --git a/samples/snippets/api/requirements-test.txt b/samples/snippets/api/requirements-test.txt
index 95ea1e6a..92709451 100644
--- a/samples/snippets/api/requirements-test.txt
+++ b/samples/snippets/api/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt
index bb55768a..2d206082 100644
--- a/samples/snippets/api/requirements.txt
+++ b/samples/snippets/api/requirements.txt
@@ -1,3 +1,3 @@
-google-api-python-client==2.15.0
-google-auth==1.34.0
+google-api-python-client==2.24.0
+google-auth==2.2.1
google-auth-httplib2==0.1.0
diff --git a/samples/snippets/classify_text/noxfile.py b/samples/snippets/classify_text/noxfile.py
index 6a8ccdae..93a9122c 100644
--- a/samples/snippets/classify_text/noxfile.py
+++ b/samples/snippets/classify_text/noxfile.py
@@ -39,17 +39,15 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"],
-
+ "ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- 'enforce_type_hints': False,
-
+ "enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
@@ -57,13 +55,13 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- 'envs': {},
+ "envs": {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append('.')
+ sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG['gcloud_project_env']
+ env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG['envs'])
+ ret.update(TEST_CONFIG["envs"])
return ret
# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+# All versions used to test samples.
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
#
# Style Checks
#
@@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:
@nox.session
def lint(session: nox.sessions.Session) -> None:
- if not TEST_CONFIG['enforce_type_hints']:
+ if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
@@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- "."
+ ".",
]
session.run("flake8", *args)
+
+
#
# Black
#
@@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None:
session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
@@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
- env=get_pytest_env_vars()
+ env=get_pytest_env_vars(),
)
@@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip("SKIPPED: {} tests are disabled for this sample.".format(
- session.python
- ))
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
#
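
The noxfile.py hunk above (and the identical hunks for the other samples further below) keeps the per-sample override hook: when a sample directory ships a noxfile_config.py, its TEST_CONFIG_OVERRIDE dict is imported and layered over TEST_CONFIG. A minimal sketch of such an override, using only the keys visible in the hunk above; the values are placeholders, not settings taken from this repository:

    # noxfile_config.py -- illustrative placeholder values only.
    # Keys mirror TEST_CONFIG in the generated noxfile.py above.
    TEST_CONFIG_OVERRIDE = {
        # Skip Python versions this sample should not run under.
        "ignored_versions": ["3.6"],
        # Opt in to the build-specific Cloud project instead of GOOGLE_CLOUD_PROJECT.
        "gcloud_project_env": "BUILD_SPECIFIC_GCLOUD_PROJECT",
        # Extra, non-secret environment variables injected into the pytest run.
        "envs": {"SAMPLE_ENV_FLAG": "1"},
    }
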
diff --git a/samples/snippets/classify_text/requirements-test.txt b/samples/snippets/classify_text/requirements-test.txt
index 95ea1e6a..92709451 100644
--- a/samples/snippets/classify_text/requirements-test.txt
+++ b/samples/snippets/classify_text/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/samples/snippets/classify_text/requirements.txt b/samples/snippets/classify_text/requirements.txt
index fc435773..2323f5be 100644
--- a/samples/snippets/classify_text/requirements.txt
+++ b/samples/snippets/classify_text/requirements.txt
@@ -1,3 +1,3 @@
-google-cloud-language==2.2.1
+google-cloud-language==2.2.2
numpy==1.20.1; python_version > '3.6'
numpy==1.19.5; python_version <= '3.6'
diff --git a/samples/snippets/cloud-client/v1/noxfile.py b/samples/snippets/cloud-client/v1/noxfile.py
index 6a8ccdae..93a9122c 100644
--- a/samples/snippets/cloud-client/v1/noxfile.py
+++ b/samples/snippets/cloud-client/v1/noxfile.py
@@ -39,17 +39,15 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"],
-
+ "ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- 'enforce_type_hints': False,
-
+ "enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
@@ -57,13 +55,13 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- 'envs': {},
+ "envs": {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append('.')
+ sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG['gcloud_project_env']
+ env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG['envs'])
+ ret.update(TEST_CONFIG["envs"])
return ret
# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+# All versions used to test samples.
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
#
# Style Checks
#
@@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:
@nox.session
def lint(session: nox.sessions.Session) -> None:
- if not TEST_CONFIG['enforce_type_hints']:
+ if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
@@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- "."
+ ".",
]
session.run("flake8", *args)
+
+
#
# Black
#
@@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None:
session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
@@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
- env=get_pytest_env_vars()
+ env=get_pytest_env_vars(),
)
@@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip("SKIPPED: {} tests are disabled for this sample.".format(
- session.python
- ))
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
#
diff --git a/samples/snippets/cloud-client/v1/requirements-test.txt b/samples/snippets/cloud-client/v1/requirements-test.txt
index 95ea1e6a..92709451 100644
--- a/samples/snippets/cloud-client/v1/requirements-test.txt
+++ b/samples/snippets/cloud-client/v1/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/samples/snippets/cloud-client/v1/requirements.txt b/samples/snippets/cloud-client/v1/requirements.txt
index 7e8a9a02..fc0ee401 100644
--- a/samples/snippets/cloud-client/v1/requirements.txt
+++ b/samples/snippets/cloud-client/v1/requirements.txt
@@ -1 +1 @@
-google-cloud-language==2.2.1
+google-cloud-language==2.2.2
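
For context on what these pinned samples exercise: with google-cloud-language 2.x, a minimal sentiment call against the v1 surface looks roughly like the sketch below. This is a generic illustration of the public client API, not code from this PR, and it assumes application-default credentials are available in the environment.

    # Sketch: basic sentiment analysis with the v1 client.
    from google.cloud import language_v1

    client = language_v1.LanguageServiceClient()
    document = language_v1.Document(
        content="The Natural Language API is easy to use.",
        type_=language_v1.Document.Type.PLAIN_TEXT,
    )
    response = client.analyze_sentiment(request={"document": document})
    print(response.document_sentiment.score, response.document_sentiment.magnitude)
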
diff --git a/samples/snippets/generated-samples/v1/noxfile.py b/samples/snippets/generated-samples/v1/noxfile.py
index 6a8ccdae..93a9122c 100644
--- a/samples/snippets/generated-samples/v1/noxfile.py
+++ b/samples/snippets/generated-samples/v1/noxfile.py
@@ -39,17 +39,15 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"],
-
+ "ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- 'enforce_type_hints': False,
-
+ "enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
@@ -57,13 +55,13 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- 'envs': {},
+ "envs": {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append('.')
+ sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG['gcloud_project_env']
+ env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG['envs'])
+ ret.update(TEST_CONFIG["envs"])
return ret
# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+# All versions used to test samples.
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
#
# Style Checks
#
@@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:
@nox.session
def lint(session: nox.sessions.Session) -> None:
- if not TEST_CONFIG['enforce_type_hints']:
+ if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
@@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- "."
+ ".",
]
session.run("flake8", *args)
+
+
#
# Black
#
@@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None:
session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
@@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
- env=get_pytest_env_vars()
+ env=get_pytest_env_vars(),
)
@@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip("SKIPPED: {} tests are disabled for this sample.".format(
- session.python
- ))
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
#
diff --git a/samples/snippets/generated-samples/v1/requirements-test.txt b/samples/snippets/generated-samples/v1/requirements-test.txt
index 95ea1e6a..92709451 100644
--- a/samples/snippets/generated-samples/v1/requirements-test.txt
+++ b/samples/snippets/generated-samples/v1/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/samples/snippets/generated-samples/v1/requirements.txt b/samples/snippets/generated-samples/v1/requirements.txt
index 7e8a9a02..fc0ee401 100644
--- a/samples/snippets/generated-samples/v1/requirements.txt
+++ b/samples/snippets/generated-samples/v1/requirements.txt
@@ -1 +1 @@
-google-cloud-language==2.2.1
+google-cloud-language==2.2.2
diff --git a/samples/snippets/sentiment/noxfile.py b/samples/snippets/sentiment/noxfile.py
index 6a8ccdae..93a9122c 100644
--- a/samples/snippets/sentiment/noxfile.py
+++ b/samples/snippets/sentiment/noxfile.py
@@ -39,17 +39,15 @@
TEST_CONFIG = {
# You can opt out from the test for specific Python versions.
- 'ignored_versions': ["2.7"],
-
+ "ignored_versions": [],
# Old samples are opted out of enforcing Python type hints
# All new samples should feature them
- 'enforce_type_hints': False,
-
+ "enforce_type_hints": False,
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
# to use your own Cloud project.
- 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT',
+ "gcloud_project_env": "GOOGLE_CLOUD_PROJECT",
# 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT',
# If you need to use a specific version of pip,
# change pip_version_override to the string representation
@@ -57,13 +55,13 @@
"pip_version_override": None,
# A dictionary you want to inject into your test. Don't put any
# secrets here. These values will override predefined values.
- 'envs': {},
+ "envs": {},
}
try:
# Ensure we can import noxfile_config in the project's directory.
- sys.path.append('.')
+ sys.path.append(".")
from noxfile_config import TEST_CONFIG_OVERRIDE
except ImportError as e:
print("No user noxfile_config found: detail: {}".format(e))
@@ -78,25 +76,32 @@ def get_pytest_env_vars() -> Dict[str, str]:
ret = {}
# Override the GCLOUD_PROJECT and the alias.
- env_key = TEST_CONFIG['gcloud_project_env']
+ env_key = TEST_CONFIG["gcloud_project_env"]
# This should error out if not set.
- ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key]
+ ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key]
# Apply user supplied envs.
- ret.update(TEST_CONFIG['envs'])
+ ret.update(TEST_CONFIG["envs"])
return ret
# DO NOT EDIT - automatically generated.
-# All versions used to tested samples.
-ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"]
+# All versions used to test samples.
+ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"]
# Any default versions that should be ignored.
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions']
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS])
-INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False))
+INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in (
+ "True",
+ "true",
+)
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
#
# Style Checks
#
@@ -141,7 +146,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]:
@nox.session
def lint(session: nox.sessions.Session) -> None:
- if not TEST_CONFIG['enforce_type_hints']:
+ if not TEST_CONFIG["enforce_type_hints"]:
session.install("flake8", "flake8-import-order")
else:
session.install("flake8", "flake8-import-order", "flake8-annotations")
@@ -150,9 +155,11 @@ def lint(session: nox.sessions.Session) -> None:
args = FLAKE8_COMMON_ARGS + [
"--application-import-names",
",".join(local_names),
- "."
+ ".",
]
session.run("flake8", *args)
+
+
#
# Black
#
@@ -165,6 +172,7 @@ def blacken(session: nox.sessions.Session) -> None:
session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -173,7 +181,9 @@ def blacken(session: nox.sessions.Session) -> None:
PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"]
-def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None:
+def _session_tests(
+ session: nox.sessions.Session, post_install: Callable = None
+) -> None:
if TEST_CONFIG["pip_version_override"]:
pip_version = TEST_CONFIG["pip_version_override"]
session.install(f"pip=={pip_version}")
@@ -203,7 +213,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None)
# on travis where slow and flaky tests are excluded.
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html
success_codes=[0, 5],
- env=get_pytest_env_vars()
+ env=get_pytest_env_vars(),
)
@@ -213,9 +223,9 @@ def py(session: nox.sessions.Session) -> None:
if session.python in TESTED_VERSIONS:
_session_tests(session)
else:
- session.skip("SKIPPED: {} tests are disabled for this sample.".format(
- session.python
- ))
+ session.skip(
+ "SKIPPED: {} tests are disabled for this sample.".format(session.python)
+ )
#
diff --git a/samples/snippets/sentiment/requirements-test.txt b/samples/snippets/sentiment/requirements-test.txt
index 95ea1e6a..92709451 100644
--- a/samples/snippets/sentiment/requirements-test.txt
+++ b/samples/snippets/sentiment/requirements-test.txt
@@ -1 +1 @@
-pytest==6.2.4
+pytest==6.2.5
diff --git a/samples/snippets/sentiment/requirements.txt b/samples/snippets/sentiment/requirements.txt
index 7e8a9a02..fc0ee401 100644
--- a/samples/snippets/sentiment/requirements.txt
+++ b/samples/snippets/sentiment/requirements.txt
@@ -1 +1 @@
-google-cloud-language==2.2.1
+google-cloud-language==2.2.2
diff --git a/scripts/fixup_language_v1_keywords.py b/scripts/fixup_language_v1_keywords.py
index 99d05077..aac999b1 100644
--- a/scripts/fixup_language_v1_keywords.py
+++ b/scripts/fixup_language_v1_keywords.py
@@ -39,12 +39,12 @@ def partition(
class languageCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'analyze_entities': ('document', 'encoding_type', ),
- 'analyze_entity_sentiment': ('document', 'encoding_type', ),
- 'analyze_sentiment': ('document', 'encoding_type', ),
- 'analyze_syntax': ('document', 'encoding_type', ),
- 'annotate_text': ('document', 'features', 'encoding_type', ),
- 'classify_text': ('document', ),
+ 'analyze_entities': ('document', 'encoding_type', ),
+ 'analyze_entity_sentiment': ('document', 'encoding_type', ),
+ 'analyze_sentiment': ('document', 'encoding_type', ),
+ 'analyze_syntax': ('document', 'encoding_type', ),
+ 'annotate_text': ('document', 'features', 'encoding_type', ),
+ 'classify_text': ('document', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
@@ -63,7 +63,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
return updated
kwargs, ctrl_kwargs = partition(
- lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ lambda a: a.keyword.value not in self.CTRL_PARAMS,
kwargs
)
diff --git a/scripts/fixup_language_v1beta2_keywords.py b/scripts/fixup_language_v1beta2_keywords.py
index 99d05077..aac999b1 100644
--- a/scripts/fixup_language_v1beta2_keywords.py
+++ b/scripts/fixup_language_v1beta2_keywords.py
@@ -39,12 +39,12 @@ def partition(
class languageCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
- 'analyze_entities': ('document', 'encoding_type', ),
- 'analyze_entity_sentiment': ('document', 'encoding_type', ),
- 'analyze_sentiment': ('document', 'encoding_type', ),
- 'analyze_syntax': ('document', 'encoding_type', ),
- 'annotate_text': ('document', 'features', 'encoding_type', ),
- 'classify_text': ('document', ),
+ 'analyze_entities': ('document', 'encoding_type', ),
+ 'analyze_entity_sentiment': ('document', 'encoding_type', ),
+ 'analyze_sentiment': ('document', 'encoding_type', ),
+ 'analyze_syntax': ('document', 'encoding_type', ),
+ 'annotate_text': ('document', 'features', 'encoding_type', ),
+ 'classify_text': ('document', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
@@ -63,7 +63,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
return updated
kwargs, ctrl_kwargs = partition(
- lambda a: not a.keyword.value in self.CTRL_PARAMS,
+ lambda a: a.keyword.value not in self.CTRL_PARAMS,
kwargs
)
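
The two fixup-script hunks above are whitespace and lint-style changes only (re-aligned METHOD_TO_PARAMS entries and `not in` instead of `not ... in`); the transformation the scripts perform is unchanged: arguments named in METHOD_TO_PARAMS are folded into a single request dict, while the CTRL_PARAMS (retry, timeout, metadata) stay as plain keyword arguments. A rough standalone sketch of that split, not the scripts' libcst implementation:

    # Illustrative only: mimics the request/control split the fixup scripts apply.
    CTRL_PARAMS = ("retry", "timeout", "metadata")

    def split_call_kwargs(kwargs):
        """Return (request_fields, control_kwargs) for a GAPIC method call."""
        request = {k: v for k, v in kwargs.items() if k not in CTRL_PARAMS}
        ctrl = {k: v for k, v in kwargs.items() if k in CTRL_PARAMS}
        return request, ctrl

    request, ctrl = split_call_kwargs(
        {"document": {"content": "hello"}, "encoding_type": 1, "timeout": 30.0}
    )
    # request == {"document": {"content": "hello"}, "encoding_type": 1}
    # ctrl == {"timeout": 30.0}
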
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index a0406dba..275d6498 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -12,7 +12,7 @@ Install Dependencies
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
.. code-block:: bash
diff --git a/setup.py b/setup.py
index 6d7a1acb..708e6149 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-language"
description = "Google Cloud Natural Language API client library"
-version = "2.2.2"
+version = "2.3.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py
index 1f4dcf06..a2a061a3 100644
--- a/tests/unit/gapic/language_v1/test_language_service.py
+++ b/tests/unit/gapic/language_v1/test_language_service.py
@@ -29,6 +29,7 @@
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
+from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.language_v1.services.language_service import (
@@ -1686,6 +1687,9 @@ def test_language_service_base_transport():
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
@requires_google_auth_gte_1_25_0
def test_language_service_base_transport_with_credentials_file():
@@ -2165,3 +2169,49 @@ def test_client_withDEFAULT_CLIENT_INFO():
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close():
+ transports = {
+ "grpc": "_grpc_channel",
+ }
+
+ for transport, close_name in transports.items():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, close_name)), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ "grpc",
+ ]
+ for transport in transports:
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()
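
The tests added above exercise context-manager support on the generated clients: leaving the `with` block closes the underlying gRPC channel. In application code the same pattern would look roughly like this sketch (default credentials assumed; the document content is a placeholder):

    # Sketch: context-managed client so transport.close() runs on exit,
    # which is what the new test_transport_close/test_client_ctx tests verify.
    from google.cloud import language_v1

    with language_v1.LanguageServiceClient() as client:
        document = language_v1.Document(
            content="Google Cloud Natural Language API",
            type_=language_v1.Document.Type.PLAIN_TEXT,
        )
        client.analyze_entities(request={"document": document})
    # The gRPC channel is closed here.
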
diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py b/tests/unit/gapic/language_v1beta2/test_language_service.py
index 71d88d17..729ad8c1 100644
--- a/tests/unit/gapic/language_v1beta2/test_language_service.py
+++ b/tests/unit/gapic/language_v1beta2/test_language_service.py
@@ -29,6 +29,7 @@
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
+from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.language_v1beta2.services.language_service import (
@@ -1688,6 +1689,9 @@ def test_language_service_base_transport():
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
+ with pytest.raises(NotImplementedError):
+ transport.close()
+
@requires_google_auth_gte_1_25_0
def test_language_service_base_transport_with_credentials_file():
@@ -2167,3 +2171,49 @@ def test_client_withDEFAULT_CLIENT_INFO():
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
+
+
+@pytest.mark.asyncio
+async def test_transport_close_async():
+ client = LanguageServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, "grpc_channel")), "close"
+ ) as close:
+ async with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_transport_close():
+ transports = {
+ "grpc": "_grpc_channel",
+ }
+
+ for transport, close_name in transports.items():
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ with mock.patch.object(
+ type(getattr(client.transport, close_name)), "close"
+ ) as close:
+ with client:
+ close.assert_not_called()
+ close.assert_called_once()
+
+
+def test_client_ctx():
+ transports = [
+ "grpc",
+ ]
+ for transport in transports:
+ client = LanguageServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport
+ )
+ # Test client calls underlying transport.
+ with mock.patch.object(type(client.transport), "close") as close:
+ close.assert_not_called()
+ with client:
+ pass
+ close.assert_called()