diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index cb89b2e3..b668c04d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3319f86b..62aced93 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/cdpe-cloudai are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cdpe-cloudai -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @telpirion @sirtorry @lucaswadedavis @googleapis/python-samples-owners +# @googleapis/python-samples-reviewers @googleapis/cdpe-cloudai are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cdpe-cloudai diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad05..466597e5 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 00000000..d4ca9418 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..f7b8344c --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..1e8b05c3 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main 
+name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 00000000..074ee250 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 4a4c3e42..9db53208 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-language python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 44a63ec8..6c908a55 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-language/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } diff --git a/.repo-metadata.json b/.repo-metadata.json index 24d5ceb2..2997cbdb 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,9 +2,9 @@ "name": "language", "name_pretty": "Natural Language", "product_documentation": "https://cloud.google.com/natural-language/docs/", - "client_documentation": "https://googleapis.dev/python/language/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/language/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559753", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/python-language", @@ -12,5 +12,6 @@ "api_id": "language.googleapis.com", "requires_billing": true, "default_version": "v1", - "codeowner_team": "" + "codeowner_team": "@googleapis/cdpe-cloudai", + "api_shortname": "language" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a9bbfab..d6fe8a06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-language/#history +### [2.3.2](https://github.com/googleapis/python-language/compare/v2.3.1...v2.3.2) (2022-01-20) + + +### Documentation + +* **samples:** Document -> types.Document ([#227](https://github.com/googleapis/python-language/issues/227)) ([01367d7](https://github.com/googleapis/python-language/commit/01367d7b1e0ddba6e6b920f125730aa97d51ada0)) + ### [2.3.1](https://www.github.com/googleapis/python-language/compare/v2.3.0...v2.3.1) (2021-11-01) diff --git a/README.rst b/README.rst index 958246b4..1311cd55 100644 --- a/README.rst +++ b/README.rst @@ -24,7 +24,7 @@ with your document storage on Google Cloud Storage. :target: https://pypi.org/project/google-cloud-language/ .. _Google Cloud Natural Language: https://cloud.google.com/natural-language/ .. _Product Documentation: https://cloud.google.com/natural-language/docs -.. _Client Library Documentation: https://googleapis.dev/python/language/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/language/latest .. note:: @@ -101,4 +101,4 @@ Next Steps - Read the `Product documentation`_ to learn more about the product and see How-to Guides. -.. _Usage documentation: https://googleapis.dev/python/language/latest +.. 
_Usage documentation: https://cloud.google.com/python/docs/reference/language/latest diff --git a/docs/api.rst b/docs/api.rst deleted file mode 100644 index 8720e9fa..00000000 --- a/docs/api.rst +++ /dev/null @@ -1,40 +0,0 @@ -Language Client API Reference -============================= - -This package includes clients for multiple versions of the Natural Language -API. By default, you will get ``v1``, the latest GA version. - -.. toctree:: - :maxdepth: 2 - - language_v1/services - language_v1/types - -If you are interested in beta features ahead of the latest GA, you may -opt-in to the v1.1 beta, which is spelled ``v1beta2``. In order to do this, -you will want to import from ``google.cloud.language_v1beta2`` in lieu of -``google.cloud.language``. - -An API and type reference is provided for the v1.1 beta also: - -.. toctree:: - :maxdepth: 2 - - language_v1beta2/services - language_v1beta2/types - -Migration Guide ---------------- - -See the guide below for instructions on migrating to the 2.x release of this library. - -.. toctree:: - :maxdepth: 2 - - UPGRADING - -.. note:: - - The client for the beta API is provided on a provisional basis. The API - surface is subject to change, and it is possible that this client will be - deprecated or removed after its features become GA. diff --git a/docs/index.rst b/docs/index.rst index 368f811d..524632b7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,10 +2,44 @@ .. include:: multiprocessing.rst +This package includes clients for multiple versions of Natural Language. +By default, you will get version ``language_v1``. + + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1/services + language_v1/types + +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + language_v1beta2/services + language_v1beta2/types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the latest version. + +.. toctree:: + :maxdepth: 2 + +  UPGRADING + + +Changelog +--------- + +For a list of all ``google-cloud-language`` releases: .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - usage - api - changelog + changelog diff --git a/docs/usage.rst b/docs/usage.rst deleted file mode 100644 index f2e45934..00000000 --- a/docs/usage.rst +++ /dev/null @@ -1,199 +0,0 @@ -Using the Language Client -========================= - -Documents -********* - -The Google Natural Language API has the following supported methods: - -- `analyzeEntities`_ -- `analyzeSentiment`_ -- `analyzeEntitySentiment`_ -- `annotateText`_ -- `classifyText`_ - -and each method uses a :class:`~.language_v1.types.Document` for representing -text. - - .. code-block:: python - - >>> document = language.types.Document( - ... content='Google, headquartered in Mountain View, unveiled the ' - ... 'new Android phone at the Consumer Electronic Show. ' - ... 'Sundar Pichai said in his keynote that users love ' - ... 'their new Android phones.', - ... language='en', - ... type='PLAIN_TEXT', - ... ) - - -The document's language defaults to ``None``, which will cause the API to -auto-detect the language. - -In addition, you can construct an HTML document: - - .. code-block:: python - - >>> html_content = """\ - ... - ... - ... El Tiempo de las Historias</time> - ... </head> - ... <body> - ... <p>La vaca saltó sobre la luna.</p> - ... </body> - ... </html> - ... """ - >>> document = language.types.Document( - ... content=html_content, - ... language='es', - ... type='HTML', - ... 
) - -The ``language`` argument can be either ISO-639-1 or BCP-47 language -codes. The API reference page contains the full list of `supported languages`_. - -.. _supported languages: https://cloud.google.com/natural-language/docs/languages - - -In addition to supplying the text / HTML content, a document can refer -to content stored in `Google Cloud Storage`_. - - .. code-block:: python - - >>> document = language.types.Document( - ... gcs_content_uri='gs://my-text-bucket/sentiment-me.txt', - ... type=language.enums.HTML, - ... ) - -.. _analyzeEntities: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/analyzeEntities -.. _analyzeSentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/analyzeSentiment -.. _analyzeEntitySentiment: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/analyzeEntitySentiment -.. _annotateText: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/annotateText -.. _classifyText: https://cloud.google.com/natural-language/docs/reference/rest/v1/documents/classifyText -.. _Google Cloud Storage: https://cloud.google.com/storage/ - -Analyze Entities -**************** - -The :meth:`~.language_v1.LanguageServiceClient.analyze_entities` -method finds named entities (i.e. proper names) in the text. This method -returns a :class:`~.language_v1.types.AnalyzeEntitiesResponse`. - - .. code-block:: python - - >>> document = language.types.Document( - ... content='Michelangelo Caravaggio, Italian painter, is ' - ... 'known for "The Calling of Saint Matthew".', - ... type=language.enums.Document.Type.PLAIN_TEXT, - ... ) - >>> response = client.analyze_entities( - ... document=document, - ... encoding_type='UTF32', - ... ) - >>> for entity in response.entities: - ... print('=' * 20) - ... print(' name: {0}'.format(entity.name)) - ... print(' type: {0}'.format(entity.type)) - ... print(' metadata: {0}'.format(entity.metadata)) - ... print(' salience: {0}'.format(entity.salience)) - ==================== - name: Michelangelo Caravaggio - type: PERSON - metadata: {'wikipedia_url': 'https://en.wikipedia.org/wiki/Caravaggio'} - salience: 0.7615959 - ==================== - name: Italian - type: LOCATION - metadata: {'wikipedia_url': 'https://en.wikipedia.org/wiki/Italy'} - salience: 0.19960518 - ==================== - name: The Calling of Saint Matthew - type: EVENT - metadata: {'wikipedia_url': 'https://en.wikipedia.org/wiki/The_Calling_of_St_Matthew_(Caravaggio)'} - salience: 0.038798928 - -.. note:: - - It is recommended to send an ``encoding_type`` argument to Natural - Language methods, so they provide useful offsets for the data they return. - While the correct value varies by environment, in Python you *usually* - want ``UTF32``. - - -Analyze Sentiment -***************** - -The :meth:`~.language_v1.LanguageServiceClient.analyze_sentiment` method -analyzes the sentiment of the provided text. This method returns a -:class:`~.language_v1.types.AnalyzeSentimentResponse`. - - .. code-block:: python - - >>> document = language.types.Document( - ... content='Jogging is not very fun.', - ... type='PLAIN_TEXT', - ... ) - >>> response = client.analyze_sentiment( - ... document=document, - ... encoding_type='UTF32', - ... ) - >>> sentiment = response.document_sentiment - >>> print(sentiment.score) - -1 - >>> print(sentiment.magnitude) - 0.8 - -.. 
note:: - - It is recommended to send an ``encoding_type`` argument to Natural - Language methods, so they provide useful offsets for the data they return. - While the correct value varies by environment, in Python you *usually* - want ``UTF32``. - - -Analyze Entity Sentiment -************************ - -The :meth:`~.language_v1.LanguageServiceClient.analyze_entity_sentiment` -method is effectively the amalgamation of -:meth:`~.language_v1.LanguageServiceClient.analyze_entities` and -:meth:`~.language_v1.LanguageServiceClient.analyze_sentiment`. -This method returns a -:class:`~.language_v1.types.AnalyzeEntitySentimentResponse`. - -.. code-block:: python - - >>> document = language.types.Document( - ... content='Mona said that jogging is very fun.', - ... type='PLAIN_TEXT', - ... ) - >>> response = client.analyze_entity_sentiment( - ... document=document, - ... encoding_type='UTF32', - ... ) - >>> entities = response.entities - >>> entities[0].name - 'Mona' - >>> entities[1].name - 'jogging' - >>> entities[1].sentiment.magnitude - 0.8 - >>> entities[1].sentiment.score - 0.8 - -.. note:: - - It is recommended to send an ``encoding_type`` argument to Natural - Language methods, so they provide useful offsets for the data they return. - While the correct value varies by environment, in Python you *usually* - want ``UTF32``. - - -Annotate Text -************* - -The :meth:`~.language_v1.LanguageServiceClient.annotate_text` method -analyzes a document and is intended for users who are familiar with -machine learning and need in-depth text features to build upon. This method -returns a :class:`~.language_v1.types.AnnotateTextResponse`. diff --git a/google/cloud/language_v1/services/language_service/async_client.py b/google/cloud/language_v1/services/language_service/async_client.py index 04146200..8121cc51 100644 --- a/google/cloud/language_v1/services/language_service/async_client.py +++ b/google/cloud/language_v1/services/language_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.language_v1.types import language_service from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/language_v1/services/language_service/client.py b/google/cloud/language_v1/services/language_service/client.py index d2ec0b8e..ea4653b1 100644 --- a/google/cloud/language_v1/services/language_service/client.py +++ b/google/cloud/language_v1/services/language_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.language_v1.types import language_service from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO @@ -268,8 +270,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/language_v1/services/language_service/transports/base.py b/google/cloud/language_v1/services/language_service/transports/base.py index b38a9acf..5d147fc1 100644 --- a/google/cloud/language_v1/services/language_service/transports/base.py +++ b/google/cloud/language_v1/services/language_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -102,7 +102,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/language_v1/services/language_service/transports/grpc.py b/google/cloud/language_v1/services/language_service/transports/grpc.py index 4a85b2e2..94941e4e 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, 
Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py index a9292306..681336e2 100644 --- a/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1/services/language_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/language_v1/types/language_service.py b/google/cloud/language_v1/types/language_service.py index c8beb931..25a6fff4 100644 --- a/google/cloud/language_v1/types/language_service.py +++ b/google/cloud/language_v1/types/language_service.py @@ -77,6 +77,7 @@ class Document(proto.Message): The content of the input in string format. Cloud audit logging exempt since it is based on user data. + This field is a member of `oneof`_ ``source``. gcs_content_uri (str): The Google Cloud Storage URI where the file content is @@ -84,6 +85,7 @@ class Document(proto.Message): gs://bucket_name/object_name. For more details, see https://cloud.google.com/storage/docs/reference-uris. NOTE: Cloud Storage object versioning is not supported. + This field is a member of `oneof`_ ``source``. 
language (str): The language of the document (if not specified, the language diff --git a/google/cloud/language_v1beta2/services/language_service/async_client.py b/google/cloud/language_v1beta2/services/language_service/async_client.py index c3b06047..366e2acf 100644 --- a/google/cloud/language_v1beta2/services/language_service/async_client.py +++ b/google/cloud/language_v1beta2/services/language_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.language_v1beta2.types import language_service from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO diff --git a/google/cloud/language_v1beta2/services/language_service/client.py b/google/cloud/language_v1beta2/services/language_service/client.py index e2a0de6e..29fd4df7 100644 --- a/google/cloud/language_v1beta2/services/language_service/client.py +++ b/google/cloud/language_v1beta2/services/language_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.language_v1beta2.types import language_service from .transports.base import LanguageServiceTransport, DEFAULT_CLIENT_INFO @@ -268,8 +270,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/language_v1beta2/services/language_service/transports/base.py b/google/cloud/language_v1beta2/services/language_service/transports/base.py index 99b2bd55..ea3aba83 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/base.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -102,7 +102,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py index 7f7c7b2d..2936db35 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py index 331a9b1b..5e7e6213 100644 --- a/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py +++ b/google/cloud/language_v1beta2/services/language_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/language_v1beta2/types/language_service.py b/google/cloud/language_v1beta2/types/language_service.py index 840ae888..68f9297b 100644 --- 
a/google/cloud/language_v1beta2/types/language_service.py +++ b/google/cloud/language_v1beta2/types/language_service.py @@ -77,6 +77,7 @@ class Document(proto.Message): The content of the input in string format. Cloud audit logging exempt since it is based on user data. + This field is a member of `oneof`_ ``source``. gcs_content_uri (str): The Google Cloud Storage URI where the file content is @@ -84,6 +85,7 @@ class Document(proto.Message): gs://bucket_name/object_name. For more details, see https://cloud.google.com/storage/docs/reference-uris. NOTE: Cloud Storage object versioning is not supported. + This field is a member of `oneof`_ ``source``. language (str): The language of the document (if not specified, the language diff --git a/noxfile.py b/noxfile.py index f041f1f5..2a2001c4 100644 --- a/noxfile.py +++ b/noxfile.py @@ -175,7 +175,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=98") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index 11b0c990..00000000 --- a/owlbot.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -common = gcp.CommonTemplates() -default_version = "v1" - -for library in s.get_staging_dirs(default_version): - # Work around generator issue https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/cloud/language_{library.name}/types/language_service.py", - r"""Represents the input to API methods. 
- Attributes:""", - r"""Represents the input to API methods.\n - Attributes:""") - - s.move(library, excludes=["docs/index.rst", "README.rst", "setup.py"]) - -s.remove_staging_dirs() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=98, samples=True, microgenerator=True,) - -s.move(templated_files, excludes=['.coveragerc']) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) - -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- - -python.py_samples(skip_readmes=True) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/samples/snippets/api/noxfile.py b/samples/snippets/api/noxfile.py index 93a9122c..20cdfc62 100644 --- a/samples/snippets/api/noxfile.py +++ b/samples/snippets/api/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,45 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/api/requirements.txt b/samples/snippets/api/requirements.txt index 2d206082..c86677b3 100644 --- a/samples/snippets/api/requirements.txt +++ b/samples/snippets/api/requirements.txt @@ -1,3 +1,3 @@ -google-api-python-client==2.24.0 -google-auth==2.2.1 +google-api-python-client==2.36.0 +google-auth==2.3.3 google-auth-httplib2==0.1.0 diff --git a/samples/snippets/classify_text/classify_text_tutorial.py b/samples/snippets/classify_text/classify_text_tutorial.py index 9c05b83f..675f8499 100644 --- a/samples/snippets/classify_text/classify_text_tutorial.py +++ b/samples/snippets/classify_text/classify_text_tutorial.py @@ -42,7 +42,7 @@ def classify(text, verbose=True): document = language_v1.Document( content=text, type_=language_v1.Document.Type.PLAIN_TEXT ) - response = language_client.classify_text(request={'document': document}) + response = language_client.classify_text(request={"document": document}) categories = response.categories result = {} diff --git a/samples/snippets/classify_text/noxfile.py b/samples/snippets/classify_text/noxfile.py index 93a9122c..20cdfc62 100644 --- a/samples/snippets/classify_text/noxfile.py +++ b/samples/snippets/classify_text/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,45 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/classify_text/requirements.txt b/samples/snippets/classify_text/requirements.txt index 2323f5be..8ebc429c 100644 --- a/samples/snippets/classify_text/requirements.txt +++ b/samples/snippets/classify_text/requirements.txt @@ -1,3 +1,4 @@ -google-cloud-language==2.2.2 -numpy==1.20.1; python_version > '3.6' +google-cloud-language==2.3.1 +numpy==1.22.1; python_version > '3.7' +numpy==1.21.4; python_version == '3.7' numpy==1.19.5; python_version <= '3.6' diff --git a/samples/snippets/cloud-client/v1/noxfile.py b/samples/snippets/cloud-client/v1/noxfile.py index 93a9122c..20cdfc62 100644 --- a/samples/snippets/cloud-client/v1/noxfile.py +++ b/samples/snippets/cloud-client/v1/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,45 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/cloud-client/v1/quickstart.py b/samples/snippets/cloud-client/v1/quickstart.py index 4c4b06b5..b9b0e96c 100644 --- a/samples/snippets/cloud-client/v1/quickstart.py +++ b/samples/snippets/cloud-client/v1/quickstart.py @@ -30,10 +30,14 @@ def run_quickstart(): # The text to analyze text = u"Hello, world!" - document = language_v1.Document(content=text, type_=language_v1.Document.Type.PLAIN_TEXT) + document = language_v1.Document( + content=text, type_=language_v1.Document.Type.PLAIN_TEXT + ) # Detects the sentiment of the text - sentiment = client.analyze_sentiment(request={'document': document}).document_sentiment + sentiment = client.analyze_sentiment( + request={"document": document} + ).document_sentiment print("Text: {}".format(text)) print("Sentiment: {}, {}".format(sentiment.score, sentiment.magnitude)) diff --git a/samples/snippets/cloud-client/v1/requirements.txt b/samples/snippets/cloud-client/v1/requirements.txt index fc0ee401..d2b8f2f0 100644 --- a/samples/snippets/cloud-client/v1/requirements.txt +++ b/samples/snippets/cloud-client/v1/requirements.txt @@ -1 +1 @@ -google-cloud-language==2.2.2 +google-cloud-language==2.3.1 diff --git a/samples/snippets/cloud-client/v1/set_endpoint.py b/samples/snippets/cloud-client/v1/set_endpoint.py index e9ad97d3..c49537a5 100644 --- a/samples/snippets/cloud-client/v1/set_endpoint.py +++ b/samples/snippets/cloud-client/v1/set_endpoint.py @@ -31,7 +31,9 @@ def set_endpoint(): ) # Detects the sentiment of the text - sentiment = client.analyze_sentiment(request={'document': document}).document_sentiment + sentiment = client.analyze_sentiment( + request={"document": document} + ).document_sentiment print("Sentiment: {}, {}".format(sentiment.score, sentiment.magnitude)) diff --git a/samples/snippets/generated-samples/v1/language_sentiment_text.py b/samples/snippets/generated-samples/v1/language_sentiment_text.py index 9f975023..4170ddbc 100644 --- a/samples/snippets/generated-samples/v1/language_sentiment_text.py +++ b/samples/snippets/generated-samples/v1/language_sentiment_text.py @@ -39,7 +39,7 @@ def 
sample_analyze_sentiment(content): type_ = language_v1.Document.Type.PLAIN_TEXT document = {"type_": type_, "content": content} - response = client.analyze_sentiment(request={'document': document}) + response = client.analyze_sentiment(request={"document": document}) sentiment = response.document_sentiment print("Score: {}".format(sentiment.score)) print("Magnitude: {}".format(sentiment.magnitude)) diff --git a/samples/snippets/generated-samples/v1/noxfile.py b/samples/snippets/generated-samples/v1/noxfile.py index 93a9122c..20cdfc62 100644 --- a/samples/snippets/generated-samples/v1/noxfile.py +++ b/samples/snippets/generated-samples/v1/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,45 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/generated-samples/v1/requirements.txt b/samples/snippets/generated-samples/v1/requirements.txt index fc0ee401..d2b8f2f0 100644 --- a/samples/snippets/generated-samples/v1/requirements.txt +++ b/samples/snippets/generated-samples/v1/requirements.txt @@ -1 +1 @@ -google-cloud-language==2.2.2 +google-cloud-language==2.3.1 diff --git a/samples/snippets/sentiment/noxfile.py b/samples/snippets/sentiment/noxfile.py index 93a9122c..20cdfc62 100644 --- a/samples/snippets/sentiment/noxfile.py +++ b/samples/snippets/sentiment/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,45 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. - # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) diff --git a/samples/snippets/sentiment/requirements.txt b/samples/snippets/sentiment/requirements.txt index fc0ee401..d2b8f2f0 100644 --- a/samples/snippets/sentiment/requirements.txt +++ b/samples/snippets/sentiment/requirements.txt @@ -1 +1 @@ -google-cloud-language==2.2.2 +google-cloud-language==2.3.1 diff --git a/samples/snippets/sentiment/sentiment_analysis.py b/samples/snippets/sentiment/sentiment_analysis.py index 2333bf82..e82c3a68 100644 --- a/samples/snippets/sentiment/sentiment_analysis.py +++ b/samples/snippets/sentiment/sentiment_analysis.py @@ -51,8 +51,10 @@ def analyze(movie_review_filename): # Instantiates a plain text document. content = review_file.read() - document = language_v1.Document(content=content, type_=language_v1.Document.Type.PLAIN_TEXT) - annotations = client.analyze_sentiment(request={'document': document}) + document = language_v1.Document( + content=content, type_=language_v1.Document.Type.PLAIN_TEXT + ) + annotations = client.analyze_sentiment(request={"document": document}) # Print the results print_result(annotations) diff --git a/samples/v1/language_entity_sentiment_text.py b/samples/v1/language_entity_sentiment_text.py index b28434df..4e1341d5 100644 --- a/samples/v1/language_entity_sentiment_text.py +++ b/samples/v1/language_entity_sentiment_text.py @@ -40,7 +40,7 @@ def sample_analyze_entity_sentiment(text_content): # text_content = 'Grapes are good. Bananas are bad.' # Available types: PLAIN_TEXT, HTML - type_ = language_v1.Document.Type.PLAIN_TEXT + type_ = language_v1.types.Document.Type.PLAIN_TEXT # Optional. If not specified, the language is automatically detected. # For list of supported languages: diff --git a/setup.py b/setup.py index 04e3a675..cd04169e 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-language" description = "Google Cloud Natural Language API client library" -version = "2.3.1" +version = "2.3.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/tests/unit/gapic/language_v1/test_language_service.py b/tests/unit/gapic/language_v1/test_language_service.py index 9be55b98..3304ab5e 100644 --- a/tests/unit/gapic/language_v1/test_language_service.py +++ b/tests/unit/gapic/language_v1/test_language_service.py @@ -247,20 +247,20 @@ def test_language_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -329,7 +329,7 @@ def test_language_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -424,7 +424,7 @@ def test_language_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -455,7 +455,7 @@ def test_language_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -488,9 +488,10 @@ def test_language_service_client_client_options_from_dict(): ) -def test_analyze_sentiment( - transport: str = "grpc", request_type=language_service.AnalyzeSentimentRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeSentimentRequest, dict,] +) +def test_analyze_sentiment(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -519,10 +520,6 @@ def test_analyze_sentiment( assert response.language == "language_value" -def test_analyze_sentiment_from_dict(): - test_analyze_sentiment(request_type=dict) - - def test_analyze_sentiment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -600,10 +597,14 @@ def test_analyze_sentiment_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_sentiment_flattened_error(): @@ -650,10 +651,14 @@ async def test_analyze_sentiment_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -674,9 +679,10 @@ async def test_analyze_sentiment_flattened_error_async(): ) -def test_analyze_entities( - transport: str = "grpc", request_type=language_service.AnalyzeEntitiesRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeEntitiesRequest, dict,] +) +def test_analyze_entities(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -703,10 +709,6 @@ def test_analyze_entities( assert response.language == "language_value" -def test_analyze_entities_from_dict(): - test_analyze_entities(request_type=dict) - - def test_analyze_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -778,10 +780,14 @@ def test_analyze_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_entities_flattened_error(): @@ -826,10 +832,14 @@ async def test_analyze_entities_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -850,9 +860,10 @@ async def test_analyze_entities_flattened_error_async(): ) -def test_analyze_entity_sentiment( - transport: str = "grpc", request_type=language_service.AnalyzeEntitySentimentRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeEntitySentimentRequest, dict,] +) +def test_analyze_entity_sentiment(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -881,10 +892,6 @@ def test_analyze_entity_sentiment( assert response.language == "language_value" -def test_analyze_entity_sentiment_from_dict(): - test_analyze_entity_sentiment(request_type=dict) - - def test_analyze_entity_sentiment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -962,10 +969,14 @@ def test_analyze_entity_sentiment_flattened(): # request object values. 
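The other recurring change is in the flattened-call assertions: each field is bound to a local `arg` and compared against a `mock_val`, so a failure names the single field that differs instead of dumping the whole request object. A generic, runnable illustration with a bare mock (none of these names come from the library):

```python
# Generic illustration of the arg/mock_val assertion style.
from unittest import mock

call = mock.Mock()
call({"document": "doc", "encoding_type": "UTF8"})

_, args, _ = call.mock_calls[0]
arg = args[0]["document"]
mock_val = "doc"
assert arg == mock_val
```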
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_entity_sentiment_flattened_error(): @@ -1012,10 +1023,14 @@ async def test_analyze_entity_sentiment_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -1036,9 +1051,8 @@ async def test_analyze_entity_sentiment_flattened_error_async(): ) -def test_analyze_syntax( - transport: str = "grpc", request_type=language_service.AnalyzeSyntaxRequest -): +@pytest.mark.parametrize("request_type", [language_service.AnalyzeSyntaxRequest, dict,]) +def test_analyze_syntax(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1065,10 +1079,6 @@ def test_analyze_syntax( assert response.language == "language_value" -def test_analyze_syntax_from_dict(): - test_analyze_syntax(request_type=dict) - - def test_analyze_syntax_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1139,10 +1149,14 @@ def test_analyze_syntax_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_syntax_flattened_error(): @@ -1187,10 +1201,14 @@ async def test_analyze_syntax_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -1211,9 +1229,8 @@ async def test_analyze_syntax_flattened_error_async(): ) -def test_classify_text( - transport: str = "grpc", request_type=language_service.ClassifyTextRequest -): +@pytest.mark.parametrize("request_type", [language_service.ClassifyTextRequest, dict,]) +def test_classify_text(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1237,10 +1254,6 @@ def test_classify_text( assert isinstance(response, language_service.ClassifyTextResponse) -def test_classify_text_from_dict(): - test_classify_text(request_type=dict) - - def test_classify_text_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1309,9 +1322,11 @@ def test_classify_text_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) + assert arg == mock_val def test_classify_text_flattened_error(): @@ -1354,9 +1369,11 @@ async def test_classify_text_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) + assert arg == mock_val @pytest.mark.asyncio @@ -1376,9 +1393,8 @@ async def test_classify_text_flattened_error_async(): ) -def test_annotate_text( - transport: str = "grpc", request_type=language_service.AnnotateTextRequest -): +@pytest.mark.parametrize("request_type", [language_service.AnnotateTextRequest, dict,]) +def test_annotate_text(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1405,10 +1421,6 @@ def test_annotate_text( assert response.language == "language_value" -def test_annotate_text_from_dict(): - test_annotate_text(request_type=dict) - - def test_annotate_text_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1480,13 +1492,17 @@ def test_annotate_text_flattened(): # request object values. 
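The annotate_text flattened tests that follow assert on three flattened fields: document, features, and encoding_type. From the caller's side that flattened surface looks roughly like the sketch below; it constructs a live client, so it assumes application default credentials and network access, and the input text is a placeholder:

```python
# Sketch of the flattened annotate_text call these tests exercise.
# Assumes application default credentials are configured.
from google.cloud import language_v1

client = language_v1.LanguageServiceClient()
document = language_v1.Document(
    content="Python is a widely used programming language.",
    type_=language_v1.Document.Type.PLAIN_TEXT,
)
features = language_v1.AnnotateTextRequest.Features(extract_syntax=True)
response = client.annotate_text(
    document=document,
    features=features,
    encoding_type=language_v1.EncodingType.UTF8,
)
print(len(response.tokens))
```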
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].features == language_service.AnnotateTextRequest.Features( - extract_syntax=True - ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].features + mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_annotate_text_flattened_error(): @@ -1533,13 +1549,17 @@ async def test_annotate_text_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].features == language_service.AnnotateTextRequest.Features( - extract_syntax=True - ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].features + mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -2068,7 +2088,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/language_v1beta2/test_language_service.py b/tests/unit/gapic/language_v1beta2/test_language_service.py index 5b0fd659..b12fd591 100644 --- a/tests/unit/gapic/language_v1beta2/test_language_service.py +++ b/tests/unit/gapic/language_v1beta2/test_language_service.py @@ -249,20 +249,20 @@ def test_language_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -331,7 +331,7 @@ def test_language_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -426,7 +426,7 @@ def test_language_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -457,7 +457,7 @@ def test_language_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -490,9 +490,10 @@ def test_language_service_client_client_options_from_dict(): ) -def test_analyze_sentiment( - transport: str = "grpc", request_type=language_service.AnalyzeSentimentRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeSentimentRequest, dict,] +) +def test_analyze_sentiment(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -521,10 +522,6 @@ def test_analyze_sentiment( assert response.language == "language_value" -def test_analyze_sentiment_from_dict(): - test_analyze_sentiment(request_type=dict) - - def test_analyze_sentiment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -602,10 +599,14 @@ def test_analyze_sentiment_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_sentiment_flattened_error(): @@ -652,10 +653,14 @@ async def test_analyze_sentiment_flattened_async(): # request object values. 
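Both the v1 and v1beta2 files also tighten the client_options error-path checks so the client is always constructed with an explicit transport name. What those checks pin down is the early environment-variable validation in the generated client; a standalone sketch of one such check, assuming (as the ordering of the generated __init__ suggests) that the unsupported value is rejected before any credentials are resolved:

```python
# Sketch of the env-var validation the transport_name changes exercise.
# Assumes the check runs before credential resolution in the generated client.
import os
from unittest import mock

import pytest
from google.api_core.exceptions import MutualTLSChannelError
from google.cloud.language_v1beta2 import LanguageServiceClient


def test_unsupported_mtls_endpoint_value_raises():
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}
    ):
        with pytest.raises(MutualTLSChannelError):
            LanguageServiceClient(transport="grpc")
```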
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -676,9 +681,10 @@ async def test_analyze_sentiment_flattened_error_async(): ) -def test_analyze_entities( - transport: str = "grpc", request_type=language_service.AnalyzeEntitiesRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeEntitiesRequest, dict,] +) +def test_analyze_entities(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -705,10 +711,6 @@ def test_analyze_entities( assert response.language == "language_value" -def test_analyze_entities_from_dict(): - test_analyze_entities(request_type=dict) - - def test_analyze_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -780,10 +782,14 @@ def test_analyze_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_entities_flattened_error(): @@ -828,10 +834,14 @@ async def test_analyze_entities_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -852,9 +862,10 @@ async def test_analyze_entities_flattened_error_async(): ) -def test_analyze_entity_sentiment( - transport: str = "grpc", request_type=language_service.AnalyzeEntitySentimentRequest -): +@pytest.mark.parametrize( + "request_type", [language_service.AnalyzeEntitySentimentRequest, dict,] +) +def test_analyze_entity_sentiment(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -883,10 +894,6 @@ def test_analyze_entity_sentiment( assert response.language == "language_value" -def test_analyze_entity_sentiment_from_dict(): - test_analyze_entity_sentiment(request_type=dict) - - def test_analyze_entity_sentiment_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -964,10 +971,14 @@ def test_analyze_entity_sentiment_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_entity_sentiment_flattened_error(): @@ -1014,10 +1025,14 @@ async def test_analyze_entity_sentiment_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -1038,9 +1053,8 @@ async def test_analyze_entity_sentiment_flattened_error_async(): ) -def test_analyze_syntax( - transport: str = "grpc", request_type=language_service.AnalyzeSyntaxRequest -): +@pytest.mark.parametrize("request_type", [language_service.AnalyzeSyntaxRequest, dict,]) +def test_analyze_syntax(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1067,10 +1081,6 @@ def test_analyze_syntax( assert response.language == "language_value" -def test_analyze_syntax_from_dict(): - test_analyze_syntax(request_type=dict) - - def test_analyze_syntax_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1141,10 +1151,14 @@ def test_analyze_syntax_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_analyze_syntax_flattened_error(): @@ -1189,10 +1203,14 @@ async def test_analyze_syntax_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -1213,9 +1231,8 @@ async def test_analyze_syntax_flattened_error_async(): ) -def test_classify_text( - transport: str = "grpc", request_type=language_service.ClassifyTextRequest -): +@pytest.mark.parametrize("request_type", [language_service.ClassifyTextRequest, dict,]) +def test_classify_text(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1239,10 +1256,6 @@ def test_classify_text( assert isinstance(response, language_service.ClassifyTextResponse) -def test_classify_text_from_dict(): - test_classify_text(request_type=dict) - - def test_classify_text_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1311,9 +1324,11 @@ def test_classify_text_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) + assert arg == mock_val def test_classify_text_flattened_error(): @@ -1356,9 +1371,11 @@ async def test_classify_text_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) + assert arg == mock_val @pytest.mark.asyncio @@ -1378,9 +1395,8 @@ async def test_classify_text_flattened_error_async(): ) -def test_annotate_text( - transport: str = "grpc", request_type=language_service.AnnotateTextRequest -): +@pytest.mark.parametrize("request_type", [language_service.AnnotateTextRequest, dict,]) +def test_annotate_text(request_type, transport: str = "grpc"): client = LanguageServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1407,10 +1423,6 @@ def test_annotate_text( assert response.language == "language_value" -def test_annotate_text_from_dict(): - test_annotate_text(request_type=dict) - - def test_annotate_text_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1482,13 +1494,17 @@ def test_annotate_text_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].features == language_service.AnnotateTextRequest.Features( - extract_syntax=True - ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].features + mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val def test_annotate_text_flattened_error(): @@ -1535,13 +1551,17 @@ async def test_annotate_text_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].document == language_service.Document( + arg = args[0].document + mock_val = language_service.Document( type_=language_service.Document.Type.PLAIN_TEXT ) - assert args[0].features == language_service.AnnotateTextRequest.Features( - extract_syntax=True - ) - assert args[0].encoding_type == language_service.EncodingType.UTF8 + assert arg == mock_val + arg = args[0].features + mock_val = language_service.AnnotateTextRequest.Features(extract_syntax=True) + assert arg == mock_val + arg = args[0].encoding_type + mock_val = language_service.EncodingType.UTF8 + assert arg == mock_val @pytest.mark.asyncio @@ -2070,7 +2090,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object(