diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..48d75b4 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +omit = + google/cloud/dataqna/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/.flake8 b/.flake8 index ed93163..29227d4 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. + **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 0000000..864c176 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:4c981a6b6f2b8914a448d7b3a01688365be03e3ed26dfee399a6aa77fb112eaa diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 0000000..71a9478 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +docker: + image: gcr.io/repo-automation-bots/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/dataqna/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: ee56c3493ec6aeb237ff515ecea949710944a20f + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e5cc861..ae570eb 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,6 +5,7 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # The @googleapis/api-bigquery is the default owner for changes in this repo -* @googleapis/api-bigquery +* @googleapis/api-bigquery @googleapis/yoshi-python +# The python-samples-reviewers team is the default owner for samples changes /samples/ @googleapis/python-samples-owners diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..6fe78aa --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 3060153..e312027 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-data-qna +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-data-qna" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 1118107..a8343a5 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-data-qna/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/release.sh b/.kokoro/release.sh index 8273069..0be3dc0 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-data-qna python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index eaa9a4c..4bce15a 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-data-qna/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..0eda5e2 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-data-qna + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 0000000..cf5de74 --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder.
+ file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 26aa10b..cbda541 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewind the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-data-qna # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder.
- file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 719bcd5..4af6cdc 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ad8334..4f00c7c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,22 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -12,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.2 hooks: - id: flake8 diff --git a/.repo-metadata.json b/.repo-metadata.json index 016462c..748f413 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -6,6 +6,7 @@ "issue_tracker": "", "release_level": "alpha", "language": "python", + "library_type": "GAPIC_AUTO", "repo": "googleapis/python-data-qna", "distribution_name": "google-cloud-data-qna", "api_id": "dataqna.googleapis.com" diff --git a/.trampolinerc b/.trampolinerc index 995ee29..383b6ec 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. 
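The Kokoro and trampoline changes above revolve around the new NOX_SESSION pass-through: build.sh now runs `python3 -m nox -s ${NOX_SESSION:-}` when the variable is set, and docs/docs-presubmit.cfg sets it to "docs docfx" so the docs presubmit runs only those two sessions. For orientation, a minimal, hypothetical noxfile.py sketch with sessions of those names (this repository's real noxfile.py is not part of this diff and may differ):

    # Hypothetical sketch -- session names mirror the NOX_SESSION value above.
    import nox

    @nox.session(python="3.8")
    def docs(session):
        # Build the Sphinx HTML documentation for the package.
        session.install("-e", ".")
        session.install("sphinx")
        session.run("sphinx-build", "-W", "-b", "html", "docs/", "docs/_build/html/")

    @nox.session(python="3.8")
    def docfx(session):
        # Second named session, shown only so `nox -s docs docfx` has two targets;
        # the real docfx session would use a DocFX-specific Sphinx setup.
        session.install("-e", ".")
        session.install("sphinx")
        session.run("sphinx-build", "-T", "-b", "html", "docs/", "docs/_build/docfx/")

Because ${NOX_SESSION:-} is expanded unquoted in build.sh, the value "docs docfx" word-splits into two session names, and nox runs exactly those sessions instead of the full matrix.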
diff --git a/CHANGELOG.md b/CHANGELOG.md index 0ad3855..ce4b280 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.2.0](https://www.github.com/googleapis/python-data-qna/compare/v0.1.1...v0.2.0) (2021-05-18) + + +### Features + +* add from_service_account_info factory and fix sphinx identifiers ([#21](https://www.github.com/googleapis/python-data-qna/issues/21)) ([3fa53fb](https://www.github.com/googleapis/python-data-qna/commit/3fa53fbd42cc640d7f3442d77bd6357ce0e5e8d6)) +* support self-signed JWT flow for service accounts ([762c28c](https://www.github.com/googleapis/python-data-qna/commit/762c28cc095211301e922c9ceafef1d8cd36ad7d)) + + +### Bug Fixes + +* add async client to %name_%version/init.py ([762c28c](https://www.github.com/googleapis/python-data-qna/commit/762c28cc095211301e922c9ceafef1d8cd36ad7d)) +* **deps:** add packaging requirement ([#46](https://www.github.com/googleapis/python-data-qna/issues/46)) ([afb3009](https://www.github.com/googleapis/python-data-qna/commit/afb3009e762aff472c07f9884469d1cee78bc660)) +* remove gRPC send/recv limits ([#15](https://www.github.com/googleapis/python-data-qna/issues/15)) ([044d47a](https://www.github.com/googleapis/python-data-qna/commit/044d47ab0ab3c9ccb1b1f81fb974be6375a0cf52)) + ### [0.1.1](https://www.github.com/googleapis/python-data-qna/compare/v0.1.0...v0.1.1) (2020-12-04) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 63dbb4d..8effc22 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,34 +142,25 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local auth settings and change some configuration in your project to run all the tests. 
-- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-data-qna/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/LICENSE b/LICENSE index a8ee855..d645695 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d1..e783f4c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..8b58ae9 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
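The from_service_account_info factory noted in the 0.2.0 CHANGELOG entry above is added to each generated client later in this diff; it parallels the existing from_service_account_file helper but accepts an already-parsed dict. A minimal usage sketch (the key-file path is a placeholder, not a real credential):

    import json

    from google.cloud import dataqna_v1alpha

    # Placeholder path to a service-account JSON key for your project.
    with open("service-account.json") as fh:
        info = json.load(fh)

    # New in 0.2.0: build the client from the parsed dict ...
    client = dataqna_v1alpha.QuestionServiceClient.from_service_account_info(info)

    # ... which complements the existing file-based factory.
    client = dataqna_v1alpha.QuestionServiceClient.from_service_account_file(
        "service-account.json"
    )

Both classmethods simply construct service_account.Credentials and pass them through to the normal constructor, as the client.py hunks below show.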
diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf22..b0a2954 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git a/docs/conf.py b/docs/conf.py index 169f9b2..5b1602d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. # # google-cloud-data-qna documentation build configuration file # diff --git a/docs/dataqna_v1alpha/auto_suggestion_service.rst b/docs/dataqna_v1alpha/auto_suggestion_service.rst new file mode 100644 index 0000000..4a96dd5 --- /dev/null +++ b/docs/dataqna_v1alpha/auto_suggestion_service.rst @@ -0,0 +1,6 @@ +AutoSuggestionService +--------------------------------------- + +.. automodule:: google.cloud.dataqna_v1alpha.services.auto_suggestion_service + :members: + :inherited-members: diff --git a/docs/dataqna_v1alpha/question_service.rst b/docs/dataqna_v1alpha/question_service.rst new file mode 100644 index 0000000..0c7fe89 --- /dev/null +++ b/docs/dataqna_v1alpha/question_service.rst @@ -0,0 +1,6 @@ +QuestionService +--------------------------------- + +.. automodule:: google.cloud.dataqna_v1alpha.services.question_service + :members: + :inherited-members: diff --git a/docs/dataqna_v1alpha/services.rst b/docs/dataqna_v1alpha/services.rst index 8d53376..00a6eb5 100644 --- a/docs/dataqna_v1alpha/services.rst +++ b/docs/dataqna_v1alpha/services.rst @@ -1,9 +1,7 @@ Services for Google Cloud Dataqna v1alpha API ============================================= +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.dataqna_v1alpha.services.auto_suggestion_service - :members: - :inherited-members: -.. automodule:: google.cloud.dataqna_v1alpha.services.question_service - :members: - :inherited-members: + auto_suggestion_service + question_service diff --git a/docs/dataqna_v1alpha/types.rst b/docs/dataqna_v1alpha/types.rst index 3bf3d9b..66f9ab9 100644 --- a/docs/dataqna_v1alpha/types.rst +++ b/docs/dataqna_v1alpha/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Dataqna v1alpha API .. automodule:: google.cloud.dataqna_v1alpha.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/dataqna/__init__.py b/google/cloud/dataqna/__init__.py index 15c865b..c4b824e 100644 --- a/google/cloud/dataqna/__init__.py +++ b/google/cloud/dataqna/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,38 +14,39 @@ # limitations under the License. 
# -from google.cloud.dataqna_v1alpha.services.auto_suggestion_service.async_client import ( - AutoSuggestionServiceAsyncClient, -) from google.cloud.dataqna_v1alpha.services.auto_suggestion_service.client import ( AutoSuggestionServiceClient, ) -from google.cloud.dataqna_v1alpha.services.question_service.async_client import ( - QuestionServiceAsyncClient, +from google.cloud.dataqna_v1alpha.services.auto_suggestion_service.async_client import ( + AutoSuggestionServiceAsyncClient, ) from google.cloud.dataqna_v1alpha.services.question_service.client import ( QuestionServiceClient, ) +from google.cloud.dataqna_v1alpha.services.question_service.async_client import ( + QuestionServiceAsyncClient, +) + from google.cloud.dataqna_v1alpha.types.annotated_string import AnnotatedString +from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import Suggestion +from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import SuggestionInfo from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import ( SuggestQueriesRequest, ) from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import ( SuggestQueriesResponse, ) -from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import Suggestion -from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import SuggestionInfo from google.cloud.dataqna_v1alpha.types.auto_suggestion_service import SuggestionType from google.cloud.dataqna_v1alpha.types.question import BigQueryJob from google.cloud.dataqna_v1alpha.types.question import DataQuery from google.cloud.dataqna_v1alpha.types.question import DebugFlags from google.cloud.dataqna_v1alpha.types.question import ExecutionInfo from google.cloud.dataqna_v1alpha.types.question import HumanReadable -from google.cloud.dataqna_v1alpha.types.question import InterpretEntity -from google.cloud.dataqna_v1alpha.types.question import InterpretError from google.cloud.dataqna_v1alpha.types.question import Interpretation from google.cloud.dataqna_v1alpha.types.question import InterpretationStructure +from google.cloud.dataqna_v1alpha.types.question import InterpretError from google.cloud.dataqna_v1alpha.types.question import Question +from google.cloud.dataqna_v1alpha.types.question import InterpretEntity from google.cloud.dataqna_v1alpha.types.question_service import CreateQuestionRequest from google.cloud.dataqna_v1alpha.types.question_service import ExecuteQuestionRequest from google.cloud.dataqna_v1alpha.types.question_service import GetQuestionRequest @@ -57,30 +57,30 @@ from google.cloud.dataqna_v1alpha.types.user_feedback import UserFeedback __all__ = ( - "AnnotatedString", - "AutoSuggestionServiceAsyncClient", "AutoSuggestionServiceClient", + "AutoSuggestionServiceAsyncClient", + "QuestionServiceClient", + "QuestionServiceAsyncClient", + "AnnotatedString", + "Suggestion", + "SuggestionInfo", + "SuggestQueriesRequest", + "SuggestQueriesResponse", + "SuggestionType", "BigQueryJob", - "CreateQuestionRequest", "DataQuery", "DebugFlags", - "ExecuteQuestionRequest", "ExecutionInfo", - "GetQuestionRequest", - "GetUserFeedbackRequest", "HumanReadable", - "InterpretEntity", - "InterpretError", "Interpretation", "InterpretationStructure", + "InterpretError", "Question", - "QuestionServiceAsyncClient", - "QuestionServiceClient", - "SuggestQueriesRequest", - "SuggestQueriesResponse", - "Suggestion", - "SuggestionInfo", - "SuggestionType", + "InterpretEntity", + "CreateQuestionRequest", + "ExecuteQuestionRequest", + "GetQuestionRequest", + "GetUserFeedbackRequest", 
"UpdateUserFeedbackRequest", "UserFeedback", ) diff --git a/google/cloud/dataqna_v1alpha/__init__.py b/google/cloud/dataqna_v1alpha/__init__.py index 37d7ed3..46b3227 100644 --- a/google/cloud/dataqna_v1alpha/__init__.py +++ b/google/cloud/dataqna_v1alpha/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,23 +15,26 @@ # from .services.auto_suggestion_service import AutoSuggestionServiceClient +from .services.auto_suggestion_service import AutoSuggestionServiceAsyncClient from .services.question_service import QuestionServiceClient +from .services.question_service import QuestionServiceAsyncClient + from .types.annotated_string import AnnotatedString -from .types.auto_suggestion_service import SuggestQueriesRequest -from .types.auto_suggestion_service import SuggestQueriesResponse from .types.auto_suggestion_service import Suggestion from .types.auto_suggestion_service import SuggestionInfo +from .types.auto_suggestion_service import SuggestQueriesRequest +from .types.auto_suggestion_service import SuggestQueriesResponse from .types.auto_suggestion_service import SuggestionType from .types.question import BigQueryJob from .types.question import DataQuery from .types.question import DebugFlags from .types.question import ExecutionInfo from .types.question import HumanReadable -from .types.question import InterpretEntity -from .types.question import InterpretError from .types.question import Interpretation from .types.question import InterpretationStructure +from .types.question import InterpretError from .types.question import Question +from .types.question import InterpretEntity from .types.question_service import CreateQuestionRequest from .types.question_service import ExecuteQuestionRequest from .types.question_service import GetQuestionRequest @@ -40,8 +42,9 @@ from .types.question_service import UpdateUserFeedbackRequest from .types.user_feedback import UserFeedback - __all__ = ( + "AutoSuggestionServiceAsyncClient", + "QuestionServiceAsyncClient", "AnnotatedString", "AutoSuggestionServiceClient", "BigQueryJob", @@ -58,6 +61,7 @@ "Interpretation", "InterpretationStructure", "Question", + "QuestionServiceClient", "SuggestQueriesRequest", "SuggestQueriesResponse", "Suggestion", @@ -65,5 +69,4 @@ "SuggestionType", "UpdateUserFeedbackRequest", "UserFeedback", - "QuestionServiceClient", ) diff --git a/google/cloud/dataqna_v1alpha/gapic_metadata.json b/google/cloud/dataqna_v1alpha/gapic_metadata.json new file mode 100644 index 0000000..7ba1907 --- /dev/null +++ b/google/cloud/dataqna_v1alpha/gapic_metadata.json @@ -0,0 +1,97 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dataqna_v1alpha", + "protoPackage": "google.cloud.dataqna.v1alpha", + "schema": "1.0", + "services": { + "AutoSuggestionService": { + "clients": { + "grpc": { + "libraryClient": "AutoSuggestionServiceClient", + "rpcs": { + "SuggestQueries": { + "methods": [ + "suggest_queries" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AutoSuggestionServiceAsyncClient", + "rpcs": { + "SuggestQueries": { + "methods": [ + "suggest_queries" + ] + } + } + } + } + }, + "QuestionService": { + "clients": { + "grpc": { + "libraryClient": "QuestionServiceClient", + "rpcs": { + "CreateQuestion": { + "methods": [ + "create_question" + ] + }, + "ExecuteQuestion": { + "methods": [ + "execute_question" + ] + }, + "GetQuestion": 
{ + "methods": [ + "get_question" + ] + }, + "GetUserFeedback": { + "methods": [ + "get_user_feedback" + ] + }, + "UpdateUserFeedback": { + "methods": [ + "update_user_feedback" + ] + } + } + }, + "grpc-async": { + "libraryClient": "QuestionServiceAsyncClient", + "rpcs": { + "CreateQuestion": { + "methods": [ + "create_question" + ] + }, + "ExecuteQuestion": { + "methods": [ + "execute_question" + ] + }, + "GetQuestion": { + "methods": [ + "get_question" + ] + }, + "GetUserFeedback": { + "methods": [ + "get_user_feedback" + ] + }, + "UpdateUserFeedback": { + "methods": [ + "update_user_feedback" + ] + } + } + } + } + } + } +} diff --git a/google/cloud/dataqna_v1alpha/services/__init__.py b/google/cloud/dataqna_v1alpha/services/__init__.py index 42ffdf2..4de6597 100644 --- a/google/cloud/dataqna_v1alpha/services/__init__.py +++ b/google/cloud/dataqna_v1alpha/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/__init__.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/__init__.py index 86c7bd3..36d9bc8 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/__init__.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from .client import AutoSuggestionServiceClient from .async_client import AutoSuggestionServiceAsyncClient diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py index 41fe4fa..9b631ff 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict import functools import re @@ -22,14 +20,13 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dataqna_v1alpha.types import auto_suggestion_service - from .transports.base import AutoSuggestionServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import AutoSuggestionServiceGrpcAsyncIOTransport from .client import AutoSuggestionServiceClient @@ -120,24 +117,20 @@ class AutoSuggestionServiceAsyncClient: parse_common_billing_account_path = staticmethod( AutoSuggestionServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(AutoSuggestionServiceClient.common_folder_path) parse_common_folder_path = staticmethod( AutoSuggestionServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( AutoSuggestionServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( AutoSuggestionServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(AutoSuggestionServiceClient.common_project_path) parse_common_project_path = staticmethod( AutoSuggestionServiceClient.parse_common_project_path ) - common_location_path = staticmethod( AutoSuggestionServiceClient.common_location_path ) @@ -145,12 +138,42 @@ class AutoSuggestionServiceAsyncClient: AutoSuggestionServiceClient.parse_common_location_path ) - from_service_account_file = AutoSuggestionServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoSuggestionServiceAsyncClient: The constructed client. + """ + return AutoSuggestionServiceClient.from_service_account_info.__func__(AutoSuggestionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoSuggestionServiceAsyncClient: The constructed client. + """ + return AutoSuggestionServiceClient.from_service_account_file.__func__(AutoSuggestionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property def transport(self) -> AutoSuggestionServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: AutoSuggestionServiceTransport: The transport used by the client instance. 
@@ -165,12 +188,12 @@ def transport(self) -> AutoSuggestionServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, AutoSuggestionServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the auto suggestion service client. + """Instantiates the auto suggestion service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -202,7 +225,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = AutoSuggestionServiceClient( credentials=credentials, transport=transport, @@ -222,9 +244,8 @@ async def suggest_queries( AutoSuggestion tolerance should be less than 1 second. Args: - request (:class:`~.auto_suggestion_service.SuggestQueriesRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.SuggestQueriesRequest`): The request object. Request for query suggestions. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -232,11 +253,10 @@ async def suggest_queries( sent along with the request as metadata. Returns: - ~.auto_suggestion_service.SuggestQueriesResponse: + google.cloud.dataqna_v1alpha.types.SuggestQueriesResponse: Response to SuggestQueries. """ # Create or coerce a protobuf request object. - request = auto_suggestion_service.SuggestQueriesRequest(request) # Wrap the RPC method; this adds retry and timeout information, diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py index 5ec549f..b2105ef 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,17 +21,16 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dataqna_v1alpha.types import auto_suggestion_service - from .transports.base import AutoSuggestionServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import AutoSuggestionServiceGrpcTransport from .transports.grpc_asyncio import AutoSuggestionServiceGrpcAsyncIOTransport @@ -56,7 +53,7 @@ class AutoSuggestionServiceClientMeta(type): def get_transport_class( cls, label: str = None, ) -> Type[AutoSuggestionServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -150,7 +147,8 @@ class AutoSuggestionServiceClient(metaclass=AutoSuggestionServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -182,10 +180,27 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoSuggestionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -194,7 +209,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + AutoSuggestionServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -204,16 +219,17 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> AutoSuggestionServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - AutoSuggestionServiceTransport: The transport used by the client instance. + AutoSuggestionServiceTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -226,7 +242,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -237,7 +253,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -248,7 +264,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -259,7 +275,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -273,12 +289,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, AutoSuggestionServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the auto suggestion service client. + """Instantiates the auto suggestion service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -286,10 +302,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.AutoSuggestionServiceTransport]): The + transport (Union[str, AutoSuggestionServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -325,21 +341,18 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -351,12 +364,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -371,8 +386,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -382,7 +397,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -399,9 +414,8 @@ def suggest_queries( AutoSuggestion tolerance should be less than 1 second. Args: - request (:class:`~.auto_suggestion_service.SuggestQueriesRequest`): + request (google.cloud.dataqna_v1alpha.types.SuggestQueriesRequest): The request object. Request for query suggestions. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -409,11 +423,10 @@ def suggest_queries( sent along with the request as metadata. Returns: - ~.auto_suggestion_service.SuggestQueriesResponse: + google.cloud.dataqna_v1alpha.types.SuggestQueriesResponse: Response to SuggestQueries. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a auto_suggestion_service.SuggestQueriesRequest. 
# There's no risk of modifying the input as we've already verified diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/__init__.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/__init__.py index bb6960e..ef507ef 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/__init__.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type @@ -30,7 +28,6 @@ _transport_registry["grpc"] = AutoSuggestionServiceGrpcTransport _transport_registry["grpc_asyncio"] = AutoSuggestionServiceGrpcAsyncIOTransport - __all__ = ( "AutoSuggestionServiceTransport", "AutoSuggestionServiceGrpcTransport", diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/base.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/base.py index bd76be9..3cd57ca 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/base.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataqna_v1alpha.types import auto_suggestion_service - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-dataqna",).version, @@ -35,27 +34,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class AutoSuggestionServiceTransport(abc.ABC): """Abstract transport class for AutoSuggestionService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataqna.googleapis.com" + def __init__( self, *, - host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = 
DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -64,13 +77,13 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -78,28 +91,75 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. 
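The TODO above exists because older google-auth releases do not accept a separate default_scopes keyword, so the transport has to decide at runtime which keyword arguments it may pass to google.auth.default() and google.auth.load_credentials_from_file(). A minimal standalone sketch of that decision (illustration only, assuming google-auth 1.25.0 is where default_scopes appeared; the real helper is the _get_scopes_kwargs classmethod added just below):

from packaging import version

AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

def scopes_kwargs(user_scopes, google_auth_version):
    """Kwargs a transport could pass to the google.auth helpers (sketch only)."""
    if version.parse(google_auth_version) >= version.parse("1.25.0"):
        # Newer google-auth keeps user scopes and library defaults separate,
        # so ADC only falls back to the defaults when the caller passed none.
        return {"scopes": user_scopes, "default_scopes": AUTH_SCOPES}
    # Older google-auth only understands a single scopes argument.
    return {"scopes": user_scopes or AUTH_SCOPES}

print(scopes_kwargs(None, "1.24.0"))  # single scopes kwarg, defaults applied
print(scopes_kwargs(None, "1.28.0"))  # scopes=None plus default_scopes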
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. @@ -112,11 +172,11 @@ def _prep_wrapped_messages(self, client_info): @property def suggest_queries( self, - ) -> typing.Callable[ + ) -> Callable[ [auto_suggestion_service.SuggestQueriesRequest], - typing.Union[ + Union[ auto_suggestion_service.SuggestQueriesResponse, - typing.Awaitable[auto_suggestion_service.SuggestQueriesResponse], + Awaitable[auto_suggestion_service.SuggestQueriesResponse], ], ]: raise NotImplementedError() diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc.py index b321359..2a364c3 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,20 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore from google.cloud.dataqna_v1alpha.types import auto_suggestion_service - from .base import AutoSuggestionServiceTransport, DEFAULT_CLIENT_INFO @@ -120,20 +117,22 @@ def __init__( self, *, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -157,6 +156,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -171,88 +174,75 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - - self._stubs = {} # type: Dict[str, Callable] + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + @classmethod def create_channel( cls, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -260,7 +250,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -283,13 +273,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) diff --git a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc_asyncio.py b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc_asyncio.py index 358aac6..f10da78 100644 --- a/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc_asyncio.py +++ b/google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore from google.cloud.dataqna_v1alpha.types import auto_suggestion_service - from .base import AutoSuggestionServiceTransport, DEFAULT_CLIENT_INFO from .grpc import AutoSuggestionServiceGrpcTransport @@ -123,7 +120,7 @@ class AutoSuggestionServiceGrpcAsyncIOTransport(AutoSuggestionServiceTransport): def create_channel( cls, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -131,7 +128,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -150,13 +147,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -164,20 +163,22 @@ def __init__( self, *, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -202,12 +203,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -216,82 +221,68 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) - self._stubs = {} + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataqna_v1alpha/services/question_service/__init__.py b/google/cloud/dataqna_v1alpha/services/question_service/__init__.py index 9626dd0..8de2e92 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/__init__.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
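With this change both gRPC transports accept client_cert_source_for_mtls and build the SSL channel credentials themselves, so a caller normally just supplies a certificate callback through client_options and opts in via GOOGLE_API_USE_CLIENT_CERTIFICATE. A hedged usage sketch; the top-level google.cloud.dataqna_v1alpha import and the PEM file names are assumptions, not taken from this diff:

import os

from google.api_core.client_options import ClientOptions
from google.cloud import dataqna_v1alpha

# Client certificates are only considered when this variable is "true".
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

def client_cert_source():
    # Hypothetical callback returning (cert_bytes, key_bytes), both PEM-encoded.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()

client = dataqna_v1alpha.AutoSuggestionServiceClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)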
# - from .client import QuestionServiceClient from .async_client import QuestionServiceAsyncClient diff --git a/google/cloud/dataqna_v1alpha/services/question_service/async_client.py b/google/cloud/dataqna_v1alpha/services/question_service/async_client.py index c766ae9..12bbfc3 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/async_client.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,10 +20,10 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dataqna_v1alpha.types import question @@ -33,10 +31,9 @@ from google.cloud.dataqna_v1alpha.types import question_service from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import QuestionServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import QuestionServiceGrpcAsyncIOTransport from .client import QuestionServiceClient @@ -73,42 +70,67 @@ class QuestionServiceAsyncClient: parse_user_feedback_path = staticmethod( QuestionServiceClient.parse_user_feedback_path ) - common_billing_account_path = staticmethod( QuestionServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( QuestionServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(QuestionServiceClient.common_folder_path) parse_common_folder_path = staticmethod( QuestionServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( QuestionServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( QuestionServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(QuestionServiceClient.common_project_path) parse_common_project_path = staticmethod( QuestionServiceClient.parse_common_project_path ) - common_location_path = staticmethod(QuestionServiceClient.common_location_path) parse_common_location_path = staticmethod( QuestionServiceClient.parse_common_location_path ) - from_service_account_file = QuestionServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuestionServiceAsyncClient: The constructed client. + """ + return QuestionServiceClient.from_service_account_info.__func__(QuestionServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuestionServiceAsyncClient: The constructed client. + """ + return QuestionServiceClient.from_service_account_file.__func__(QuestionServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property def transport(self) -> QuestionServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: QuestionServiceTransport: The transport used by the client instance. @@ -122,12 +144,12 @@ def transport(self) -> QuestionServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, QuestionServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the question service client. + """Instantiates the question service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -159,7 +181,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = QuestionServiceClient( credentials=credentials, transport=transport, @@ -179,16 +200,16 @@ async def get_question( r"""Gets a previously created question. Args: - request (:class:`~.question_service.GetQuestionRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.GetQuestionRequest`): The request object. A request to get a previously created question. name (:class:`str`): Required. The unique identifier for the question. Example: ``projects/foo/locations/bar/questions/1234`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -196,7 +217,7 @@ async def get_question( sent along with the request as metadata. Returns: - ~.question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -218,7 +239,6 @@ async def get_question( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -255,22 +275,22 @@ async def create_question( r"""Creates a question. Args: - request (:class:`~.question_service.CreateQuestionRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.CreateQuestionRequest`): The request object. Request to create a question resource. parent (:class:`str`): Required. The name of the project this data source reference belongs to. 
Example: ``projects/foo/locations/bar`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - question (:class:`~.gcd_question.Question`): + question (:class:`google.cloud.dataqna_v1alpha.types.Question`): Required. The question to create. This corresponds to the ``question`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -278,7 +298,7 @@ async def create_question( sent along with the request as metadata. Returns: - ~.gcd_question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -300,7 +320,6 @@ async def create_question( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if question is not None: @@ -339,22 +358,23 @@ async def execute_question( r"""Executes an interpretation. Args: - request (:class:`~.question_service.ExecuteQuestionRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.ExecuteQuestionRequest`): The request object. Request to execute an interpretation. name (:class:`str`): Required. The unique identifier for the question. Example: ``projects/foo/locations/bar/questions/1234`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. interpretation_index (:class:`int`): Required. Index of the interpretation to execute. + This corresponds to the ``interpretation_index`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -362,7 +382,7 @@ async def execute_question( sent along with the request as metadata. Returns: - ~.question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -384,7 +404,6 @@ async def execute_question( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if interpretation_index is not None: @@ -422,17 +441,17 @@ async def get_user_feedback( r"""Gets previously created user feedback. Args: - request (:class:`~.question_service.GetUserFeedbackRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.GetUserFeedbackRequest`): The request object. Request to get user feedback. name (:class:`str`): Required. The unique identifier for the user feedback. User feedback is a singleton resource on a Question. Example: ``projects/foo/locations/bar/questions/1234/userFeedback`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -440,7 +459,7 @@ async def get_user_feedback( sent along with the request as metadata. Returns: - ~.user_feedback.UserFeedback: + google.cloud.dataqna_v1alpha.types.UserFeedback: Feedback provided by a user. """ # Create or coerce a protobuf request object. 
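The async client now defines its own from_service_account_info and from_service_account_file constructors instead of borrowing the sync classmethod directly. A usage sketch under the assumption that QuestionServiceAsyncClient is re-exported from google.cloud.dataqna_v1alpha; the key path is a placeholder and the question name follows the docstring example:

import asyncio

from google.cloud import dataqna_v1alpha

async def main():
    client = dataqna_v1alpha.QuestionServiceAsyncClient.from_service_account_file(
        "service-account.json"
    )
    question = await client.get_question(
        name="projects/foo/locations/bar/questions/1234"
    )
    print(question.name)

asyncio.run(main())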
@@ -457,7 +476,6 @@ async def get_user_feedback( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -486,7 +504,7 @@ async def update_user_feedback( request: question_service.UpdateUserFeedbackRequest = None, *, user_feedback: gcd_user_feedback.UserFeedback = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -495,24 +513,24 @@ async def update_user_feedback( there was none before (upsert). Args: - request (:class:`~.question_service.UpdateUserFeedbackRequest`): + request (:class:`google.cloud.dataqna_v1alpha.types.UpdateUserFeedbackRequest`): The request object. Request to updates user feedback. - user_feedback (:class:`~.gcd_user_feedback.UserFeedback`): + user_feedback (:class:`google.cloud.dataqna_v1alpha.types.UserFeedback`): Required. The user feedback to update. This can be called even if there is no user feedback so far. The feedback's name field is used to identify the user feedback (and the corresponding question) to update. + This corresponds to the ``user_feedback`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): The list of fields to be updated. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -520,7 +538,7 @@ async def update_user_feedback( sent along with the request as metadata. Returns: - ~.gcd_user_feedback.UserFeedback: + google.cloud.dataqna_v1alpha.types.UserFeedback: Feedback provided by a user. """ # Create or coerce a protobuf request object. @@ -537,7 +555,6 @@ async def update_user_feedback( # If we have keyword arguments corresponding to fields on the # request, apply these. - if user_feedback is not None: request.user_feedback = user_feedback if update_mask is not None: diff --git a/google/cloud/dataqna_v1alpha/services/question_service/client.py b/google/cloud/dataqna_v1alpha/services/question_service/client.py index e33011a..1d77cec 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/client.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,10 +21,10 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore @@ -37,10 +35,9 @@ from google.cloud.dataqna_v1alpha.types import question_service from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore - +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from .transports.base import QuestionServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import QuestionServiceGrpcTransport from .transports.grpc_asyncio import QuestionServiceGrpcAsyncIOTransport @@ -61,7 +58,7 @@ class QuestionServiceClientMeta(type): _transport_registry["grpc_asyncio"] = QuestionServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[QuestionServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -101,7 +98,8 @@ class QuestionServiceClient(metaclass=QuestionServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -133,10 +131,27 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + QuestionServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -145,7 +160,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + QuestionServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -155,23 +170,24 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> QuestionServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - QuestionServiceTransport: The transport used by the client instance. + QuestionServiceTransport: The transport used by the client + instance. """ return self._transport @staticmethod def question_path(project: str, location: str, question: str,) -> str: - """Return a fully-qualified question string.""" + """Returns a fully-qualified question string.""" return "projects/{project}/locations/{location}/questions/{question}".format( project=project, location=location, question=question, ) @staticmethod def parse_question_path(path: str) -> Dict[str, str]: - """Parse a question path into its component segments.""" + """Parses a question path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/questions/(?P.+?)$", path, @@ -180,14 +196,14 @@ def parse_question_path(path: str) -> Dict[str, str]: @staticmethod def user_feedback_path(project: str, location: str, question: str,) -> str: - """Return a fully-qualified user_feedback string.""" + """Returns a fully-qualified user_feedback string.""" return "projects/{project}/locations/{location}/questions/{question}/userFeedback".format( project=project, location=location, question=question, ) @staticmethod def parse_user_feedback_path(path: str) -> Dict[str, str]: - """Parse a user_feedback path into its component segments.""" + """Parses a user_feedback path into its component segments.""" m = re.match( r"^projects/(?P.+?)/locations/(?P.+?)/questions/(?P.+?)/userFeedback$", path, @@ -196,7 +212,7 @@ def parse_user_feedback_path(path: str) -> Dict[str, str]: @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -209,7 +225,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -220,7 +236,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -231,7 +247,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -242,7 +258,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -256,12 
+272,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, QuestionServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the question service client. + """Instantiates the question service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -269,10 +285,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.QuestionServiceTransport]): The + transport (Union[str, QuestionServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -308,21 +324,18 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -334,12 +347,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -354,8 +369,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -365,7 +380,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -382,16 +397,16 @@ def get_question( r"""Gets a previously created question. 
Args: - request (:class:`~.question_service.GetQuestionRequest`): + request (google.cloud.dataqna_v1alpha.types.GetQuestionRequest): The request object. A request to get a previously created question. - name (:class:`str`): + name (str): Required. The unique identifier for the question. Example: ``projects/foo/locations/bar/questions/1234`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -399,7 +414,7 @@ def get_question( sent along with the request as metadata. Returns: - ~.question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -423,10 +438,8 @@ def get_question( # there are no flattened fields. if not isinstance(request, question_service.GetQuestionRequest): request = question_service.GetQuestionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -459,22 +472,22 @@ def create_question( r"""Creates a question. Args: - request (:class:`~.question_service.CreateQuestionRequest`): + request (google.cloud.dataqna_v1alpha.types.CreateQuestionRequest): The request object. Request to create a question resource. - parent (:class:`str`): + parent (str): Required. The name of the project this data source reference belongs to. Example: ``projects/foo/locations/bar`` + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - question (:class:`~.gcd_question.Question`): + question (google.cloud.dataqna_v1alpha.types.Question): Required. The question to create. This corresponds to the ``question`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -482,7 +495,7 @@ def create_question( sent along with the request as metadata. Returns: - ~.gcd_question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -506,10 +519,8 @@ def create_question( # there are no flattened fields. if not isinstance(request, question_service.CreateQuestionRequest): request = question_service.CreateQuestionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if question is not None: @@ -544,22 +555,23 @@ def execute_question( r"""Executes an interpretation. Args: - request (:class:`~.question_service.ExecuteQuestionRequest`): + request (google.cloud.dataqna_v1alpha.types.ExecuteQuestionRequest): The request object. Request to execute an interpretation. - name (:class:`str`): + name (str): Required. The unique identifier for the question. Example: ``projects/foo/locations/bar/questions/1234`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - interpretation_index (:class:`int`): + interpretation_index (int): Required. Index of the interpretation to execute. 
+ This corresponds to the ``interpretation_index`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -567,7 +579,7 @@ def execute_question( sent along with the request as metadata. Returns: - ~.question.Question: + google.cloud.dataqna_v1alpha.types.Question: The question resource represents a natural language query, its settings, understanding generated by the system, @@ -591,10 +603,8 @@ def execute_question( # there are no flattened fields. if not isinstance(request, question_service.ExecuteQuestionRequest): request = question_service.ExecuteQuestionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if interpretation_index is not None: @@ -628,17 +638,17 @@ def get_user_feedback( r"""Gets previously created user feedback. Args: - request (:class:`~.question_service.GetUserFeedbackRequest`): + request (google.cloud.dataqna_v1alpha.types.GetUserFeedbackRequest): The request object. Request to get user feedback. - name (:class:`str`): + name (str): Required. The unique identifier for the user feedback. User feedback is a singleton resource on a Question. Example: ``projects/foo/locations/bar/questions/1234/userFeedback`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -646,7 +656,7 @@ def get_user_feedback( sent along with the request as metadata. Returns: - ~.user_feedback.UserFeedback: + google.cloud.dataqna_v1alpha.types.UserFeedback: Feedback provided by a user. """ # Create or coerce a protobuf request object. @@ -665,10 +675,8 @@ def get_user_feedback( # there are no flattened fields. if not isinstance(request, question_service.GetUserFeedbackRequest): request = question_service.GetUserFeedbackRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -693,7 +701,7 @@ def update_user_feedback( request: question_service.UpdateUserFeedbackRequest = None, *, user_feedback: gcd_user_feedback.UserFeedback = None, - update_mask: field_mask.FieldMask = None, + update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), @@ -702,24 +710,24 @@ def update_user_feedback( there was none before (upsert). Args: - request (:class:`~.question_service.UpdateUserFeedbackRequest`): + request (google.cloud.dataqna_v1alpha.types.UpdateUserFeedbackRequest): The request object. Request to updates user feedback. - user_feedback (:class:`~.gcd_user_feedback.UserFeedback`): + user_feedback (google.cloud.dataqna_v1alpha.types.UserFeedback): Required. The user feedback to update. This can be called even if there is no user feedback so far. The feedback's name field is used to identify the user feedback (and the corresponding question) to update. + This corresponds to the ``user_feedback`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to be updated. 
This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -727,7 +735,7 @@ def update_user_feedback( sent along with the request as metadata. Returns: - ~.gcd_user_feedback.UserFeedback: + google.cloud.dataqna_v1alpha.types.UserFeedback: Feedback provided by a user. """ # Create or coerce a protobuf request object. @@ -746,10 +754,8 @@ def update_user_feedback( # there are no flattened fields. if not isinstance(request, question_service.UpdateUserFeedbackRequest): request = question_service.UpdateUserFeedbackRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if user_feedback is not None: request.user_feedback = user_feedback if update_mask is not None: diff --git a/google/cloud/dataqna_v1alpha/services/question_service/transports/__init__.py b/google/cloud/dataqna_v1alpha/services/question_service/transports/__init__.py index 0852231..dee15de 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/transports/__init__.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type @@ -28,7 +26,6 @@ _transport_registry["grpc"] = QuestionServiceGrpcTransport _transport_registry["grpc_asyncio"] = QuestionServiceGrpcAsyncIOTransport - __all__ = ( "QuestionServiceTransport", "QuestionServiceGrpcTransport", diff --git a/google/cloud/dataqna_v1alpha/services/question_service/transports/base.py b/google/cloud/dataqna_v1alpha/services/question_service/transports/base.py index 3ba8790..b224da2 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/transports/base.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,16 +13,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
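update_user_feedback now annotates its update_mask with google.protobuf.field_mask_pb2.FieldMask rather than the old field_mask alias. A usage sketch; the free_form_feedback field path and the top-level dataqna_v1alpha exports are assumptions, while the resource name follows the docstring example:

from google.cloud import dataqna_v1alpha
from google.protobuf import field_mask_pb2

client = dataqna_v1alpha.QuestionServiceClient()

feedback = dataqna_v1alpha.UserFeedback(
    name="projects/foo/locations/bar/questions/1234/userFeedback",
    free_form_feedback="Correct interpretation",  # assumed field name
)

# Only the fields named in the mask are updated (upsert otherwise).
mask = field_mask_pb2.FieldMask(paths=["free_form_feedback"])

updated = client.update_user_feedback(user_feedback=feedback, update_mask=mask)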
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.cloud.dataqna_v1alpha.types import question from google.cloud.dataqna_v1alpha.types import question as gcd_question @@ -31,7 +31,6 @@ from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-dataqna",).version, @@ -39,27 +38,41 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + +_API_CORE_VERSION = google.api_core.__version__ + class QuestionServiceTransport(abc.ABC): """Abstract transport class for QuestionService.""" AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + DEFAULT_HOST: str = "dataqna.googleapis.com" + def __init__( self, *, - host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -68,13 +81,13 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -82,28 +95,75 @@ def __init__( host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) + # TODO(busunkim): These two class methods are in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-api-core + # and google-auth are increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + # TODO: Remove this function once google-api-core >= 1.26.0 is required + @classmethod + def _get_self_signed_jwt_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Union[Optional[Sequence[str]], str]]: + """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version""" + + self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {} + + if _API_CORE_VERSION and ( + packaging.version.parse(_API_CORE_VERSION) + >= packaging.version.parse("1.26.0") + ): + self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES + self_signed_jwt_kwargs["scopes"] = scopes + self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST + else: + self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES + + return self_signed_jwt_kwargs def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
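The _get_scopes_kwargs and _get_self_signed_jwt_kwargs helpers introduced above exist only to bridge older google-auth and google-api-core releases: they compare the installed version against the release that added the newer keyword arguments and fall back to the old calling convention otherwise. A standalone sketch of that version-gating pattern (the version strings passed in are illustrative):

    import packaging.version

    _CLOUD_PLATFORM = "https://www.googleapis.com/auth/cloud-platform"

    def scopes_kwargs(installed_google_auth: str, scopes=None) -> dict:
        # google-auth 1.25.0 introduced the ``default_scopes`` keyword, so the
        # newer form is only used when the installed version supports it.
        if packaging.version.parse(installed_google_auth) >= packaging.version.parse("1.25.0"):
            return {"scopes": scopes, "default_scopes": (_CLOUD_PLATFORM,)}
        return {"scopes": scopes or (_CLOUD_PLATFORM,)}

    # On an older google-auth the default scope is folded into ``scopes`` itself.
    assert scopes_kwargs("1.24.0") == {"scopes": (_CLOUD_PLATFORM,)}
    assert scopes_kwargs("1.26.1") == {"scopes": None, "default_scopes": (_CLOUD_PLATFORM,)}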
@@ -130,49 +190,46 @@ def _prep_wrapped_messages(self, client_info): @property def get_question( self, - ) -> typing.Callable[ + ) -> Callable[ [question_service.GetQuestionRequest], - typing.Union[question.Question, typing.Awaitable[question.Question]], + Union[question.Question, Awaitable[question.Question]], ]: raise NotImplementedError() @property def create_question( self, - ) -> typing.Callable[ + ) -> Callable[ [question_service.CreateQuestionRequest], - typing.Union[gcd_question.Question, typing.Awaitable[gcd_question.Question]], + Union[gcd_question.Question, Awaitable[gcd_question.Question]], ]: raise NotImplementedError() @property def execute_question( self, - ) -> typing.Callable[ + ) -> Callable[ [question_service.ExecuteQuestionRequest], - typing.Union[question.Question, typing.Awaitable[question.Question]], + Union[question.Question, Awaitable[question.Question]], ]: raise NotImplementedError() @property def get_user_feedback( self, - ) -> typing.Callable[ + ) -> Callable[ [question_service.GetUserFeedbackRequest], - typing.Union[ - user_feedback.UserFeedback, typing.Awaitable[user_feedback.UserFeedback] - ], + Union[user_feedback.UserFeedback, Awaitable[user_feedback.UserFeedback]], ]: raise NotImplementedError() @property def update_user_feedback( self, - ) -> typing.Callable[ + ) -> Callable[ [question_service.UpdateUserFeedbackRequest], - typing.Union[ - gcd_user_feedback.UserFeedback, - typing.Awaitable[gcd_user_feedback.UserFeedback], + Union[ + gcd_user_feedback.UserFeedback, Awaitable[gcd_user_feedback.UserFeedback] ], ]: raise NotImplementedError() diff --git a/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc.py b/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc.py index 4cd9e45..3fc22e9 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
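The property hunk above retypes each RPC as Callable[..., Union[Result, Awaitable[Result]]], so a single abstract base class can describe both the blocking gRPC transport and the asyncio transport. A minimal sketch of that typing pattern, detached from the generated code (all names are placeholders):

    import abc
    from typing import Awaitable, Callable, Union

    class Reply:
        pass

    class Transport(abc.ABC):
        @property
        def get_question(self) -> Callable[[str], Union[Reply, Awaitable[Reply]]]:
            # A sync subclass returns a callable yielding Reply; an async
            # subclass returns one yielding Awaitable[Reply].
            raise NotImplementedError()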
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore @@ -31,7 +29,6 @@ from google.cloud.dataqna_v1alpha.types import question_service from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback - from .base import QuestionServiceTransport, DEFAULT_CLIENT_INFO @@ -70,20 +67,22 @@ def __init__( self, *, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -107,6 +106,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,88 +124,75 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - - self._stubs = {} # type: Dict[str, Callable] + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + @classmethod def create_channel( cls, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,7 +200,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. 
credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -233,13 +223,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) diff --git a/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc_asyncio.py b/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc_asyncio.py index 494610e..f1d41da 100644 --- a/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc_asyncio.py +++ b/google/cloud/dataqna_v1alpha/services/question_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,15 +13,14 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore @@ -32,7 +30,6 @@ from google.cloud.dataqna_v1alpha.types import question_service from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback - from .base import QuestionServiceTransport, DEFAULT_CLIENT_INFO from .grpc import QuestionServiceGrpcTransport @@ -73,7 +70,7 @@ class QuestionServiceGrpcAsyncIOTransport(QuestionServiceTransport): def create_channel( cls, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -81,7 +78,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -100,13 +97,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + + self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes) + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + **self_signed_jwt_kwargs, **kwargs, ) @@ -114,20 +113,22 @@ def __init__( self, *, host: str = "dataqna.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -152,12 +153,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -166,82 +171,68 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. + # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) - self._stubs = {} + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/dataqna_v1alpha/types/__init__.py b/google/cloud/dataqna_v1alpha/types/__init__.py index 89782e2..df3351a 100644 --- a/google/cloud/dataqna_v1alpha/types/__init__.py +++ b/google/cloud/dataqna_v1alpha/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,54 +13,56 @@ # See the License for the specific language governing permissions and # limitations under the License. 
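Both transport constructors above now build the channel themselves when none is supplied: they attach mutual-TLS credentials when a client_cert_source_for_mtls callback is given and pass channel options that lift gRPC's default 4 MB message-size caps. A rough sketch of that setup using plain grpc (host and certificate bytes are placeholders; the generated code goes through grpc_helpers.create_channel instead):

    import grpc

    def build_channel(host: str, cert: bytes = None, key: bytes = None) -> grpc.Channel:
        # -1 removes the default limits on message sizes, as in the transports above.
        options = [
            ("grpc.max_send_message_length", -1),
            ("grpc.max_receive_message_length", -1),
        ]
        if cert and key:
            # Mutual TLS: present a client certificate during the handshake.
            creds = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
            return grpc.secure_channel(host, creds, options=options)
        return grpc.insecure_channel(host, options=options)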
# - from .annotated_string import AnnotatedString from .auto_suggestion_service import ( - SuggestQueriesRequest, Suggestion, SuggestionInfo, + SuggestQueriesRequest, SuggestQueriesResponse, + SuggestionType, ) from .question import ( - Question, - InterpretError, - ExecutionInfo, BigQueryJob, - Interpretation, DataQuery, + DebugFlags, + ExecutionInfo, HumanReadable, + Interpretation, InterpretationStructure, - DebugFlags, + InterpretError, + Question, + InterpretEntity, ) -from .user_feedback import UserFeedback from .question_service import ( - GetQuestionRequest, CreateQuestionRequest, ExecuteQuestionRequest, + GetQuestionRequest, GetUserFeedbackRequest, UpdateUserFeedbackRequest, ) - +from .user_feedback import UserFeedback __all__ = ( "AnnotatedString", - "SuggestQueriesRequest", "Suggestion", "SuggestionInfo", + "SuggestQueriesRequest", "SuggestQueriesResponse", - "Question", - "InterpretError", - "ExecutionInfo", + "SuggestionType", "BigQueryJob", - "Interpretation", "DataQuery", + "DebugFlags", + "ExecutionInfo", "HumanReadable", + "Interpretation", "InterpretationStructure", - "DebugFlags", - "UserFeedback", - "GetQuestionRequest", + "InterpretError", + "Question", + "InterpretEntity", "CreateQuestionRequest", "ExecuteQuestionRequest", + "GetQuestionRequest", "GetUserFeedbackRequest", "UpdateUserFeedbackRequest", + "UserFeedback", ) diff --git a/google/cloud/dataqna_v1alpha/types/annotated_string.py b/google/cloud/dataqna_v1alpha/types/annotated_string.py index cab21e2..c144ec9 100644 --- a/google/cloud/dataqna_v1alpha/types/annotated_string.py +++ b/google/cloud/dataqna_v1alpha/types/annotated_string.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -60,7 +58,7 @@ class AnnotatedString(proto.Message): Text version of the string. html_formatted (str): HTML version of the string annotation. - markups (Sequence[~.annotated_string.AnnotatedString.SemanticMarkup]): + markups (Sequence[google.cloud.dataqna_v1alpha.types.AnnotatedString.SemanticMarkup]): Semantic version of the string annotation. """ @@ -79,7 +77,7 @@ class SemanticMarkup(proto.Message): with markup information. Attributes: - type_ (~.annotated_string.AnnotatedString.SemanticMarkupType): + type_ (google.cloud.dataqna_v1alpha.types.AnnotatedString.SemanticMarkupType): The semantic type of the markup substring. start_char_index (int): Unicode character index of the query. 
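The annotated_string.py changes above (and the field reflow that follows) describe AnnotatedString as plain text plus an HTML rendering plus a list of SemanticMarkup spans. To illustrate how such a shape is declared and populated with proto-plus, here is a small hypothetical message pair; the names and field numbers are made up for the example:

    import proto  # proto-plus

    class Markup(proto.Message):
        start_char_index = proto.Field(proto.INT32, number=1)
        length = proto.Field(proto.INT32, number=2)

    class Annotated(proto.Message):
        text_formatted = proto.Field(proto.STRING, number=1)
        html_formatted = proto.Field(proto.STRING, number=2)
        markups = proto.RepeatedField(proto.MESSAGE, number=3, message=Markup)

    # proto-plus messages accept keyword construction; repeated fields take lists.
    msg = Annotated(text_formatted="top sales", markups=[Markup(start_char_index=4, length=5)])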
@@ -91,15 +89,11 @@ class SemanticMarkup(proto.Message): type_ = proto.Field( proto.ENUM, number=1, enum="AnnotatedString.SemanticMarkupType", ) + start_char_index = proto.Field(proto.INT32, number=2,) + length = proto.Field(proto.INT32, number=3,) - start_char_index = proto.Field(proto.INT32, number=2) - - length = proto.Field(proto.INT32, number=3) - - text_formatted = proto.Field(proto.STRING, number=1) - - html_formatted = proto.Field(proto.STRING, number=2) - + text_formatted = proto.Field(proto.STRING, number=1,) + html_formatted = proto.Field(proto.STRING, number=2,) markups = proto.RepeatedField(proto.MESSAGE, number=3, message=SemanticMarkup,) diff --git a/google/cloud/dataqna_v1alpha/types/auto_suggestion_service.py b/google/cloud/dataqna_v1alpha/types/auto_suggestion_service.py index 0c031a3..075e553 100644 --- a/google/cloud/dataqna_v1alpha/types/auto_suggestion_service.py +++ b/google/cloud/dataqna_v1alpha/types/auto_suggestion_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataqna_v1alpha.types import annotated_string @@ -42,7 +39,6 @@ class SuggestionType(proto.Enum): class SuggestQueriesRequest(proto.Message): r"""Request for query suggestions. - Attributes: parent (str): Required. The parent of the suggestion query @@ -58,7 +54,7 @@ class SuggestQueriesRequest(proto.Message): are returned. This allows UIs to display suggestions right away, helping the user to get a sense of what a query might look like. - suggestion_types (Sequence[~.auto_suggestion_service.SuggestionType]): + suggestion_types (Sequence[google.cloud.dataqna_v1alpha.types.SuggestionType]): The requested suggestion type. Multiple suggestion types can be requested, but there is no guarantee that the service will return @@ -68,47 +64,40 @@ class SuggestQueriesRequest(proto.Message): cut these suggestions off. """ - parent = proto.Field(proto.STRING, number=1) - - scopes = proto.RepeatedField(proto.STRING, number=2) - - query = proto.Field(proto.STRING, number=3) - + parent = proto.Field(proto.STRING, number=1,) + scopes = proto.RepeatedField(proto.STRING, number=2,) + query = proto.Field(proto.STRING, number=3,) suggestion_types = proto.RepeatedField(proto.ENUM, number=4, enum="SuggestionType",) class Suggestion(proto.Message): r"""A suggestion for a query with a ranking score. - Attributes: - suggestion_info (~.auto_suggestion_service.SuggestionInfo): + suggestion_info (google.cloud.dataqna_v1alpha.types.SuggestionInfo): Detailed information about the suggestion. ranking_score (float): The score of the suggestion. This can be used to define ordering in UI. The score represents confidence in the suggestion where higher is better. All score values must be in the range [0, 1). - suggestion_type (~.auto_suggestion_service.SuggestionType): + suggestion_type (google.cloud.dataqna_v1alpha.types.SuggestionType): The type of the suggestion. """ suggestion_info = proto.Field(proto.MESSAGE, number=1, message="SuggestionInfo",) - - ranking_score = proto.Field(proto.DOUBLE, number=2) - + ranking_score = proto.Field(proto.DOUBLE, number=2,) suggestion_type = proto.Field(proto.ENUM, number=3, enum="SuggestionType",) class SuggestionInfo(proto.Message): r"""Detailed information about the suggestion. 
- Attributes: - annotated_suggestion (~.annotated_string.AnnotatedString): + annotated_suggestion (google.cloud.dataqna_v1alpha.types.AnnotatedString): Annotations for the suggestion. This provides information about which part of the suggestion corresponds to what semantic meaning (e.g. a metric). - query_matches (Sequence[~.auto_suggestion_service.SuggestionInfo.MatchInfo]): + query_matches (Sequence[google.cloud.dataqna_v1alpha.types.SuggestionInfo.MatchInfo]): Matches between user query and the annotated string. """ @@ -148,22 +137,19 @@ class MatchInfo(proto.Message): substring. """ - start_char_index = proto.Field(proto.INT32, number=1) - - length = proto.Field(proto.INT32, number=2) + start_char_index = proto.Field(proto.INT32, number=1,) + length = proto.Field(proto.INT32, number=2,) annotated_suggestion = proto.Field( proto.MESSAGE, number=1, message=annotated_string.AnnotatedString, ) - query_matches = proto.RepeatedField(proto.MESSAGE, number=2, message=MatchInfo,) class SuggestQueriesResponse(proto.Message): r"""Response to SuggestQueries. - Attributes: - suggestions (Sequence[~.auto_suggestion_service.Suggestion]): + suggestions (Sequence[google.cloud.dataqna_v1alpha.types.Suggestion]): A list of suggestions. """ diff --git a/google/cloud/dataqna_v1alpha/types/question.py b/google/cloud/dataqna_v1alpha/types/question.py index b37fdb9..5d79f86 100644 --- a/google/cloud/dataqna_v1alpha/types/question.py +++ b/google/cloud/dataqna_v1alpha/types/question.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +13,12 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataqna_v1alpha.types import annotated_string -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore __protobuf__ = proto.module( @@ -76,7 +73,7 @@ class Question(proto.Message): data source reference resource). There must not be more than one annotation with the same data source reference. - interpret_error (~.question.InterpretError): + interpret_error (google.cloud.dataqna_v1alpha.types.InterpretError): An error field explaining why interpretation failed. This is only populated if the interpretation failed. @@ -87,17 +84,17 @@ class Question(proto.Message): interpret the question. Clients should present the error to the user so the user can rephrase the question. - interpretations (Sequence[~.question.Interpretation]): + interpretations (Sequence[google.cloud.dataqna_v1alpha.types.Interpretation]): A list of interpretations for this question. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Time when the question was created. user_email (str): Output only. The e-mail address of the user that created this question. - debug_flags (~.question.DebugFlags): + debug_flags (google.cloud.dataqna_v1alpha.types.DebugFlags): Input only. Immutable. Flags to request additional information for debugging purposes. - debug_info (~.gp_any.Any): + debug_info (google.protobuf.any_pb2.Any): Top level debug information. This will be stored as the type DebugInformation. 
Using Any so clients don't @@ -105,40 +102,30 @@ class Question(proto.Message): message. """ - name = proto.Field(proto.STRING, number=1) - - scopes = proto.RepeatedField(proto.STRING, number=2) - - query = proto.Field(proto.STRING, number=3) - - data_source_annotations = proto.RepeatedField(proto.STRING, number=4) - + name = proto.Field(proto.STRING, number=1,) + scopes = proto.RepeatedField(proto.STRING, number=2,) + query = proto.Field(proto.STRING, number=3,) + data_source_annotations = proto.RepeatedField(proto.STRING, number=4,) interpret_error = proto.Field(proto.MESSAGE, number=5, message="InterpretError",) - interpretations = proto.RepeatedField( proto.MESSAGE, number=6, message="Interpretation", ) - - create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) - - user_email = proto.Field(proto.STRING, number=8) - + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,) + user_email = proto.Field(proto.STRING, number=8,) debug_flags = proto.Field(proto.MESSAGE, number=9, message="DebugFlags",) - - debug_info = proto.Field(proto.MESSAGE, number=10, message=gp_any.Any,) + debug_info = proto.Field(proto.MESSAGE, number=10, message=any_pb2.Any,) class InterpretError(proto.Message): r"""Details on the failure to interpret the question. - Attributes: message (str): Error message explaining why this question could not be interpreted. - code (~.question.InterpretError.InterpretErrorCode): + code (google.cloud.dataqna_v1alpha.types.InterpretError.InterpretErrorCode): The code for the error category why the interpretation failed. - details (~.question.InterpretError.InterpretErrorDetails): + details (google.cloud.dataqna_v1alpha.types.InterpretError.InterpretErrorDetails): Details on interpretation failure. """ @@ -153,14 +140,13 @@ class InterpretErrorCode(proto.Enum): class InterpretErrorDetails(proto.Message): r"""Details on interpretation failure. - Attributes: - unsupported_details (~.question.InterpretError.InterpretUnsupportedDetails): + unsupported_details (google.cloud.dataqna_v1alpha.types.InterpretError.InterpretUnsupportedDetails): Populated if parts of the query are unsupported. - incomplete_query_details (~.question.InterpretError.InterpretIncompleteQueryDetails): + incomplete_query_details (google.cloud.dataqna_v1alpha.types.InterpretError.InterpretIncompleteQueryDetails): Populated if the query is incomplete. - ambiguity_details (~.question.InterpretError.InterpretAmbiguityDetails): + ambiguity_details (google.cloud.dataqna_v1alpha.types.InterpretError.InterpretAmbiguityDetails): Populated if the query was too ambiguous. """ @@ -169,20 +155,17 @@ class InterpretErrorDetails(proto.Message): number=1, message="InterpretError.InterpretUnsupportedDetails", ) - incomplete_query_details = proto.Field( proto.MESSAGE, number=2, message="InterpretError.InterpretIncompleteQueryDetails", ) - ambiguity_details = proto.Field( proto.MESSAGE, number=3, message="InterpretError.InterpretAmbiguityDetails", ) class InterpretUnsupportedDetails(proto.Message): r"""Details about unsupported parts in a query. - Attributes: operators (Sequence[str]): Unsupported operators. For example: median. @@ -190,15 +173,13 @@ class InterpretUnsupportedDetails(proto.Message): Unsupported intents. 
""" - operators = proto.RepeatedField(proto.STRING, number=1) - - intent = proto.RepeatedField(proto.STRING, number=2) + operators = proto.RepeatedField(proto.STRING, number=1,) + intent = proto.RepeatedField(proto.STRING, number=2,) class InterpretIncompleteQueryDetails(proto.Message): r"""Details about an incomplete query. - Attributes: - entities (Sequence[~.question.InterpretEntity]): + entities (Sequence[google.cloud.dataqna_v1alpha.types.InterpretEntity]): List of missing interpret entities. """ @@ -208,12 +189,10 @@ class InterpretAmbiguityDetails(proto.Message): r"""Details about a query that was too ambiguous. Currently, the message has no fields and its presence signals that there was ambiguity. - """ - - message = proto.Field(proto.STRING, number=1) + """ + message = proto.Field(proto.STRING, number=1,) code = proto.Field(proto.ENUM, number=2, enum=InterpretErrorCode,) - details = proto.Field(proto.MESSAGE, number=3, message=InterpretErrorDetails,) @@ -222,14 +201,14 @@ class ExecutionInfo(proto.Message): the execution. Attributes: - job_creation_status (~.status.Status): + job_creation_status (google.rpc.status_pb2.Status): Status returned by the backend when the job was created. - job_execution_state (~.question.ExecutionInfo.JobExecutionState): + job_execution_state (google.cloud.dataqna_v1alpha.types.ExecutionInfo.JobExecutionState): Status of the job execution. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Time when the execution was triggered. - bigquery_job (~.question.BigQueryJob): + bigquery_job (google.cloud.dataqna_v1alpha.types.BigQueryJob): BigQuery job information. Future versions will have different backends. Hence, clients must make sure they can handle it @@ -244,12 +223,11 @@ class JobExecutionState(proto.Enum): SUCCEEDED = 3 FAILED = 4 - job_creation_status = proto.Field(proto.MESSAGE, number=1, message=status.Status,) - + job_creation_status = proto.Field( + proto.MESSAGE, number=1, message=status_pb2.Status, + ) job_execution_state = proto.Field(proto.ENUM, number=2, enum=JobExecutionState,) - - create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - + create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) bigquery_job = proto.Field(proto.MESSAGE, number=4, message="BigQueryJob",) @@ -267,16 +245,13 @@ class BigQueryJob(proto.Message): The location where the job is running. """ - job_id = proto.Field(proto.STRING, number=1) - - project_id = proto.Field(proto.STRING, number=2) - - location = proto.Field(proto.STRING, number=3) + job_id = proto.Field(proto.STRING, number=1,) + project_id = proto.Field(proto.STRING, number=2,) + location = proto.Field(proto.STRING, number=3,) class Interpretation(proto.Message): r"""An interpretation of a natural language query. - Attributes: data_sources (Sequence[str]): List of data sources used in the current @@ -290,35 +265,29 @@ class Interpretation(proto.Message): display a Did You Mean (DYM) dialog if this is non-empty, even if this is the only interpretation. - human_readable (~.question.HumanReadable): + human_readable (google.cloud.dataqna_v1alpha.types.HumanReadable): Human readable representation of the query. - interpretation_structure (~.question.InterpretationStructure): + interpretation_structure (google.cloud.dataqna_v1alpha.types.InterpretationStructure): Information about the interpretation structure that helps to understand and visualize the response. 
- data_query (~.question.DataQuery): + data_query (google.cloud.dataqna_v1alpha.types.DataQuery): Representation of the data query to be sent to the backend. - execution_info (~.question.ExecutionInfo): + execution_info (google.cloud.dataqna_v1alpha.types.ExecutionInfo): Information about the backend response. This is populated only if execution of an interpretation was requested. """ - data_sources = proto.RepeatedField(proto.STRING, number=1) - - confidence = proto.Field(proto.DOUBLE, number=2) - - unused_phrases = proto.RepeatedField(proto.STRING, number=3) - + data_sources = proto.RepeatedField(proto.STRING, number=1,) + confidence = proto.Field(proto.DOUBLE, number=2,) + unused_phrases = proto.RepeatedField(proto.STRING, number=3,) human_readable = proto.Field(proto.MESSAGE, number=4, message="HumanReadable",) - interpretation_structure = proto.Field( proto.MESSAGE, number=5, message="InterpretationStructure", ) - data_query = proto.Field(proto.MESSAGE, number=6, message="DataQuery",) - execution_info = proto.Field(proto.MESSAGE, number=7, message="ExecutionInfo",) @@ -336,24 +305,22 @@ class DataQuery(proto.Message): backend. """ - sql = proto.Field(proto.STRING, number=1) + sql = proto.Field(proto.STRING, number=1,) class HumanReadable(proto.Message): r"""Human readable interpretation. - Attributes: - generated_interpretation (~.annotated_string.AnnotatedString): + generated_interpretation (google.cloud.dataqna_v1alpha.types.AnnotatedString): Generated query explaining the interpretation. - original_question (~.annotated_string.AnnotatedString): + original_question (google.cloud.dataqna_v1alpha.types.AnnotatedString): Annotations on the original query. """ generated_interpretation = proto.Field( proto.MESSAGE, number=1, message=annotated_string.AnnotatedString, ) - original_question = proto.Field( proto.MESSAGE, number=2, message=annotated_string.AnnotatedString, ) @@ -364,11 +331,11 @@ class InterpretationStructure(proto.Message): understand and visualize the response. Attributes: - visualization_types (Sequence[~.question.InterpretationStructure.VisualizationType]): + visualization_types (Sequence[google.cloud.dataqna_v1alpha.types.InterpretationStructure.VisualizationType]): List of possible visualization types to apply for this interpretation. The order has no relevance. - column_info (Sequence[~.question.InterpretationStructure.ColumnInfo]): + column_info (Sequence[google.cloud.dataqna_v1alpha.types.InterpretationStructure.ColumnInfo]): Information about the output columns, that is, the columns that will be returned by the backend. @@ -394,7 +361,6 @@ class VisualizationType(proto.Enum): class ColumnInfo(proto.Message): r"""Information about a column. - Attributes: output_alias (str): The alias of the output column as used by the @@ -405,20 +371,17 @@ class ColumnInfo(proto.Message): Human readable name of the output column. """ - output_alias = proto.Field(proto.STRING, number=1) - - display_name = proto.Field(proto.STRING, number=2) + output_alias = proto.Field(proto.STRING, number=1,) + display_name = proto.Field(proto.STRING, number=2,) visualization_types = proto.RepeatedField( proto.ENUM, number=1, enum=VisualizationType, ) - column_info = proto.RepeatedField(proto.MESSAGE, number=2, message=ColumnInfo,) class DebugFlags(proto.Message): r"""Configuriation of debug flags. - Attributes: include_va_query (bool): Whether to include the original VAQuery. @@ -459,29 +422,18 @@ class DebugFlags(proto.Message): Whether to include the domain list. 
""" - include_va_query = proto.Field(proto.BOOL, number=1) - - include_nested_va_query = proto.Field(proto.BOOL, number=2) - - include_human_interpretation = proto.Field(proto.BOOL, number=3) - - include_aqua_debug_response = proto.Field(proto.BOOL, number=4) - - time_override = proto.Field(proto.INT64, number=5) - - is_internal_google_user = proto.Field(proto.BOOL, number=6) - - ignore_cache = proto.Field(proto.BOOL, number=7) - - include_search_entities_rpc = proto.Field(proto.BOOL, number=8) - - include_list_column_annotations_rpc = proto.Field(proto.BOOL, number=9) - - include_virtual_analyst_entities = proto.Field(proto.BOOL, number=10) - - include_table_list = proto.Field(proto.BOOL, number=11) - - include_domain_list = proto.Field(proto.BOOL, number=12) + include_va_query = proto.Field(proto.BOOL, number=1,) + include_nested_va_query = proto.Field(proto.BOOL, number=2,) + include_human_interpretation = proto.Field(proto.BOOL, number=3,) + include_aqua_debug_response = proto.Field(proto.BOOL, number=4,) + time_override = proto.Field(proto.INT64, number=5,) + is_internal_google_user = proto.Field(proto.BOOL, number=6,) + ignore_cache = proto.Field(proto.BOOL, number=7,) + include_search_entities_rpc = proto.Field(proto.BOOL, number=8,) + include_list_column_annotations_rpc = proto.Field(proto.BOOL, number=9,) + include_virtual_analyst_entities = proto.Field(proto.BOOL, number=10,) + include_table_list = proto.Field(proto.BOOL, number=11,) + include_domain_list = proto.Field(proto.BOOL, number=12,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataqna_v1alpha/types/question_service.py b/google/cloud/dataqna_v1alpha/types/question_service.py index 8f39256..d23c0d9 100644 --- a/google/cloud/dataqna_v1alpha/types/question_service.py +++ b/google/cloud/dataqna_v1alpha/types/question_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - from google.cloud.dataqna_v1alpha.types import question as gcd_question from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback -from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore __protobuf__ = proto.module( @@ -37,39 +34,34 @@ class GetQuestionRequest(proto.Message): r"""A request to get a previously created question. - Attributes: name (str): Required. The unique identifier for the question. Example: ``projects/foo/locations/bar/questions/1234`` - read_mask (~.field_mask.FieldMask): + read_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to be retrieved. """ - name = proto.Field(proto.STRING, number=1) - - read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + name = proto.Field(proto.STRING, number=1,) + read_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask,) class CreateQuestionRequest(proto.Message): r"""Request to create a question resource. - Attributes: parent (str): Required. The name of the project this data source reference belongs to. Example: ``projects/foo/locations/bar`` - question (~.gcd_question.Question): + question (google.cloud.dataqna_v1alpha.types.Question): Required. The question to create. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) question = proto.Field(proto.MESSAGE, number=2, message=gcd_question.Question,) class ExecuteQuestionRequest(proto.Message): r"""Request to execute an interpretation. - Attributes: name (str): Required. The unique identifier for the question. Example: @@ -79,14 +71,12 @@ class ExecuteQuestionRequest(proto.Message): execute. """ - name = proto.Field(proto.STRING, number=1) - - interpretation_index = proto.Field(proto.INT32, number=2) + name = proto.Field(proto.STRING, number=1,) + interpretation_index = proto.Field(proto.INT32, number=2,) class GetUserFeedbackRequest(proto.Message): r"""Request to get user feedback. - Attributes: name (str): Required. The unique identifier for the user feedback. User @@ -94,28 +84,28 @@ class GetUserFeedbackRequest(proto.Message): ``projects/foo/locations/bar/questions/1234/userFeedback`` """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class UpdateUserFeedbackRequest(proto.Message): r"""Request to updates user feedback. - Attributes: - user_feedback (~.gcd_user_feedback.UserFeedback): + user_feedback (google.cloud.dataqna_v1alpha.types.UserFeedback): Required. The user feedback to update. This can be called even if there is no user feedback so far. The feedback's name field is used to identify the user feedback (and the corresponding question) to update. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): The list of fields to be updated. """ user_feedback = proto.Field( proto.MESSAGE, number=1, message=gcd_user_feedback.UserFeedback, ) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dataqna_v1alpha/types/user_feedback.py b/google/cloud/dataqna_v1alpha/types/user_feedback.py index 3c8bcab..8aa69bc 100644 --- a/google/cloud/dataqna_v1alpha/types/user_feedback.py +++ b/google/cloud/dataqna_v1alpha/types/user_feedback.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -25,7 +23,6 @@ class UserFeedback(proto.Message): r"""Feedback provided by a user. - Attributes: name (str): Required. The unique identifier for the user feedback. User @@ -33,7 +30,7 @@ class UserFeedback(proto.Message): ``projects/foo/locations/bar/questions/1234/userFeedback`` free_form_feedback (str): Free form user feedback, such as a text box. 
- rating (~.user_feedback.UserFeedback.UserFeedbackRating): + rating (google.cloud.dataqna_v1alpha.types.UserFeedback.UserFeedbackRating): The user feedback rating """ @@ -43,10 +40,8 @@ class UserFeedbackRating(proto.Enum): POSITIVE = 1 NEGATIVE = 2 - name = proto.Field(proto.STRING, number=1) - - free_form_feedback = proto.Field(proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + free_form_feedback = proto.Field(proto.STRING, number=2,) rating = proto.Field(proto.ENUM, number=3, enum=UserFeedbackRating,) diff --git a/noxfile.py b/noxfile.py index a57e24b..70417e8 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,22 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -45,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -70,17 +80,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".") + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -101,15 +115,18 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. 
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -122,16 +139,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -142,7 +169,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -174,9 +201,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/synth.py b/owlbot.py similarity index 69% rename from synth.py rename to owlbot.py index 23d0247..750f343 100644 --- a/synth.py +++ b/owlbot.py @@ -20,26 +20,20 @@ import synthtool.gcp as gcp from synthtool.languages import python -gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() -# ---------------------------------------------------------------------------- -# Generate functions GAPIC layer -# ---------------------------------------------------------------------------- +default_version = "v1alpha" -library = gapic.py_library( - service="functions", - version="v1", - bazel_target="//google/cloud/dataqna/v1alpha:dataqna-v1alpha-py", -) +for library in s.get_staging_dirs(default_version): + s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"]) -s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst"]) +s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=99, microgenerator=True) +templated_files = common.py_library(cov_level=98, microgenerator=True) s.move(templated_files, excludes=[ # the microgenerator has a good coverage rc file ".coveragerc", diff --git a/renovate.json b/renovate.json index 4fa9493..c048955 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,9 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"], + 
"pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/scripts/fixup_dataqna_v1alpha_keywords.py b/scripts/fixup_dataqna_v1alpha_keywords.py index 0670423..da5349a 100644 --- a/scripts/fixup_dataqna_v1alpha_keywords.py +++ b/scripts/fixup_dataqna_v1alpha_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import argparse import os import libcst as cst @@ -41,13 +39,12 @@ def partition( class dataqnaCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_question': ('parent', 'question', ), - 'execute_question': ('name', 'interpretation_index', ), - 'get_question': ('name', 'read_mask', ), - 'get_user_feedback': ('name', ), - 'suggest_queries': ('parent', 'scopes', 'query', 'suggestion_types', ), - 'update_user_feedback': ('user_feedback', 'update_mask', ), - + 'create_question': ('parent', 'question', ), + 'execute_question': ('name', 'interpretation_index', ), + 'get_question': ('name', 'read_mask', ), + 'get_user_feedback': ('name', ), + 'suggest_queries': ('parent', 'scopes', 'query', 'suggestion_types', ), + 'update_user_feedback': ('user_feedback', 'update_mask', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -78,7 +75,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/setup.py b/setup.py index b91f08d..4a5d0c8 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "0.1.1" +version = "0.2.0" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -41,12 +41,11 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.10.0", + "packaging >= 14.3", ), python_requires=">=3.6", - setup_requires=["libcst >= 0.2.5"], - scripts=["scripts/fixup_dataqna_v1alpha_keywords.py"], classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index f248089..0000000 --- a/synth.metadata +++ /dev/null @@ -1,142 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-data-qna.git", - "sha": "716cd7c25b330fe40ebd9ded097643779b579862" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5fdb685a684269e07c10c7518372eb5d7b6bc0a9", - "internalRef": "342906697" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f94318521f63085b9ccb43d42af89f153fb39f15" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f94318521f63085b9ccb43d42af89f153fb39f15" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", 
- "apiName": "functions", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/dataqna_v1alpha/services.rst", - "docs/dataqna_v1alpha/types.rst", - "docs/multiprocessing.rst", - "google/cloud/dataqna/__init__.py", - "google/cloud/dataqna/py.typed", - "google/cloud/dataqna_v1alpha/__init__.py", - "google/cloud/dataqna_v1alpha/py.typed", - "google/cloud/dataqna_v1alpha/services/__init__.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/__init__.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/async_client.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/client.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/__init__.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/base.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc.py", - "google/cloud/dataqna_v1alpha/services/auto_suggestion_service/transports/grpc_asyncio.py", - "google/cloud/dataqna_v1alpha/services/question_service/__init__.py", - "google/cloud/dataqna_v1alpha/services/question_service/async_client.py", - "google/cloud/dataqna_v1alpha/services/question_service/client.py", - "google/cloud/dataqna_v1alpha/services/question_service/transports/__init__.py", - "google/cloud/dataqna_v1alpha/services/question_service/transports/base.py", - "google/cloud/dataqna_v1alpha/services/question_service/transports/grpc.py", - "google/cloud/dataqna_v1alpha/services/question_service/transports/grpc_asyncio.py", - "google/cloud/dataqna_v1alpha/types/__init__.py", - "google/cloud/dataqna_v1alpha/types/annotated_string.py", - "google/cloud/dataqna_v1alpha/types/auto_suggestion_service.py", - 
"google/cloud/dataqna_v1alpha/types/question.py", - "google/cloud/dataqna_v1alpha/types/question_service.py", - "google/cloud/dataqna_v1alpha/types/user_feedback.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/fixup_dataqna_v1alpha_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/dataqna_v1alpha/__init__.py", - "tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py", - "tests/unit/gapic/dataqna_v1alpha/test_question_service.py" - ] -} \ No newline at end of file diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 0000000..eb64cb3 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.10.0 +packaging==14.3 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/dataqna_v1alpha/__init__.py b/tests/unit/gapic/dataqna_v1alpha/__init__.py index 8b13789..4de6597 100644 --- a/tests/unit/gapic/dataqna_v1alpha/__init__.py +++ b/tests/unit/gapic/dataqna_v1alpha/__init__.py @@ -1 +1,15 @@ - +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py b/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py index de0d7b1..daacdba 100644 --- a/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py +++ b/tests/unit/gapic/dataqna_v1alpha/test_auto_suggestion_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,13 +23,13 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataqna_v1alpha.services.auto_suggestion_service import ( AutoSuggestionServiceAsyncClient, @@ -39,8 +38,38 @@ AutoSuggestionServiceClient, ) from google.cloud.dataqna_v1alpha.services.auto_suggestion_service import transports +from google.cloud.dataqna_v1alpha.services.auto_suggestion_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataqna_v1alpha.services.auto_suggestion_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataqna_v1alpha.types import auto_suggestion_service from google.oauth2 import service_account +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -89,26 +118,48 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [AutoSuggestionServiceClient, AutoSuggestionServiceAsyncClient] + "client_class", [AutoSuggestionServiceClient, AutoSuggestionServiceAsyncClient,] +) +def test_auto_suggestion_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataqna.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [AutoSuggestionServiceClient, AutoSuggestionServiceAsyncClient,] ) def test_auto_suggestion_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert 
client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataqna.googleapis.com:443" def test_auto_suggestion_service_client_get_transport_class(): transport = AutoSuggestionServiceClient.get_transport_class() - assert transport == transports.AutoSuggestionServiceGrpcTransport + available_transports = [ + transports.AutoSuggestionServiceGrpcTransport, + ] + assert transport in available_transports transport = AutoSuggestionServiceClient.get_transport_class("grpc") assert transport == transports.AutoSuggestionServiceGrpcTransport @@ -144,7 +195,7 @@ def test_auto_suggestion_service_client_client_options( ): # Check that if channel is provided we won't create a new one. with mock.patch.object(AutoSuggestionServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -163,7 +214,7 @@ def test_auto_suggestion_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -179,7 +230,7 @@ def test_auto_suggestion_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -195,7 +246,7 @@ def test_auto_suggestion_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -223,7 +274,7 @@ def test_auto_suggestion_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -284,29 +335,25 @@ def test_auto_suggestion_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + 
credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -315,66 +362,53 @@ def test_auto_suggestion_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -404,7 +438,7 @@ def test_auto_suggestion_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -438,7 +472,7 @@ def test_auto_suggestion_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -457,7 +491,7 @@ def test_auto_suggestion_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -467,7 +501,7 @@ def test_suggest_queries( transport: str = "grpc", request_type=auto_suggestion_service.SuggestQueriesRequest ): client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -478,17 +512,14 @@ def test_suggest_queries( with mock.patch.object(type(client.transport.suggest_queries), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = auto_suggestion_service.SuggestQueriesResponse() - response = client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == auto_suggestion_service.SuggestQueriesRequest() # Establish that the response is the type that we expect. - assert isinstance(response, auto_suggestion_service.SuggestQueriesResponse) @@ -496,13 +527,28 @@ def test_suggest_queries_from_dict(): test_suggest_queries(request_type=dict) +def test_suggest_queries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = AutoSuggestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.suggest_queries), "__call__") as call: + client.suggest_queries() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == auto_suggestion_service.SuggestQueriesRequest() + + @pytest.mark.asyncio async def test_suggest_queries_async( transport: str = "grpc_asyncio", request_type=auto_suggestion_service.SuggestQueriesRequest, ): client = AutoSuggestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -515,13 +561,11 @@ async def test_suggest_queries_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( auto_suggestion_service.SuggestQueriesResponse() ) - response = await client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == auto_suggestion_service.SuggestQueriesRequest() # Establish that the response is the type that we expect. @@ -535,18 +579,18 @@ async def test_suggest_queries_async_from_dict(): def test_suggest_queries_field_headers(): client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = auto_suggestion_service.SuggestQueriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.suggest_queries), "__call__") as call: call.return_value = auto_suggestion_service.SuggestQueriesResponse() - client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. @@ -562,12 +606,13 @@ def test_suggest_queries_field_headers(): @pytest.mark.asyncio async def test_suggest_queries_field_headers_async(): client = AutoSuggestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = auto_suggestion_service.SuggestQueriesRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -575,7 +620,6 @@ async def test_suggest_queries_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( auto_suggestion_service.SuggestQueriesResponse() ) - await client.suggest_queries(request) # Establish that the underlying gRPC stub method was called. @@ -591,16 +635,16 @@ async def test_suggest_queries_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AutoSuggestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.AutoSuggestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoSuggestionServiceClient( @@ -610,7 +654,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.AutoSuggestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = AutoSuggestionServiceClient( @@ -621,7 +665,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.AutoSuggestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = AutoSuggestionServiceClient(transport=transport) assert client.transport is transport @@ -630,13 +674,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.AutoSuggestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.AutoSuggestionServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -651,8 +695,8 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() @@ -660,16 +704,16 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) assert isinstance(client.transport, transports.AutoSuggestionServiceGrpcTransport,) def test_auto_suggestion_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.AutoSuggestionServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -681,7 +725,7 @@ def test_auto_suggestion_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.AutoSuggestionServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -692,15 +736,37 @@ def test_auto_suggestion_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_auto_suggestion_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataqna_v1alpha.services.auto_suggestion_service.transports.AutoSuggestionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AutoSuggestionServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_auto_suggestion_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataqna_v1alpha.services.auto_suggestion_service.transports.AutoSuggestionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoSuggestionServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -713,19 +779,33 @@ def test_auto_suggestion_service_base_transport_with_credentials_file(): def test_auto_suggestion_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataqna_v1alpha.services.auto_suggestion_service.transports.AutoSuggestionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.AutoSuggestionServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_auto_suggestion_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AutoSuggestionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_auto_suggestion_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) AutoSuggestionServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -733,23 +813,208 @@ def test_auto_suggestion_service_auth_adc(): ) -def test_auto_suggestion_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoSuggestionServiceGrpcTransport, + transports.AutoSuggestionServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_auto_suggestion_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.AutoSuggestionServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoSuggestionServiceGrpcTransport, + transports.AutoSuggestionServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_auto_suggestion_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoSuggestionServiceGrpcTransport, grpc_helpers), + (transports.AutoSuggestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_auto_suggestion_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataqna.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoSuggestionServiceGrpcTransport, grpc_helpers), + (transports.AutoSuggestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_auto_suggestion_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AutoSuggestionServiceGrpcTransport, grpc_helpers), + (transports.AutoSuggestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_auto_suggestion_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AutoSuggestionServiceGrpcTransport, + transports.AutoSuggestionServiceGrpcAsyncIOTransport, + ], +) +def test_auto_suggestion_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_auto_suggestion_service_host_no_port(): client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataqna.googleapis.com" ), @@ -759,7 +1024,7 @@ def test_auto_suggestion_service_host_no_port(): def test_auto_suggestion_service_host_with_port(): client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataqna.googleapis.com:8000" ), @@ -768,7 +1033,7 @@ def test_auto_suggestion_service_host_with_port(): def test_auto_suggestion_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.AutoSuggestionServiceGrpcTransport( @@ -780,7 +1045,7 @@ def test_auto_suggestion_service_grpc_transport_channel(): def test_auto_suggestion_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.AutoSuggestionServiceGrpcAsyncIOTransport( @@ -791,6 +1056,8 @@ def test_auto_suggestion_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -805,7 +1072,7 @@ def test_auto_suggestion_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -813,9 +1080,9 @@ def test_auto_suggestion_service_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -834,11 +1101,17 @@ def test_auto_suggestion_service_transport_channel_mtls_with_client_cert_source( scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -854,7 +1127,7 @@ def test_auto_suggestion_service_transport_channel_mtls_with_adc(transport_class ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel @@ -875,13 +1148,16 @@ def test_auto_suggestion_service_transport_channel_mtls_with_adc(transport_class scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -902,7 +1178,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = AutoSuggestionServiceClient.common_folder_path(folder) assert expected == actual @@ -921,7 +1196,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = AutoSuggestionServiceClient.common_organization_path(organization) assert expected == actual @@ -940,7 +1214,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = AutoSuggestionServiceClient.common_project_path(project) assert expected == actual @@ -960,7 +1233,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -987,7 +1259,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.AutoSuggestionServiceTransport, "_prep_wrapped_messages" ) as prep: client = AutoSuggestionServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -996,6 +1268,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = AutoSuggestionServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dataqna_v1alpha/test_question_service.py b/tests/unit/gapic/dataqna_v1alpha/test_question_service.py index 5ddea44..34d4d3b 100644 --- a/tests/unit/gapic/dataqna_v1alpha/test_question_service.py +++ b/tests/unit/gapic/dataqna_v1alpha/test_question_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,19 +23,25 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.dataqna_v1alpha.services.question_service import ( QuestionServiceAsyncClient, ) from google.cloud.dataqna_v1alpha.services.question_service import QuestionServiceClient from google.cloud.dataqna_v1alpha.services.question_service import transports +from google.cloud.dataqna_v1alpha.services.question_service.transports.base import ( + _API_CORE_VERSION, +) +from google.cloud.dataqna_v1alpha.services.question_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.cloud.dataqna_v1alpha.types import annotated_string from google.cloud.dataqna_v1alpha.types import question from google.cloud.dataqna_v1alpha.types import question as gcd_question @@ -44,10 +49,34 @@ from google.cloud.dataqna_v1alpha.types import user_feedback from google.cloud.dataqna_v1alpha.types import user_feedback as gcd_user_feedback from google.oauth2 import service_account -from google.protobuf import any_pb2 as gp_any # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from google.rpc import status_pb2 as status # type: ignore +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-api-core >= 1.26.0 is required: +# - Delete all the api-core and auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +requires_api_core_lt_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"), + reason="This test requires google-api-core < 1.26.0", +) + +requires_api_core_gte_1_26_0 = pytest.mark.skipif( + packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"), + reason="This test requires google-api-core >= 1.26.0", +) def client_cert_source_callback(): @@ -95,26 +124,48 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [QuestionServiceClient, QuestionServiceAsyncClient] + "client_class", [QuestionServiceClient, QuestionServiceAsyncClient,] +) +def test_question_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "dataqna.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [QuestionServiceClient, QuestionServiceAsyncClient,] ) def test_question_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "dataqna.googleapis.com:443" def test_question_service_client_get_transport_class(): transport = QuestionServiceClient.get_transport_class() - assert transport == transports.QuestionServiceGrpcTransport + available_transports = [ + transports.QuestionServiceGrpcTransport, + ] + assert transport in available_transports transport = QuestionServiceClient.get_transport_class("grpc") assert transport == transports.QuestionServiceGrpcTransport @@ -146,7 +197,7 @@ def test_question_service_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(QuestionServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -165,7 +216,7 @@ def test_question_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -181,7 +232,7 @@ def test_question_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -197,7 +248,7 @@ def test_question_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -225,7 +276,7 @@ def test_question_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -286,29 +337,25 @@ def test_question_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -317,66 +364,53 @@ def test_question_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -402,7 +436,7 @@ def test_question_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -432,7 +466,7 @@ def test_question_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -451,7 +485,7 @@ def test_question_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -461,7 +495,7 @@ def test_get_question( transport: str = "grpc", request_type=question_service.GetQuestionRequest ): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -478,27 +512,19 @@ def test_get_question( data_source_annotations=["data_source_annotations_value"], user_email="user_email_value", ) - response = client.get_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == question_service.GetQuestionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -506,12 +532,27 @@ def test_get_question_from_dict(): test_get_question(request_type=dict) +def test_get_question_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QuestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_question), "__call__") as call: + client.get_question() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == question_service.GetQuestionRequest() + + @pytest.mark.asyncio async def test_get_question_async( transport: str = "grpc_asyncio", request_type=question_service.GetQuestionRequest ): client = QuestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -530,26 +571,19 @@ async def test_get_question_async( user_email="user_email_value", ) ) - response = await client.get_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == question_service.GetQuestionRequest() # Establish that the response is the type that we expect. assert isinstance(response, question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -559,17 +593,17 @@ async def test_get_question_async_from_dict(): def test_get_question_field_headers(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.GetQuestionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_question), "__call__") as call: call.return_value = question.Question() - client.get_question(request) # Establish that the underlying gRPC stub method was called. @@ -584,17 +618,19 @@ def test_get_question_field_headers(): @pytest.mark.asyncio async def test_get_question_field_headers_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.GetQuestionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_question), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(question.Question()) - await client.get_question(request) # Establish that the underlying gRPC stub method was called. @@ -608,13 +644,12 @@ async def test_get_question_field_headers_async(): def test_get_question_flattened(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_question), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = question.Question() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.get_question(name="name_value",) @@ -623,12 +658,11 @@ def test_get_question_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_question_flattened_error(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -640,7 +674,9 @@ def test_get_question_flattened_error(): @pytest.mark.asyncio async def test_get_question_flattened_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_question), "__call__") as call: @@ -656,13 +692,14 @@ async def test_get_question_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_question_flattened_error_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -676,7 +713,7 @@ def test_create_question( transport: str = "grpc", request_type=question_service.CreateQuestionRequest ): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -693,27 +730,19 @@ def test_create_question( data_source_annotations=["data_source_annotations_value"], user_email="user_email_value", ) - response = client.create_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == question_service.CreateQuestionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, gcd_question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -721,12 +750,27 @@ def test_create_question_from_dict(): test_create_question(request_type=dict) +def test_create_question_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QuestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_question), "__call__") as call: + client.create_question() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == question_service.CreateQuestionRequest() + + @pytest.mark.asyncio async def test_create_question_async( transport: str = "grpc_asyncio", request_type=question_service.CreateQuestionRequest ): client = QuestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -745,26 +789,19 @@ async def test_create_question_async( user_email="user_email_value", ) ) - response = await client.create_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == question_service.CreateQuestionRequest() # Establish that the response is the type that we expect. assert isinstance(response, gcd_question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -774,17 +811,17 @@ async def test_create_question_async_from_dict(): def test_create_question_field_headers(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.CreateQuestionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_question), "__call__") as call: call.return_value = gcd_question.Question() - client.create_question(request) # Establish that the underlying gRPC stub method was called. @@ -799,11 +836,14 @@ def test_create_question_field_headers(): @pytest.mark.asyncio async def test_create_question_field_headers_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.CreateQuestionRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -811,7 +851,6 @@ async def test_create_question_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gcd_question.Question() ) - await client.create_question(request) # Establish that the underlying gRPC stub method was called. @@ -825,13 +864,12 @@ async def test_create_question_field_headers_async(): def test_create_question_flattened(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_question), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = gcd_question.Question() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_question( @@ -842,14 +880,12 @@ def test_create_question_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].question == gcd_question.Question(name="name_value") def test_create_question_flattened_error(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -863,7 +899,9 @@ def test_create_question_flattened_error(): @pytest.mark.asyncio async def test_create_question_flattened_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_question), "__call__") as call: @@ -883,15 +921,15 @@ async def test_create_question_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].question == gcd_question.Question(name="name_value") @pytest.mark.asyncio async def test_create_question_flattened_error_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -907,7 +945,7 @@ def test_execute_question( transport: str = "grpc", request_type=question_service.ExecuteQuestionRequest ): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -924,27 +962,19 @@ def test_execute_question( data_source_annotations=["data_source_annotations_value"], user_email="user_email_value", ) - response = client.execute_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == question_service.ExecuteQuestionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -952,13 +982,28 @@ def test_execute_question_from_dict(): test_execute_question(request_type=dict) +def test_execute_question_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QuestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.execute_question), "__call__") as call: + client.execute_question() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == question_service.ExecuteQuestionRequest() + + @pytest.mark.asyncio async def test_execute_question_async( transport: str = "grpc_asyncio", request_type=question_service.ExecuteQuestionRequest, ): client = QuestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -977,26 +1022,19 @@ async def test_execute_question_async( user_email="user_email_value", ) ) - response = await client.execute_question(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == question_service.ExecuteQuestionRequest() # Establish that the response is the type that we expect. assert isinstance(response, question.Question) - assert response.name == "name_value" - assert response.scopes == ["scopes_value"] - assert response.query == "query_value" - assert response.data_source_annotations == ["data_source_annotations_value"] - assert response.user_email == "user_email_value" @@ -1006,17 +1044,17 @@ async def test_execute_question_async_from_dict(): def test_execute_question_field_headers(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.ExecuteQuestionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.execute_question), "__call__") as call: call.return_value = question.Question() - client.execute_question(request) # Establish that the underlying gRPC stub method was called. @@ -1031,17 +1069,19 @@ def test_execute_question_field_headers(): @pytest.mark.asyncio async def test_execute_question_field_headers_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.ExecuteQuestionRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.execute_question), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(question.Question()) - await client.execute_question(request) # Establish that the underlying gRPC stub method was called. @@ -1055,13 +1095,12 @@ async def test_execute_question_field_headers_async(): def test_execute_question_flattened(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.execute_question), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = question.Question() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.execute_question( @@ -1072,14 +1111,12 @@ def test_execute_question_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].interpretation_index == 2159 def test_execute_question_flattened_error(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1093,7 +1130,9 @@ def test_execute_question_flattened_error(): @pytest.mark.asyncio async def test_execute_question_flattened_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.execute_question), "__call__") as call: @@ -1111,15 +1150,15 @@ async def test_execute_question_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].interpretation_index == 2159 @pytest.mark.asyncio async def test_execute_question_flattened_error_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1135,7 +1174,7 @@ def test_get_user_feedback( transport: str = "grpc", request_type=question_service.GetUserFeedbackRequest ): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1152,23 +1191,17 @@ def test_get_user_feedback( free_form_feedback="free_form_feedback_value", rating=user_feedback.UserFeedback.UserFeedbackRating.POSITIVE, ) - response = client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == question_service.GetUserFeedbackRequest() # Establish that the response is the type that we expect. - assert isinstance(response, user_feedback.UserFeedback) - assert response.name == "name_value" - assert response.free_form_feedback == "free_form_feedback_value" - assert response.rating == user_feedback.UserFeedback.UserFeedbackRating.POSITIVE @@ -1176,13 +1209,30 @@ def test_get_user_feedback_from_dict(): test_get_user_feedback(request_type=dict) +def test_get_user_feedback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QuestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_user_feedback), "__call__" + ) as call: + client.get_user_feedback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == question_service.GetUserFeedbackRequest() + + @pytest.mark.asyncio async def test_get_user_feedback_async( transport: str = "grpc_asyncio", request_type=question_service.GetUserFeedbackRequest, ): client = QuestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1201,22 +1251,17 @@ async def test_get_user_feedback_async( rating=user_feedback.UserFeedback.UserFeedbackRating.POSITIVE, ) ) - response = await client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == question_service.GetUserFeedbackRequest() # Establish that the response is the type that we expect. assert isinstance(response, user_feedback.UserFeedback) - assert response.name == "name_value" - assert response.free_form_feedback == "free_form_feedback_value" - assert response.rating == user_feedback.UserFeedback.UserFeedbackRating.POSITIVE @@ -1226,11 +1271,12 @@ async def test_get_user_feedback_async_from_dict(): def test_get_user_feedback_field_headers(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.GetUserFeedbackRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1238,7 +1284,6 @@ def test_get_user_feedback_field_headers(): type(client.transport.get_user_feedback), "__call__" ) as call: call.return_value = user_feedback.UserFeedback() - client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. @@ -1253,11 +1298,14 @@ def test_get_user_feedback_field_headers(): @pytest.mark.asyncio async def test_get_user_feedback_field_headers_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.GetUserFeedbackRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1267,7 +1315,6 @@ async def test_get_user_feedback_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( user_feedback.UserFeedback() ) - await client.get_user_feedback(request) # Establish that the underlying gRPC stub method was called. @@ -1281,7 +1328,7 @@ async def test_get_user_feedback_field_headers_async(): def test_get_user_feedback_flattened(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1289,7 +1336,6 @@ def test_get_user_feedback_flattened(): ) as call: # Designate an appropriate return value for the call. 
call.return_value = user_feedback.UserFeedback() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_user_feedback(name="name_value",) @@ -1298,12 +1344,11 @@ def test_get_user_feedback_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_user_feedback_flattened_error(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1315,7 +1360,9 @@ def test_get_user_feedback_flattened_error(): @pytest.mark.asyncio async def test_get_user_feedback_flattened_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1335,13 +1382,14 @@ async def test_get_user_feedback_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_user_feedback_flattened_error_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1355,7 +1403,7 @@ def test_update_user_feedback( transport: str = "grpc", request_type=question_service.UpdateUserFeedbackRequest ): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1372,23 +1420,17 @@ def test_update_user_feedback( free_form_feedback="free_form_feedback_value", rating=gcd_user_feedback.UserFeedback.UserFeedbackRating.POSITIVE, ) - response = client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == question_service.UpdateUserFeedbackRequest() # Establish that the response is the type that we expect. - assert isinstance(response, gcd_user_feedback.UserFeedback) - assert response.name == "name_value" - assert response.free_form_feedback == "free_form_feedback_value" - assert response.rating == gcd_user_feedback.UserFeedback.UserFeedbackRating.POSITIVE @@ -1396,13 +1438,30 @@ def test_update_user_feedback_from_dict(): test_update_user_feedback(request_type=dict) +def test_update_user_feedback_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = QuestionServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_user_feedback), "__call__" + ) as call: + client.update_user_feedback() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == question_service.UpdateUserFeedbackRequest() + + @pytest.mark.asyncio async def test_update_user_feedback_async( transport: str = "grpc_asyncio", request_type=question_service.UpdateUserFeedbackRequest, ): client = QuestionServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1421,22 +1480,17 @@ async def test_update_user_feedback_async( rating=gcd_user_feedback.UserFeedback.UserFeedbackRating.POSITIVE, ) ) - response = await client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == question_service.UpdateUserFeedbackRequest() # Establish that the response is the type that we expect. assert isinstance(response, gcd_user_feedback.UserFeedback) - assert response.name == "name_value" - assert response.free_form_feedback == "free_form_feedback_value" - assert response.rating == gcd_user_feedback.UserFeedback.UserFeedbackRating.POSITIVE @@ -1446,11 +1500,12 @@ async def test_update_user_feedback_async_from_dict(): def test_update_user_feedback_field_headers(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.UpdateUserFeedbackRequest() + request.user_feedback.name = "user_feedback.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1458,7 +1513,6 @@ def test_update_user_feedback_field_headers(): type(client.transport.update_user_feedback), "__call__" ) as call: call.return_value = gcd_user_feedback.UserFeedback() - client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. @@ -1476,11 +1530,14 @@ def test_update_user_feedback_field_headers(): @pytest.mark.asyncio async def test_update_user_feedback_field_headers_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = question_service.UpdateUserFeedbackRequest() + request.user_feedback.name = "user_feedback.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1490,7 +1547,6 @@ async def test_update_user_feedback_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gcd_user_feedback.UserFeedback() ) - await client.update_user_feedback(request) # Establish that the underlying gRPC stub method was called. @@ -1507,7 +1563,7 @@ async def test_update_user_feedback_field_headers_async(): def test_update_user_feedback_flattened(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1515,28 +1571,25 @@ def test_update_user_feedback_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = gcd_user_feedback.UserFeedback() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.update_user_feedback( user_feedback=gcd_user_feedback.UserFeedback(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].user_feedback == gcd_user_feedback.UserFeedback( name="name_value" ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) def test_update_user_feedback_flattened_error(): - client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1544,13 +1597,15 @@ def test_update_user_feedback_flattened_error(): client.update_user_feedback( question_service.UpdateUserFeedbackRequest(), user_feedback=gcd_user_feedback.UserFeedback(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) @pytest.mark.asyncio async def test_update_user_feedback_flattened_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1566,24 +1621,24 @@ async def test_update_user_feedback_flattened_async(): # using the keyword arguments to the method. response = await client.update_user_feedback( user_feedback=gcd_user_feedback.UserFeedback(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].user_feedback == gcd_user_feedback.UserFeedback( name="name_value" ) - - assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) @pytest.mark.asyncio async def test_update_user_feedback_flattened_error_async(): - client = QuestionServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1591,23 +1646,23 @@ async def test_update_user_feedback_flattened_error_async(): await client.update_user_feedback( question_service.UpdateUserFeedbackRequest(), user_feedback=gcd_user_feedback.UserFeedback(name="name_value"), - update_mask=field_mask.FieldMask(paths=["paths_value"]), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.QuestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.QuestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = QuestionServiceClient( @@ -1617,7 +1672,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.QuestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = QuestionServiceClient( @@ -1628,7 +1683,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.QuestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = QuestionServiceClient(transport=transport) assert client.transport is transport @@ -1637,13 +1692,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.QuestionServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.QuestionServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1658,23 +1713,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = QuestionServiceClient(credentials=credentials.AnonymousCredentials(),) + client = QuestionServiceClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.QuestionServiceGrpcTransport,) def test_question_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.QuestionServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1686,7 +1741,7 @@ def test_question_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.QuestionServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1703,15 +1758,37 @@ def test_question_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_question_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.cloud.dataqna_v1alpha.services.question_service.transports.QuestionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.QuestionServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_question_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataqna_v1alpha.services.question_service.transports.QuestionServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.QuestionServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1724,19 +1801,33 @@ def test_question_service_base_transport_with_credentials_file(): def test_question_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.cloud.dataqna_v1alpha.services.question_service.transports.QuestionServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.QuestionServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_question_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + QuestionServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_question_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) QuestionServiceClient() adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), @@ -1744,23 +1835,204 @@ def test_question_service_auth_adc(): ) -def test_question_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.QuestionServiceGrpcTransport, + transports.QuestionServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_question_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.QuestionServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QuestionServiceGrpcTransport, + transports.QuestionServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_question_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=("https://www.googleapis.com/auth/cloud-platform",), quota_project_id="octopus", ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.QuestionServiceGrpcTransport, grpc_helpers), + (transports.QuestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_gte_1_26_0 +def test_question_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataqna.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.QuestionServiceGrpcTransport, grpc_helpers), + (transports.QuestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_question_service_transport_create_channel_old_api_core( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus") + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.QuestionServiceGrpcTransport, grpc_helpers), + (transports.QuestionServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +@requires_api_core_lt_1_26_0 +def test_question_service_transport_create_channel_user_scopes( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataqna.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + scopes=["1", "2"], + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.QuestionServiceGrpcTransport, + transports.QuestionServiceGrpcAsyncIOTransport, + ], +) +def test_question_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_question_service_host_no_port(): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataqna.googleapis.com" ), @@ -1770,7 +2042,7 @@ def test_question_service_host_no_port(): def test_question_service_host_with_port(): client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="dataqna.googleapis.com:8000" ), @@ -1779,7 +2051,7 @@ def test_question_service_host_with_port(): def test_question_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.QuestionServiceGrpcTransport( @@ -1791,7 +2063,7 @@ def test_question_service_grpc_transport_channel(): def test_question_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. 
transport = transports.QuestionServiceGrpcAsyncIOTransport( @@ -1802,6 +2074,8 @@ def test_question_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1816,7 +2090,7 @@ def test_question_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1824,9 +2098,9 @@ def test_question_service_transport_channel_mtls_with_client_cert_source( mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -1845,11 +2119,17 @@ def test_question_service_transport_channel_mtls_with_client_cert_source( scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -1865,7 +2145,7 @@ def test_question_service_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel @@ -1886,6 +2166,10 @@ def test_question_service_transport_channel_mtls_with_adc(transport_class): scopes=("https://www.googleapis.com/auth/cloud-platform",), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel @@ -1894,7 +2178,6 @@ def test_question_path(): project = "squid" location = "clam" question = "whelk" - expected = "projects/{project}/locations/{location}/questions/{question}".format( project=project, location=location, question=question, ) @@ -1919,7 +2202,6 @@ def test_user_feedback_path(): project = "cuttlefish" location = "mussel" question = "winkle" - expected = "projects/{project}/locations/{location}/questions/{question}/userFeedback".format( project=project, location=location, question=question, ) @@ -1942,7 +2224,6 @@ def test_parse_user_feedback_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -1963,7 +2244,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = QuestionServiceClient.common_folder_path(folder) assert expected == actual @@ -1982,7 +2262,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = QuestionServiceClient.common_organization_path(organization) assert expected == actual @@ -2001,7 +2280,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) actual = QuestionServiceClient.common_project_path(project) assert expected == actual @@ -2021,7 +2299,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2048,7 +2325,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.QuestionServiceTransport, "_prep_wrapped_messages" ) as prep: client = QuestionServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2057,6 +2334,6 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = QuestionServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info)
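The new tests in this diff are gated with pytest markers such as requires_google_auth_gte_1_25_0, requires_google_auth_lt_1_25_0, and requires_api_core_lt_1_26_0; their definitions are added earlier in this test module and are not part of the hunks shown here. A minimal sketch of how such skip markers are typically constructed, assuming the module compares the installed google-auth version with packaging.version (the exact definitions in the generated file may differ):

# Sketch only: illustrates the version-gate markers referenced above; the real
# definitions live near the top of test_question_service.py and may differ.
import google.auth
import packaging.version
import pytest

# Assumes google.auth exposes __version__; the generated module may instead
# read the version via pkg_resources or a module-level constant.
_GOOGLE_AUTH_VERSION = packaging.version.parse(google.auth.__version__)

# Run the test only when google-auth is new enough to accept default_scopes.
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    _GOOGLE_AUTH_VERSION < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)

# Run the test only on older google-auth releases without default_scopes.
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    _GOOGLE_AUTH_VERSION >= packaging.version.parse("1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)

The requires_api_core_gte_1_26_0 and requires_api_core_lt_1_26_0 markers used by the create_channel tests would follow the same pattern, keyed on the installed google-api-core version instead.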