diff --git a/.coveragerc b/.coveragerc index d097511c..34417c3f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -11,3 +11,5 @@ exclude_lines = def __repr__ # Ignore abstract methods raise NotImplementedError + # Ignore coverage for code specific to static type checkers + TYPE_CHECKING diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index b771c37c..51b21a62 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:a1a891041baa4ffbe1a809ac1b8b9b4a71887293c9101c88e8e255943c5aec2d + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:a7aef70df5f13313ddc027409fc8f3151422ec2a57ac8730fce8fa75c060d5bb +# created: 2025-04-10T17:00:10.042601326Z diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml index 3caf68d4..c8b40cc7 100644 --- a/.github/.OwlBot.yaml +++ b/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. 
docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 557e39e1..1b023b72 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python @googleapis/actools-python +# @googleapis/yoshi-python @googleapis/actools-python are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/actools-python -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners +# @googleapis/python-samples-reviewers @googleapis/actools-python are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/actools-python diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 00000000..311ebbb8 --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" diff --git a/google/__init__.py b/.github/auto-label.yaml similarity index 70% rename from google/__init__.py rename to .github/auto-label.yaml index 0d0a4c3a..21786a4e 100644 --- a/google/__init__.py +++ b/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -11,14 +11,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +requestsize: + enabled: true -"""Google namespace package.""" - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +path: + pullrequest: true + paths: + samples: "samples" diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml new file mode 100644 index 00000000..1618464d --- /dev/null +++ b/.github/blunderbuss.yml @@ -0,0 +1,17 @@ +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. +assign_issues: + - googleapis/actools-python + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - googleapis/actools-python + +assign_prs: + - googleapis/actools-python diff --git a/.github/cherry-pick-bot.yml b/.github/cherry-pick-bot.yml new file mode 100644 index 00000000..1e9cfcd3 --- /dev/null +++ b/.github/cherry-pick-bot.yml @@ -0,0 +1,2 @@ +enabled: true + diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad05..29601ad4 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,11 @@ releaseType: python +handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 00000000..50e8bd30 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1,2 @@ +enabled: 
true +multiScmName: python-api-core diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index e621885d..b724bada 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -1,3 +1,56 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `main` +- pattern: main + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true + requiredStatusCheckContexts: + - 'cla/google' + # No Kokoro: the following are Github actions + - 'lint' + - 'mypy' + - 'unit_grpc_gcp-3.7' + - 'unit_grpc_gcp-3.8' + - 'unit_grpc_gcp-3.9' + - 'unit_grpc_gcp-3.10' + - 'unit_grpc_gcp-3.11' + - 'unit_grpc_gcp-3.12' + - 'unit_grpc_gcp-3.13' + - 'unit_grpc_gcp-3.14' + - 'unit-3.7' + - 'unit-3.8' + - 'unit-3.9' + - 'unit-3.10' + - 'unit-3.11' + - 'unit-3.12' + - 'unit-3.13' + - 'unit-3.14' + - 'unit_wo_grpc-3.10' + - 'unit_wo_grpc-3.11' + - 'unit_wo_grpc-3.12' + - 'unit_wo_grpc-3.13' + - 'unit_wo_grpc-3.14' + - 'unit_w_prerelease_deps-3.7' + - 'unit_w_prerelease_deps-3.8' + - 'unit_w_prerelease_deps-3.9' + - 'unit_w_prerelease_deps-3.10' + - 'unit_w_prerelease_deps-3.11' + - 'unit_w_prerelease_deps-3.12' + - 'unit_w_prerelease_deps-3.13' + - 'unit_w_prerelease_deps-3.14' + - 'unit_w_async_rest_extra-3.7' + - 'unit_w_async_rest_extra-3.8' + - 'unit_w_async_rest_extra-3.9' + - 'unit_w_async_rest_extra-3.10' + - 'unit_w_async_rest_extra-3.11' + - 'unit_w_async_rest_extra-3.12' + - 'unit_w_async_rest_extra-3.13' + - 'unit_w_async_rest_extra-3.14' + - 'cover' + - 'docs' + - 'docfx' permissionRules: - team: actools-python permission: admin diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..2833fe98 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main 
+name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..1051da0b --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 00000000..e6a79291 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,22 @@ +on: + pull_request: + branches: + - main +name: mypy +jobs: + mypy: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run mypy + run: | + nox -s mypy diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 
00000000..f260a6a5 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,82 @@ +name: "Unit tests" + +on: + pull_request: + branches: + - main + +jobs: + run-unittests: + name: unit${{ matrix.option }}-${{ matrix.python }} + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 + strategy: + matrix: + option: ["", "_grpc_gcp", "_wo_grpc", "_w_prerelease_deps", "_w_async_rest_extra"] + python: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + - "3.12" + - "3.13" + - "3.14" + exclude: + - option: "_wo_grpc" + python: 3.7 + - option: "_wo_grpc" + python: 3.8 + - option: "_wo_grpc" + python: 3.9 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + allow-prereleases: true + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage${{ matrix.option }}-${{matrix.python }} + run: | + nox -s unit${{ matrix.option }}-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v4 + with: + name: coverage-artifact-${{ matrix.option }}-${{ matrix.python }} + path: .coverage${{ matrix.option }}-${{ matrix.python }} + include-hidden-files: true + + report-coverage: + name: cover + runs-on: ubuntu-latest + needs: + - run-unittests + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - 
name: Download coverage results + uses: actions/download-artifact@v4 + with: + path: .coverage-results/ + - name: Report coverage results + run: | + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* + coverage report --show-missing --fail-under=100 diff --git a/.gitignore b/.gitignore index 99c3a144..168b201f 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 8b9fd8a1..d41b45aa 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-api-core" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,20 +30,19 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") - -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. 
-# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot @@ -53,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/.kokoro/continuous/prerelease-deps.cfg b/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 00000000..3595fb43 --- /dev/null +++ b/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index 4e1b1fb8..00000000 --- a/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,67 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:20.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - python3-distutils \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -CMD ["python3.8"] diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg deleted file mode 100644 index 4847856f..00000000 --- a/.kokoro/docs/common.cfg +++ /dev/null @@ -1,65 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-api-core/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-api-core/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. 
-env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} \ No newline at end of file diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index d1ed51eb..00000000 --- a/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-api-core/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d..00000000 --- a/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh index f5251425..c435402f 100755 --- a/.kokoro/populate-secrets.sh +++ b/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/.kokoro/presubmit/prerelease-deps.cfg b/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 00000000..3595fb43 --- /dev/null +++ b/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh deleted file mode 100755 index 8acb14e8..00000000 --- a/.kokoro/publish-docs.sh +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# Install nox -python3 -m pip install --user --upgrade --quiet nox -python3 -m nox --version - -# build docs -nox -s docs - -python3 -m pip install --user gcp-docuploader - -# create metadata -python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. 
-python3 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release.sh b/.kokoro/release.sh deleted file mode 100755 index 0728ce11..00000000 --- a/.kokoro/release.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install gcp-releasetool -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") -cd github/python-api-core -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index 586e7649..00000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,30 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-api-core/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-api-core/.kokoro/release.sh" -} - -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d..00000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg index f5dddb4b..1a2b87b2 100644 --- a/.kokoro/samples/lint/common.cfg +++ b/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-api-core/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.10/common.cfg similarity index 87% rename from .kokoro/samples/python3.6/common.cfg rename to .kokoro/samples/python3.10/common.cfg index 7b4f5cd0..40fb8d81 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.10/common.cfg @@ -10,13 +10,13 @@ action { # Specify which tests to run env_vars: { key: "RUN_TESTS_SESSION" - value: "py-3.6" + value: "py-3.10" } # Declare build specific Cloud project. env_vars: { key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" + value: "python-docs-samples-tests-310" } env_vars: { @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-api-core/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.10/continuous.cfg similarity index 100% rename from .kokoro/samples/python3.6/presubmit.cfg rename to .kokoro/samples/python3.10/continuous.cfg diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.10/periodic-head.cfg similarity index 100% rename from .kokoro/samples/python3.6/periodic-head.cfg rename to .kokoro/samples/python3.10/periodic-head.cfg diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.10/periodic.cfg similarity index 98% rename from .kokoro/samples/python3.6/periodic.cfg rename to .kokoro/samples/python3.10/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.6/periodic.cfg +++ b/.kokoro/samples/python3.10/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.10/presubmit.cfg b/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/common.cfg b/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 00000000..d3597f08 --- /dev/null +++ b/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.11/continuous.cfg similarity index 97% rename from .kokoro/samples/python3.6/continuous.cfg rename to .kokoro/samples/python3.11/continuous.cfg index 7218af14..a1c8d975 100644 --- a/.kokoro/samples/python3.6/continuous.cfg +++ b/.kokoro/samples/python3.11/continuous.cfg @@ -3,5 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "True" -} - +} \ No newline at end of file diff --git a/.kokoro/samples/python3.11/periodic-head.cfg b/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 00000000..a18c0cfc --- /dev/null +++ b/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.11/periodic.cfg b/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + 
value: "False" +} diff --git a/.kokoro/samples/python3.11/presubmit.cfg b/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 00000000..8a5840a7 --- /dev/null +++ b/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 00000000..a18c0cfc --- /dev/null +++ b/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 00000000..2a4199f4 --- /dev/null +++ b/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: 
"**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-api-core/.kokoro/trampoline_v2.sh" diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 00000000..a18c0cfc --- /dev/null +++ b/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-api-core/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 00000000..71cd1e59 --- /dev/null +++ b/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + 
+env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 00000000..a1c8d975 --- /dev/null +++ b/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index 1198d7ba..a3aa10b5 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-api-core/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.7/periodic.cfg +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index b7ec7f5e..20c941aa 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-api-core/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.8/periodic.cfg +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg index cf034ec1..234887c6 100644 --- a/.kokoro/samples/python3.9/common.cfg +++ b/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-api-core/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-api-core/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg index 50fec964..71cd1e59 100644 --- a/.kokoro/samples/python3.9/periodic.cfg +++ b/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh index a7858e4c..e9d8bd79 100755 --- a/.kokoro/test-samples-against-head.sh +++ b/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-api-core - exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 311a8d54..53e365bc 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,11 +77,11 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index ee3146bd..7933d820 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-api-core - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index f39236e9..48f79699 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 4af6cdc2..35fa5292 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a77..1d74695f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -22,10 +22,10 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 23.7.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.2 +- repo: https://github.com/pycqa/flake8 + rev: 6.1.0 hooks: - id: flake8 diff --git a/.repo-metadata.json b/.repo-metadata.json index 59aa936d..0f0abd93 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -1,10 +1,12 @@ { - "name": "google-api-core", - "name_pretty": "Google API client core library", - "client_documentation": "https://googleapis.dev/python/google-api-core/latest", - "release_level": "ga", - "language": "python", - "library_type": "CORE", - "repo": "googleapis/python-api-core", - "distribution_name": "google-api-core" -} \ No newline at end of file + "name": "google-api-core", + "name_pretty": "Google API client core library", + "client_documentation": "https://googleapis.dev/python/google-api-core/latest", + "release_level": "stable", + "language": "python", + "library_type": "CORE", + "repo": "googleapis/python-api-core", + "distribution_name": "google-api-core", + "default_version": "", + "codeowner_team": "@googleapis/actools-python" +} diff --git a/.trampolinerc b/.trampolinerc index 383b6ec8..00801523 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,19 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" ) # Add env vars which are passed down into the container here. 
pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CHANGELOG.md b/CHANGELOG.md index e8e5e7a5..e118fe46 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,443 @@ [1]: https://pypi.org/project/google-api-core/#history +## [2.24.2](https://github.com/googleapis/python-api-core/compare/v2.24.1...v2.24.2) (2025-03-06) + + +### Bug Fixes + +* **deps:** Allow protobuf 6.x ([#804](https://github.com/googleapis/python-api-core/issues/804)) ([687be7c](https://github.com/googleapis/python-api-core/commit/687be7cbf629a61feb43ef37d3d920fa32b2d636)) + +## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24) + + +### Bug Fixes + +* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19)) +* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6)) + + +### Documentation + +* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227)) + +## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06) + + +### Features + +* Add automatic logging config to support debug logging 
([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06)) +* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f)) + +## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11) + + +### Features + +* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55)) + +## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25) + + +### Features + +* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5)) + + +### Bug Fixes + +* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954)) +* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763)) +* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3)) +* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf)) + +## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) 
(2024-10-07) + + +### Features + +* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a)) + + +### Bug Fixes + +* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) ([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51)) + +## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18) + + +### Features + +* Add async unsupported paramater exception ([#694](https://github.com/googleapis/python-api-core/issues/694)) ([8c137fe](https://github.com/googleapis/python-api-core/commit/8c137feb6e880fdd93d1248d9b6c10002dc3c096)) +* Add support for asynchronous rest streaming ([#686](https://github.com/googleapis/python-api-core/issues/686)) ([1b7bb6d](https://github.com/googleapis/python-api-core/commit/1b7bb6d1b721e4ee1561e8e4a347846d7fdd7c27)) +* Add support for creating exceptions from an asynchronous response ([#688](https://github.com/googleapis/python-api-core/issues/688)) ([1c4b0d0](https://github.com/googleapis/python-api-core/commit/1c4b0d079f2103a7b5562371a7bd1ada92528de3)) + +## [2.19.2](https://github.com/googleapis/python-api-core/compare/v2.19.1...v2.19.2) (2024-08-16) + + +### Bug Fixes + +* Fail gracefully if could not import `rpc_status` module ([#680](https://github.com/googleapis/python-api-core/issues/680)) ([7ccbf57](https://github.com/googleapis/python-api-core/commit/7ccbf5738fa236649f9a155055c71789362b5c4c)) + +## [2.19.1](https://github.com/googleapis/python-api-core/compare/v2.19.0...v2.19.1) (2024-06-19) + + +### Bug Fixes + +* Add support for protobuf 5.x ([#644](https://github.com/googleapis/python-api-core/issues/644)) ([fda0ca6](https://github.com/googleapis/python-api-core/commit/fda0ca6f0664ac5044671591ed62618175a7393f)) +* Ignore 
unknown fields in rest streaming. ([#651](https://github.com/googleapis/python-api-core/issues/651)) ([1203fb9](https://github.com/googleapis/python-api-core/commit/1203fb97d2685535f89113e944c4764c1deb595e)) + +## [2.19.0](https://github.com/googleapis/python-api-core/compare/v2.18.0...v2.19.0) (2024-04-29) + + +### Features + +* Add google.api_core.version_header ([#638](https://github.com/googleapis/python-api-core/issues/638)) ([a7b53e9](https://github.com/googleapis/python-api-core/commit/a7b53e9e9a7deb88baf92a2827958429e3677069)) + +## [2.18.0](https://github.com/googleapis/python-api-core/compare/v2.17.1...v2.18.0) (2024-03-20) + + +### Features + +* Add common logic for supporting universe domain ([#621](https://github.com/googleapis/python-api-core/issues/621)) ([94f2ca3](https://github.com/googleapis/python-api-core/commit/94f2ca3b4d094e6e10154634d3463d07ebea2035)) + + +### Bug Fixes + +* Add _registered_method to grpc ChannelStub ([#614](https://github.com/googleapis/python-api-core/issues/614)) ([5eaaea8](https://github.com/googleapis/python-api-core/commit/5eaaea8a989f8bdbdb5fbc95a155a20837c87f42)) +* **deps:** Require proto-plus >= 1.22.3 ([#626](https://github.com/googleapis/python-api-core/issues/626)) ([4fed37c](https://github.com/googleapis/python-api-core/commit/4fed37cbc32122f156e38250b5fa8b2b08a787a1)) + +## [2.17.1](https://github.com/googleapis/python-api-core/compare/v2.17.0...v2.17.1) (2024-02-13) + + +### Bug Fixes + +* Resolve issue handling protobuf responses in rest streaming ([#604](https://github.com/googleapis/python-api-core/issues/604)) ([bcebc92](https://github.com/googleapis/python-api-core/commit/bcebc92eca69dae81c5e546d526c92b164a6b3b4)) + +## [2.17.0](https://github.com/googleapis/python-api-core/compare/v2.16.2...v2.17.0) (2024-02-06) + + +### Features + +* Add attempt_direct_path argument to create_channel ([#583](https://github.com/googleapis/python-api-core/issues/583)) 
([94726e7](https://github.com/googleapis/python-api-core/commit/94726e739698035b00667983f854c600252abd28)) + + +### Bug Fixes + +* Retry constructors methods support None ([#592](https://github.com/googleapis/python-api-core/issues/592)) ([416203c](https://github.com/googleapis/python-api-core/commit/416203c1888934670bfeccafe5f5469f87314512)) + +## [2.16.2](https://github.com/googleapis/python-api-core/compare/v2.16.1...v2.16.2) (2024-02-02) + + +### Bug Fixes + +* Spelling error `a,out` -> `amount` ([#596](https://github.com/googleapis/python-api-core/issues/596)) ([88688b1](https://github.com/googleapis/python-api-core/commit/88688b1625c4dab0df6124a0560f550eb322500f)) + +## [2.16.1](https://github.com/googleapis/python-api-core/compare/v2.16.0...v2.16.1) (2024-01-30) + + +### Bug Fixes + +* Fix broken import for google.api_core.retry_async.AsyncRetry ([#587](https://github.com/googleapis/python-api-core/issues/587)) ([ac012c0](https://github.com/googleapis/python-api-core/commit/ac012c04c69b8bbe72962f0d0d9e9536c0b4a524)) + +## [2.16.0](https://github.com/googleapis/python-api-core/compare/v2.15.0...v2.16.0) (2024-01-29) + + +### Features + +* Retry and retry_async support streaming rpcs ([#495](https://github.com/googleapis/python-api-core/issues/495)) ([17ff5f1](https://github.com/googleapis/python-api-core/commit/17ff5f1d83a9a6f50a0226fb0e794634bd584f17)) + +## [2.15.0](https://github.com/googleapis/python-api-core/compare/v2.14.0...v2.15.0) (2023-12-07) + + +### Features + +* Add support for Python 3.12 ([#557](https://github.com/googleapis/python-api-core/issues/557)) ([091b4f1](https://github.com/googleapis/python-api-core/commit/091b4f1c7fcc59c3f2a02ee44fd3c30b78423f12)) +* Add type annotations to wrapped grpc calls ([#554](https://github.com/googleapis/python-api-core/issues/554)) ([fc12b40](https://github.com/googleapis/python-api-core/commit/fc12b40bfc6e0c4bb313196e2e3a9c9374ce1c45)) +* Add universe_domain argument to ClientOptions 
([3069ef4](https://github.com/googleapis/python-api-core/commit/3069ef4b9123ddb64841cbb7bbb183b53d502e0a)) +* Introduce compatibility with native namespace packages ([#561](https://github.com/googleapis/python-api-core/issues/561)) ([bd82827](https://github.com/googleapis/python-api-core/commit/bd82827108f1eeb6c05cfacf6c044b2afacc18a2)) + + +### Bug Fixes + +* Fix regression in `bidi` causing `Thread-ConsumeBidirectionalStream caught unexpected exception and will exit` ([#562](https://github.com/googleapis/python-api-core/issues/562)) ([40c8ae0](https://github.com/googleapis/python-api-core/commit/40c8ae0cf1f797e31e106461164e22db4fb2d3d9)) +* Replace deprecated `datetime.datetime.utcnow()` ([#552](https://github.com/googleapis/python-api-core/issues/552)) ([448923a](https://github.com/googleapis/python-api-core/commit/448923acf277a70e8704c949311bf4feaef8cab6)), closes [#540](https://github.com/googleapis/python-api-core/issues/540) + +## [2.14.0](https://github.com/googleapis/python-api-core/compare/v2.13.1...v2.14.0) (2023-11-09) + + +### Features + +* Support with_call for wrapped rpcs ([#550](https://github.com/googleapis/python-api-core/issues/550)) ([01a57a7](https://github.com/googleapis/python-api-core/commit/01a57a745f4c8345c9c93412c27dd416b49f5953)) + +## [2.13.1](https://github.com/googleapis/python-api-core/compare/v2.13.0...v2.13.1) (2023-11-09) + + +### Bug Fixes + +* Update async client to use async retry ([#544](https://github.com/googleapis/python-api-core/issues/544)) ([f21bb32](https://github.com/googleapis/python-api-core/commit/f21bb32b8e6310116a642a6e6b6dd8e44e30e656)) + +## [2.13.0](https://github.com/googleapis/python-api-core/compare/v2.12.0...v2.13.0) (2023-11-03) + + +### Features + +* Add caching to routing header calculation ([#526](https://github.com/googleapis/python-api-core/issues/526)) ([6251eab](https://github.com/googleapis/python-api-core/commit/6251eab3fca5f7e509cb9b6e476ce1184094b711)) + + +### Bug Fixes + +* Add warning to 
retry target to avoid incorrect usage ([#543](https://github.com/googleapis/python-api-core/issues/543)) ([bfb40e6](https://github.com/googleapis/python-api-core/commit/bfb40e6929ef47be7a6464d2f1e0d06595736b8d)) +* Drop usage of distutils ([#541](https://github.com/googleapis/python-api-core/issues/541)) ([4bd9e10](https://github.com/googleapis/python-api-core/commit/4bd9e10f20eea227c88e3e1496010cca6dd8a270)) +* Ensure exception is available when BackgroundConsumer open stream fails ([#357](https://github.com/googleapis/python-api-core/issues/357)) ([405272c](https://github.com/googleapis/python-api-core/commit/405272c05f8c6d20e242c6172b01f78f0fd3bf32)) + +## [2.12.0](https://github.com/googleapis/python-api-core/compare/v2.11.1...v2.12.0) (2023-09-07) + + +### Features + +* Add a little bit of typing to google.api_core.retry ([#453](https://github.com/googleapis/python-api-core/issues/453)) ([2477ab9](https://github.com/googleapis/python-api-core/commit/2477ab9ea5c2e863a493fb7ebebaa429a44ea096)) +* Add grpc Compression argument to channels and methods ([#451](https://github.com/googleapis/python-api-core/issues/451)) ([bdebd63](https://github.com/googleapis/python-api-core/commit/bdebd6331f9c0d3d1a8ceaf274f07d2ed75bfe92)) + + +### Documentation + +* Fix a typo in google/api_core/page_iterator.py ([#511](https://github.com/googleapis/python-api-core/issues/511)) ([c0ce73c](https://github.com/googleapis/python-api-core/commit/c0ce73c4de53ad694fe36d17408998aa1230398f)) + +## [2.11.1](https://github.com/googleapis/python-api-core/compare/v2.11.0...v2.11.1) (2023-06-12) + + +### Bug Fixes + +* Add actionable errors for GCE long running operations ([#498](https://github.com/googleapis/python-api-core/issues/498)) ([7dfc3a7](https://github.com/googleapis/python-api-core/commit/7dfc3a7a439243f05238a11b68a31720fde1769e)) +* Invalid `dev` version identifiers in `setup.py` ([#505](https://github.com/googleapis/python-api-core/issues/505)) 
([8844edb](https://github.com/googleapis/python-api-core/commit/8844edb1e802040810918a12bc9ff89104da38d4)) + +## [2.11.0](https://github.com/googleapis/python-api-core/compare/v2.10.2...v2.11.0) (2022-11-10) + + +### Features + +* Add support for Python 3.11 ([#466](https://github.com/googleapis/python-api-core/issues/466)) ([ff379e3](https://github.com/googleapis/python-api-core/commit/ff379e304c353bcab734e1c4706b74b356a1e932)) +* Allow representing enums with their unqualified symbolic names in headers ([#465](https://github.com/googleapis/python-api-core/issues/465)) ([522b98e](https://github.com/googleapis/python-api-core/commit/522b98ecc1ebd1c2280d3d7c73a02f6e4fb528d4)) + + +### Bug Fixes + +* Major refactoring of Polling, Retry and Timeout logic ([#462](https://github.com/googleapis/python-api-core/issues/462)) ([434253d](https://github.com/googleapis/python-api-core/commit/434253de16d9efdf984ddb64c409706cda1d5f82)) +* Require google-auth >= 2.14.1 ([#463](https://github.com/googleapis/python-api-core/issues/463)) ([7cc329f](https://github.com/googleapis/python-api-core/commit/7cc329fe1498b0a4285123448e4ea80c6a780d47)) + +## [2.10.2](https://github.com/googleapis/python-api-core/compare/v2.10.1...v2.10.2) (2022-10-08) + + +### Bug Fixes + +* **deps:** Allow protobuf 3.19.5 ([#459](https://github.com/googleapis/python-api-core/issues/459)) ([e949364](https://github.com/googleapis/python-api-core/commit/e949364ce3a2c4c3cdb2658054d4793aa942d999)) + +## [2.10.1](https://github.com/googleapis/python-api-core/compare/v2.10.0...v2.10.1) (2022-09-14) + + +### Bug Fixes + +* Improve transcoding error message ([#442](https://github.com/googleapis/python-api-core/issues/442)) ([538df80](https://github.com/googleapis/python-api-core/commit/538df80ed6d21f43b512a73853935f7a7b9bdf52)) + +## [2.10.0](https://github.com/googleapis/python-api-core/compare/v2.9.0...v2.10.0) (2022-09-02) + + +### Features + +* Add 'strict' to flatten_query_params to lower-case bools 
([#433](https://github.com/googleapis/python-api-core/issues/433)) ([83678e9](https://github.com/googleapis/python-api-core/commit/83678e94e1081f9087b19c43f26fad4774184d66)) + +## [2.9.0](https://github.com/googleapis/python-api-core/compare/v2.8.2...v2.9.0) (2022-09-01) + + +### Features + +* Make grpc transcode logic work in terms of protobuf python objects ([#428](https://github.com/googleapis/python-api-core/issues/428)) ([c3ad8ea](https://github.com/googleapis/python-api-core/commit/c3ad8ea67447e3d8a1154d7a9221e116f60d425a)) + + +### Bug Fixes + +* Require python 3.7+ ([#410](https://github.com/googleapis/python-api-core/issues/410)) ([7ddb8c0](https://github.com/googleapis/python-api-core/commit/7ddb8c00e6be7ab6905a9a802ad1c3063fbfa46c)) +* Restore support for grpcio-gcp ([#418](https://github.com/googleapis/python-api-core/issues/418)) ([8c19609](https://github.com/googleapis/python-api-core/commit/8c19609d6244930bd91fd5f40ef9b5b65584c4a5)) + +## [2.8.2](https://github.com/googleapis/python-api-core/compare/v2.8.1...v2.8.2) (2022-06-13) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#400](https://github.com/googleapis/python-api-core/issues/400)) ([8f73d2e](https://github.com/googleapis/python-api-core/commit/8f73d2ee2d3af2201f877aa7e2f7361147759dc7)) +* drop support for grpc-gcp ([#401](https://github.com/googleapis/python-api-core/issues/401)) ([5da6733](https://github.com/googleapis/python-api-core/commit/5da6733a475c436efc11b14889af73b3a0e20379)) + + +### Documentation + +* fix changelog header to consistent size ([#394](https://github.com/googleapis/python-api-core/issues/394)) ([ac266e9](https://github.com/googleapis/python-api-core/commit/ac266e935bc4e7c6dff250384407e7a60d8dba90)) +* Fix typo in the BackgroundConsumer docstring ([#395](https://github.com/googleapis/python-api-core/issues/395)) ([0eb727f](https://github.com/googleapis/python-api-core/commit/0eb727f92314db3c4383754514f75a49ba02e27b)) + +## 
[2.8.1](https://github.com/googleapis/python-api-core/compare/v2.8.0...v2.8.1) (2022-05-26) + + +### Bug Fixes + +* **deps:** require googleapis-common-protos >= 1.56.2 ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d)) +* **deps:** require protobuf>= 3.15.0, <4.0.0dev ([#385](https://github.com/googleapis/python-api-core/issues/385)) ([d84d66c](https://github.com/googleapis/python-api-core/commit/d84d66c2a4107f5f9a20c53e870a27fb1250ea3d)) + +## [2.8.0](https://github.com/googleapis/python-api-core/compare/v2.7.3...v2.8.0) (2022-05-18) + + +### Features + +* adds support for audience in client_options ([#379](https://github.com/googleapis/python-api-core/issues/379)) ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2)) +* adds support for audience in client_options. ([c97c498](https://github.com/googleapis/python-api-core/commit/c97c4980125a86f384cdf12720df7bb1a2adf9d2)) + +## [2.7.3](https://github.com/googleapis/python-api-core/compare/v2.7.2...v2.7.3) (2022-04-29) + + +### Bug Fixes + +* Avoid AttributeError if grpcio-status is not installed ([#370](https://github.com/googleapis/python-api-core/issues/370)) ([022add1](https://github.com/googleapis/python-api-core/commit/022add16266f9c07f0f88eea13472cc2e0bfc991)) + +## [2.7.2](https://github.com/googleapis/python-api-core/compare/v2.7.1...v2.7.2) (2022-04-13) + + +### Bug Fixes + +* allow grpc without grpcio-status ([#355](https://github.com/googleapis/python-api-core/issues/355)) ([112049e](https://github.com/googleapis/python-api-core/commit/112049e79f5a5b0a989d85d438a1bd29485f46f7)) +* remove dependency on pkg_resources ([#361](https://github.com/googleapis/python-api-core/issues/361)) ([523dbd0](https://github.com/googleapis/python-api-core/commit/523dbd0b10d37ffcf83fa751f0bad313f162abf1)) + +## [2.7.1](https://github.com/googleapis/python-api-core/compare/v2.7.0...v2.7.1) (2022-03-09) + + +### Bug 
Fixes + +* add more context to error message. ([#340](https://github.com/googleapis/python-api-core/issues/340)) ([0680fb4](https://github.com/googleapis/python-api-core/commit/0680fb4d3e013fe2de27e0a2ae2cd9896479e596)) + +## [2.7.0](https://github.com/googleapis/python-api-core/compare/v2.6.1...v2.7.0) (2022-03-08) + + +### Features + +* expose extra fields in ExtendedOperation ([#351](https://github.com/googleapis/python-api-core/issues/351)) ([9abc6f4](https://github.com/googleapis/python-api-core/commit/9abc6f48f23c87b9771dca3c96b4f6af39620a50)) + +## [2.6.1](https://github.com/googleapis/python-api-core/compare/v2.6.0...v2.6.1) (2022-03-05) + + +### Bug Fixes + +* Remove py2 tag from wheel ([#343](https://github.com/googleapis/python-api-core/issues/343)) ([7e21e9e](https://github.com/googleapis/python-api-core/commit/7e21e9e34892472a34f9b44175fa761f0e3fd9ed)) + +## [2.6.0](https://github.com/googleapis/python-api-core/compare/v2.5.0...v2.6.0) (2022-03-03) + + +### Features + +* initial support for Extended Operations ([#344](https://github.com/googleapis/python-api-core/issues/344)) ([021bb7d](https://github.com/googleapis/python-api-core/commit/021bb7d5bf0a1d8ac58dbf0c738fac309135ba7d)) + +## [2.5.0](https://github.com/googleapis/python-api-core/compare/v2.4.0...v2.5.0) (2022-02-02) + + +### Features + +* add api_key to client options ([#248](https://github.com/googleapis/python-api-core/issues/248)) ([5e5ad37](https://github.com/googleapis/python-api-core/commit/5e5ad37b8161109d65b0fab43636f7424e570fa3)) + + +### Bug Fixes + +* **deps:** remove setuptools from dependencies ([#339](https://github.com/googleapis/python-api-core/issues/339)) ([c782f29](https://github.com/googleapis/python-api-core/commit/c782f294b50b078f01959627fb82aa4c5efec333)) + + +### Documentation + +* fix typo in library name ([#332](https://github.com/googleapis/python-api-core/issues/332)) 
([f267111](https://github.com/googleapis/python-api-core/commit/f267111823545a6c67ef5f10b85cd8c2fab8a612)) + +## [2.4.0](https://www.github.com/googleapis/python-api-core/compare/v2.3.2...v2.4.0) (2022-01-11) + + +### Features + +* add support for 'error_info' ([#315](https://www.github.com/googleapis/python-api-core/issues/315)) ([cc46aa6](https://www.github.com/googleapis/python-api-core/commit/cc46aa68ec184871330d16a6c767f57a4f0eb633)) +* iterator for processing JSON responses in REST streaming. ([#317](https://www.github.com/googleapis/python-api-core/issues/317)) ([f9f2696](https://www.github.com/googleapis/python-api-core/commit/f9f26969842b456ea372bed941d712b7a9ab7239)) + +## [2.3.2](https://www.github.com/googleapis/python-api-core/compare/v2.3.1...v2.3.2) (2021-12-16) + + +### Bug Fixes + +* address broken wheels in version 2.3.1 + +## [2.3.1](https://www.github.com/googleapis/python-api-core/compare/v2.3.0...v2.3.1) (2021-12-15) + + +### Bug Fixes +* exclude function target from retry deadline exceeded exception message ([#318](https://www.github.com/googleapis/python-api-core/issues/318)) ([34ebdcc](https://www.github.com/googleapis/python-api-core/commit/34ebdcc251d4f3d7d496e8e0b78847645a06650b)) + +## [2.3.0](https://www.github.com/googleapis/python-api-core/compare/v2.2.2...v2.3.0) (2021-11-25) + + +### Features + +* add operations rest client to support long-running operations. 
([#311](https://www.github.com/googleapis/python-api-core/issues/311)) ([ce1adf3](https://www.github.com/googleapis/python-api-core/commit/ce1adf395982ede157c0f25a920946bb52789873)) + + +### Bug Fixes + +* handle bare 'grpc.Call' in 'from_grpc_error' ([#298](https://www.github.com/googleapis/python-api-core/issues/298)) ([060b339](https://www.github.com/googleapis/python-api-core/commit/060b339e3af296dd1772bfc1b4a0d2b4264cae1f)) + +## [2.2.2](https://www.github.com/googleapis/python-api-core/compare/v2.2.1...v2.2.2) (2021-11-02) + + +### Bug Fixes + +* make 'gapic_v1.method.DEFAULT' a typed object ([#292](https://www.github.com/googleapis/python-api-core/issues/292)) ([ffc51f0](https://www.github.com/googleapis/python-api-core/commit/ffc51f03c7ce5d9f009ba859b8df385d52925578)) + +## [2.2.1](https://www.github.com/googleapis/python-api-core/compare/v2.2.0...v2.2.1) (2021-10-26) + + +### Bug Fixes + +* revert "fix: do not error on LROs with no response or error" ([#294](https://www.github.com/googleapis/python-api-core/issues/294)) ([9e6091e](https://www.github.com/googleapis/python-api-core/commit/9e6091ee59a30e72a6278b369f6a08e7aef32f22)) + +## [2.2.0](https://www.github.com/googleapis/python-api-core/compare/v2.1.1...v2.2.0) (2021-10-25) + + +### Features + +* add 'GoogleAPICallError.error_details' property ([#286](https://www.github.com/googleapis/python-api-core/issues/286)) ([ef6f0fc](https://www.github.com/googleapis/python-api-core/commit/ef6f0fcfdfe771172056e35e3c990998b3b00416)) + +## [2.1.1](https://www.github.com/googleapis/python-api-core/compare/v2.1.0...v2.1.1) (2021-10-13) + + +### Bug Fixes + +* add mypy checking + 'py.typed' file ([#290](https://www.github.com/googleapis/python-api-core/issues/290)) ([0023ee1](https://www.github.com/googleapis/python-api-core/commit/0023ee1fe0e8b80c7a9e8987e0f322a829e5d613)) + +## [2.1.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.1...v2.1.0) (2021-10-05) + + +### Features + +* add grpc 
transcoding + tests ([#259](https://www.github.com/googleapis/python-api-core/issues/259)) ([afe0fa1](https://www.github.com/googleapis/python-api-core/commit/afe0fa14c21289c8244606a9f81544cff8ac5f7c)) +* Add helper function to format query_params for rest transport. ([#275](https://www.github.com/googleapis/python-api-core/issues/275)) ([1c5eb4d](https://www.github.com/googleapis/python-api-core/commit/1c5eb4df93d78e791082d9282330ebf0faacd222)) +* add support for Python 3.10 ([#284](https://www.github.com/googleapis/python-api-core/issues/284)) ([a422a5d](https://www.github.com/googleapis/python-api-core/commit/a422a5d72cb6f363d57e7a4effe421ba8e049cde)) + +## [2.0.1](https://www.github.com/googleapis/python-api-core/compare/v2.0.0...v2.0.1) (2021-08-31) + + +### Bug Fixes + +* do not error on LROs with no response or error ([#258](https://www.github.com/googleapis/python-api-core/issues/258)) ([618f192](https://www.github.com/googleapis/python-api-core/commit/618f19201af729205892fcecd9c8e315ba3174a3)) + +## [2.0.0](https://www.github.com/googleapis/python-api-core/compare/v2.0.0-b1...v2.0.0) (2021-08-18) + +### ⚠ BREAKING CHANGES + +* drop support for Python 2.7 / 3.5 ([#212](https://www.github.com/googleapis/python-api-core/issues/212)) ([a30f004](https://www.github.com/googleapis/python-api-core/commit/a30f004e74f709d46e905dd819c71f43354e9ac9)) + +### Bug Fixes + +* bump grpcio version to use stable aio API ([#234](https://www.github.com/googleapis/python-api-core/issues/234)) ([bdbf889](https://www.github.com/googleapis/python-api-core/commit/bdbf889210b709d7c1945f2160bcba9161b4dd2e)) +* strip trailing _ from field mask paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416)) + ## [2.0.0b1](https://www.github.com/googleapis/python-api-core/compare/v1.31.1...v2.0.0b1) (2021-08-03) @@ -15,7 +452,7 @@ * strip trailing _ from field mask 
paths ([#228](https://www.github.com/googleapis/python-api-core/issues/228)) ([ff6ef1b](https://www.github.com/googleapis/python-api-core/commit/ff6ef1bd07fa68307b7c82c910416d770e7b3416)) -### [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26) +## [1.31.1](https://www.github.com/googleapis/python-api-core/compare/v1.31.0...v1.31.1) (2021-07-26) ### Bug Fixes @@ -78,7 +515,7 @@ * Add support for `rest/` token in `x-goog-api-client` header ([#189](https://www.github.com/googleapis/python-api-core/issues/189)) ([15aca6b](https://www.github.com/googleapis/python-api-core/commit/15aca6b288b2ec5ce0251e442e1dfa7f52e1b124)) * retry google.auth TransportError and requests ConnectionError ([#178](https://www.github.com/googleapis/python-api-core/issues/178)) ([6ae04a8](https://www.github.com/googleapis/python-api-core/commit/6ae04a8d134fffe13f06081e15f9723c1b2ea334)) -### [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25) +## [1.26.3](https://www.github.com/googleapis/python-api-core/compare/v1.26.2...v1.26.3) (2021-03-25) ### Bug Fixes @@ -90,14 +527,14 @@ * update python contributing guide ([#147](https://www.github.com/googleapis/python-api-core/issues/147)) ([1d76b57](https://www.github.com/googleapis/python-api-core/commit/1d76b57d1f218f7885f85dc7c052bad1ad3857ac)) -### [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23) +## [1.26.2](https://www.github.com/googleapis/python-api-core/compare/v1.26.1...v1.26.2) (2021-03-23) ### Bug Fixes * save empty IAM policy bindings ([#155](https://www.github.com/googleapis/python-api-core/issues/155)) ([536c2ca](https://www.github.com/googleapis/python-api-core/commit/536c2cad814b8fa8cd346a3d7bd5f6b9889c4a6f)) -### [1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12) +## 
[1.26.1](https://www.github.com/googleapis/python-api-core/compare/v1.26.0...v1.26.1) (2021-02-12) ### Bug Fixes @@ -111,7 +548,7 @@ * allow default_host and default_scopes to be passed to create_channel ([#134](https://www.github.com/googleapis/python-api-core/issues/134)) ([94c76e0](https://www.github.com/googleapis/python-api-core/commit/94c76e0873e5b2f42331d5b1ad286c1e63b61395)) -### [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25) +## [1.25.1](https://www.github.com/googleapis/python-api-core/compare/v1.25.0...v1.25.1) (2021-01-25) ### Bug Fixes @@ -135,7 +572,7 @@ * **python:** document adding Python 3.9 support, dropping 3.5 support ([#120](https://www.github.com/googleapis/python-api-core/issues/120)) ([b51b7f5](https://www.github.com/googleapis/python-api-core/commit/b51b7f587042fe9340371c1b5c8e9adf8001c43a)), closes [#787](https://www.github.com/googleapis/python-api-core/issues/787) -### [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16) +## [1.24.1](https://www.github.com/googleapis/python-api-core/compare/v1.24.0...v1.24.1) (2020-12-16) ### Bug Fixes @@ -168,28 +605,28 @@ * harden install to use full paths, and windows separators on windows ([#88](https://www.github.com/googleapis/python-api-core/issues/88)) ([db8e636](https://www.github.com/googleapis/python-api-core/commit/db8e636f545a8872f959e3f403cfec30ffed6c34)) * update out-of-date comment in exceptions.py ([#93](https://www.github.com/googleapis/python-api-core/issues/93)) ([70ebe42](https://www.github.com/googleapis/python-api-core/commit/70ebe42601b3d088b3421233ef7d8245229b7265)) -### [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05) +## [1.22.4](https://www.github.com/googleapis/python-api-core/compare/v1.22.3...v1.22.4) (2020-10-05) ### Bug Fixes * use version.py instead of pkg_resources.get_distribution 
([#80](https://www.github.com/googleapis/python-api-core/issues/80)) ([d480d97](https://www.github.com/googleapis/python-api-core/commit/d480d97e41cd6705325b3b649360553a83c23f47)) -### [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02) +## [1.22.3](https://www.github.com/googleapis/python-api-core/compare/v1.22.2...v1.22.3) (2020-10-02) ### Bug Fixes * **deps:** require six >= 1.13.0 ([#78](https://www.github.com/googleapis/python-api-core/issues/78)) ([a7a8b98](https://www.github.com/googleapis/python-api-core/commit/a7a8b98602a3eb277fdc607ac69f3bcb147f3351)), closes [/github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES#L30-L31](https://www.github.com/googleapis//github.com/benjaminp/six/blob/c0be8815d13df45b6ae471c4c436cce8c192245d/CHANGES/issues/L30-L31) -### [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03) +## [1.22.2](https://www.github.com/googleapis/python-api-core/compare/v1.22.1...v1.22.2) (2020-09-03) ### Bug Fixes * only add quota project id if supported ([#75](https://www.github.com/googleapis/python-api-core/issues/75)) ([8f8ee78](https://www.github.com/googleapis/python-api-core/commit/8f8ee7879e4f834f3c676e535ffc41b5b9b2de62)) -### [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12) +## [1.22.1](https://www.github.com/googleapis/python-api-core/compare/v1.22.0...v1.22.1) (2020-08-12) ### Documentation @@ -220,7 +657,7 @@ * allow credentials files to be passed for channel creation ([#50](https://www.github.com/googleapis/python-api-core/issues/50)) ([ded92d0](https://www.github.com/googleapis/python-api-core/commit/ded92d0acdcde4295d0e5df05fda0d83783a3991)) -### [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16) +## [1.20.1](https://www.github.com/googleapis/python-api-core/compare/v1.20.0...v1.20.1) (2020-06-16) ### Bug Fixes @@ 
-234,7 +671,7 @@ * allow disabling response stream pre-fetch ([#30](https://www.github.com/googleapis/python-api-core/issues/30)) ([74e0b0f](https://www.github.com/googleapis/python-api-core/commit/74e0b0f8387207933c120af15b2bb5d175dd8f84)), closes [#25](https://www.github.com/googleapis/python-api-core/issues/25) -### [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06) +## [1.19.1](https://www.github.com/googleapis/python-api-core/compare/v1.19.0...v1.19.1) (2020-06-06) ### Bug Fixes diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 358404f2..1a1f608b 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,7 +21,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -49,9 +49,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-api-core # repository into your local repository. $ git remote add upstream git@github.com:googleapis/python-api-core.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -71,7 +71,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.13 -- -k .. 
note:: @@ -109,12 +109,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-api-core``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -185,7 +185,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-api-core/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-api-core/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-api-core @@ -197,34 +197,28 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ +- `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-api-core/blob/master/noxfile.py +.. 
_config: https://github.com/googleapis/python-api-core/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. -Reasons for this include: - -- Encouraging use of newest versions of Python 3 -- Taking the lead of `prominent`_ open-source `projects`_ -- `Unicode literal support`_ which allows for a cleaner codebase that - works in both Python 2 and Python 3 - -.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django -.. _projects: http://flask.pocoo.org/docs/0.10/python3/ -.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ - ********** Versioning ********** diff --git a/MANIFEST.in b/MANIFEST.in index e783f4c6..d6814cd6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/README.rst b/README.rst index d94f3e88..58ae26cb 100644 --- a/README.rst +++ b/README.rst @@ -16,13 +16,16 @@ common helpers used by all Google API clients. For more information, see the Supported Python Versions ------------------------- -Python >= 3.6 +Python >= 3.7 Unsupported Python Versions --------------------------- -Python == 2.7, Python == 3.5. +Python == 2.7, Python == 3.5, Python == 3.6. The last version of this library compatible with Python 2.7 and 3.5 is -`google-api_core==1.31.1`. +`google-api-core==1.31.1`. + +The last version of this library compatible with Python 3.6 is +`google-api-core==2.8.2`. diff --git a/docs/auth.rst b/docs/auth.rst index a9b296d4..3dcc5fd3 100644 --- a/docs/auth.rst +++ b/docs/auth.rst @@ -165,7 +165,7 @@ possible to call Google Cloud APIs with a user account via getting started with the ``google-cloud-*`` library. 
The simplest way to use credentials from a user account is via -Application Default Credentials using ``gcloud auth login`` +Application Default Credentials using ``gcloud auth application-default login`` (as mentioned above) and :func:`google.auth.default`: .. code:: python diff --git a/docs/conf.py b/docs/conf.py index aec958f3..ad4723c0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = "google-api-core" @@ -280,7 +280,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-api-core.tex", "google-api-core Documentation", author, @@ -314,7 +314,13 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (master_doc, "google-api-core", "google-api-core Documentation", [author], 1,) + ( + root_doc, + "google-api-core", + "google-api-core Documentation", + [author], + 1, + ) ] # If true, show URL addresses after external links. 
@@ -328,7 +334,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-api-core", "google-api-core Documentation", author, @@ -355,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/docs/retry.rst b/docs/retry.rst index 97a7f2ca..6e165f56 100644 --- a/docs/retry.rst +++ b/docs/retry.rst @@ -10,4 +10,5 @@ Retry in AsyncIO .. automodule:: google.api_core.retry_async :members: + :noindex: :show-inheritance: diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py index 605dd8be..b80ea372 100644 --- a/google/api_core/__init__.py +++ b/google/api_core/__init__.py @@ -14,7 +14,7 @@ """Google API Core. -This package contains common code and utilties used by Google client libraries. +This package contains common code and utilities used by Google client libraries. """ from google.api_core import version as api_core_version diff --git a/google/api_core/_rest_streaming_base.py b/google/api_core/_rest_streaming_base.py new file mode 100644 index 00000000..3bc87a96 --- /dev/null +++ b/google/api_core/_rest_streaming_base.py @@ -0,0 +1,118 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for server-side streaming in REST.""" + +from collections import deque +import string +from typing import Deque, Union +import types + +import proto +import google.protobuf.message +from google.protobuf.json_format import Parse + + +class BaseResponseIterator: + """Base Iterator over REST API responses. This class should not be used directly. + + Args: + response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response + class expected to be returned from an API. + + Raises: + ValueError: If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`. + """ + + def __init__( + self, + response_message_cls: Union[proto.Message, google.protobuf.message.Message], + ): + self._response_message_cls = response_message_cls + # Contains a list of JSON responses ready to be sent to user. + self._ready_objs: Deque[str] = deque() + # Current JSON response being built. + self._obj = "" + # Keeps track of the nesting level within a JSON object. + self._level = 0 + # Keeps track whether HTTP response is currently sending values + # inside of a string value. + self._in_string = False + # Whether an escape symbol "\" was encountered. 
+ self._escape_next = False + + self._grab = types.MethodType(self._create_grab(), self) + + def _process_chunk(self, chunk: str): + if self._level == 0: + if chunk[0] != "[": + raise ValueError( + "Can only parse array of JSON objects, instead got %s" % chunk + ) + for char in chunk: + if char == "{": + if self._level == 1: + # Level 1 corresponds to the outermost JSON object + # (i.e. the one we care about). + self._obj = "" + if not self._in_string: + self._level += 1 + self._obj += char + elif char == "}": + self._obj += char + if not self._in_string: + self._level -= 1 + if not self._in_string and self._level == 1: + self._ready_objs.append(self._obj) + elif char == '"': + # Helps to deal with an escaped quotes inside of a string. + if not self._escape_next: + self._in_string = not self._in_string + self._obj += char + elif char in string.whitespace: + if self._in_string: + self._obj += char + elif char == "[": + if self._level == 0: + self._level += 1 + else: + self._obj += char + elif char == "]": + if self._level == 1: + self._level -= 1 + else: + self._obj += char + else: + self._obj += char + self._escape_next = not self._escape_next if char == "\\" else False + + def _create_grab(self): + if issubclass(self._response_message_cls, proto.Message): + + def grab(this): + return this._response_message_cls.from_json( + this._ready_objs.popleft(), ignore_unknown_fields=True + ) + + return grab + elif issubclass(self._response_message_cls, google.protobuf.message.Message): + + def grab(this): + return Parse(this._ready_objs.popleft(), this._response_message_cls()) + + return grab + else: + raise ValueError( + "Response message class must be a subclass of proto.Message or google.protobuf.message.Message." 
+ ) diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py index 56a021a9..b002b409 100644 --- a/google/api_core/bidi.py +++ b/google/api_core/bidi.py @@ -91,11 +91,9 @@ def __init__(self, queue, period=1, initial_request=None): def _is_active(self): # Note: there is a possibility that this starts *before* the call # property is set. So we have to check if self.call is set before - # seeing if it's active. - if self.call is not None and not self.call.is_active(): - return False - else: - return True + # seeing if it's active. We need to return True if self.call is None. + # See https://github.com/googleapis/python-api-core/issues/560. + return self.call is None or self.call.is_active() def __iter__(self): if self._initial_request is not None: @@ -265,6 +263,10 @@ def add_done_callback(self, callback): self._callbacks.append(callback) def _on_call_done(self, future): + # This occurs when the RPC errors or is successfully terminated. + # Note that grpc's "future" here can also be a grpc.RpcError. + # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331 + # that `grpc.RpcError` is also `grpc.call`. for callback in self._callbacks: callback(future) @@ -276,7 +278,13 @@ def open(self): request_generator = _RequestQueueGenerator( self._request_queue, initial_request=self._initial_request ) - call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata) + try: + call = self._start_rpc(iter(request_generator), metadata=self._rpc_metadata) + except exceptions.GoogleAPICallError as exc: + # The original `grpc.RpcError` (which is usually also a `grpc.Call`) is + # available from the ``response`` property on the mapped exception. + self._on_call_done(exc.response) + raise request_generator.call = call @@ -298,6 +306,8 @@ def close(self): self._request_queue.put(None) self.call.cancel() self._request_generator = None + self._initial_request = None + self._callbacks = [] # Don't set self.call to None. 
Keep it around so that send/recv can # raise the error. @@ -364,7 +374,7 @@ class ResumableBidiRpc(BidiRpc): def should_recover(exc): return ( isinstance(exc, grpc.RpcError) and - exc.code() == grpc.StatusCode.UNVAILABLE) + exc.code() == grpc.StatusCode.UNAVAILABLE) initial_request = example_pb2.StreamingRpcRequest( setting='example') @@ -589,7 +599,7 @@ class BackgroundConsumer(object): def should_recover(exc): return ( isinstance(exc, grpc.RpcError) and - exc.code() == grpc.StatusCode.UNVAILABLE) + exc.code() == grpc.StatusCode.UNAVAILABLE) initial_request = example_pb2.StreamingRpcRequest( setting='example') @@ -654,7 +664,8 @@ def _thread_main(self, ready): _LOGGER.debug("waiting for recv.") response = self._bidi_rpc.recv() _LOGGER.debug("recved response.") - self._on_response(response) + if self._on_response is not None: + self._on_response(response) except exceptions.GoogleAPICallError as exc: _LOGGER.debug( @@ -709,6 +720,7 @@ def stop(self): _LOGGER.warning("Background thread did not exit.") self._thread = None + self._on_response = None @property def is_active(self): @@ -727,7 +739,7 @@ def resume(self): """Resumes the response stream.""" with self._wake: self._paused = False - self._wake.notifyAll() + self._wake.notify_all() @property def is_paused(self): diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py index d7f4367a..90926beb 100644 --- a/google/api_core/client_info.py +++ b/google/api_core/client_info.py @@ -19,17 +19,20 @@ """ import platform - -import pkg_resources +from typing import Union from google.api_core import version as api_core_version _PY_VERSION = platform.python_version() _API_CORE_VERSION = api_core_version.__version__ +_GRPC_VERSION: Union[str, None] + try: - _GRPC_VERSION = pkg_resources.get_distribution("grpcio").version -except pkg_resources.DistributionNotFound: # pragma: NO COVER + import grpc + + _GRPC_VERSION = grpc.__version__ +except ImportError: # pragma: NO COVER _GRPC_VERSION = None @@ -45,7 
+48,7 @@ class ClientInfo(object): ``'3.9.6'``. grpc_version (Optional[str]): The gRPC library version. api_core_version (str): The google-api-core library version. - gapic_version (Optional[str]): The sversion of gapic-generated client + gapic_version (Optional[str]): The version of gapic-generated client library, if the library was generated by gapic. client_library_version (Optional[str]): The version of the client library, generally used if the client library was not generated @@ -54,7 +57,8 @@ class ClientInfo(object): user_agent (Optional[str]): Prefix to the user agent header. This is used to supply information such as application name or partner tool. Recommended format: ``application-or-tool-ID/major.minor.version``. - rest_version (Optional[str]): The requests library version. + rest_version (Optional[str]): A string with labeled versions of the + dependencies used for REST transport. """ def __init__( diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py new file mode 100644 index 00000000..837e3e0c --- /dev/null +++ b/google/api_core/client_logging.py @@ -0,0 +1,144 @@ +import logging +import json +import os + +from typing import List, Optional + +_LOGGING_INITIALIZED = False +_BASE_LOGGER_NAME = "google" + +# Fields to be included in the StructuredLogFormatter. +# +# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields. +_recognized_logging_fields = [ + "httpRequest", + "rpcName", + "serviceName", + "credentialsType", + "credentialsInfo", + "universeDomain", + "request", + "response", + "metadata", + "retryAttempt", + "httpResponse", +] # Additional fields to be Logged. + + +def logger_configured(logger) -> bool: + """Determines whether `logger` has non-default configuration + + Args: + logger: The logger to check. + + Returns: + bool: Whether the logger has any non-default configuration. 
+ """ + return ( + logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate + ) + + +def initialize_logging(): + """Initializes "google" loggers, partly based on the environment variable + + Initializes the "google" logger and any loggers (at the "google" + level or lower) specified by the environment variable + GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers + were previously configured. If any such loggers (including the + "google" logger) are initialized, they are set to NOT propagate + log events up to their parent loggers. + + This initialization is executed only once, and hence the + environment variable is only processed the first time this + function is called. + """ + global _LOGGING_INITIALIZED + if _LOGGING_INITIALIZED: + return + scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "") + setup_logging(scopes) + _LOGGING_INITIALIZED = True + + +def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]: + """Returns a list of logger names. + + Splits the single string of comma-separated logger names into a list of individual logger name strings. + + Args: + scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.) + + Returns: + A list of all the logger names in scopes. + """ + if not scopes: + return [] + # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace. + # TODO(b/380481951): Support logging multiple scopes. + # TODO(b/380483756): Raise or log a warning for an invalid scope. 
+ namespaces = [scopes] + return namespaces + + +def configure_defaults(logger): + """Configures `logger` to emit structured info to stdout.""" + if not logger_configured(logger): + console_handler = logging.StreamHandler() + logger.setLevel("DEBUG") + logger.propagate = False + formatter = StructuredLogFormatter() + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + + +def setup_logging(scopes: str = ""): + """Sets up logging for the specified `scopes`. + + If the loggers specified in `scopes` have not been previously + configured, this will configure them to emit structured log + entries to stdout, and to not propagate their log events to their + parent loggers. Additionally, if the "google" logger (whether it + was specified in `scopes` or not) was not previously configured, + it will also configure it to not propagate log events to the root + logger. + + Args: + scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.) + + """ + + # only returns valid logger scopes (namespaces) + # this list has at most one element. + logger_names = parse_logging_scopes(scopes) + + for namespace in logger_names: + # This will either create a module level logger or get the reference of the base logger instantiated above. + logger = logging.getLogger(namespace) + + # Configure default settings. + configure_defaults(logger) + + # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes. + base_logger = logging.getLogger(_BASE_LOGGER_NAME) + if not logger_configured(base_logger): + base_logger.propagate = False + + +# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation. +class StructuredLogFormatter(logging.Formatter): + # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as + # function name, file name, and line no. appear in a log output. 
+ def format(self, record: logging.LogRecord): + log_obj = { + "timestamp": self.formatTime(record), + "severity": record.levelname, + "name": record.name, + "message": record.getMessage(), + } + + for field_name in _recognized_logging_fields: + value = getattr(record, field_name, None) + if value is not None: + log_obj[field_name] = value + return json.dumps(log_obj) diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py index be5523df..d11665d2 100644 --- a/google/api_core/client_options.py +++ b/google/api_core/client_options.py @@ -48,6 +48,8 @@ def get_client_cert(): """ +from typing import Callable, Mapping, Optional, Sequence, Tuple + class ClientOptions(object): """Client Options used to set options on clients. @@ -55,49 +57,84 @@ class ClientOptions(object): Args: api_endpoint (Optional[str]): The desired API endpoint, e.g., compute.googleapis.com - client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback which returns client certificate bytes and private key bytes both in PEM format. ``client_cert_source`` and ``client_encrypted_cert_source`` are mutually exclusive. - client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]): + client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]): A callback which returns client certificate file path, encrypted private key file path, and the passphrase bytes.``client_cert_source`` and ``client_encrypted_cert_source`` are mutually exclusive. quota_project_id (Optional[str]): A project name that a client's quota belongs to. credentials_file (Optional[str]): A path to a file storing credentials. + ``credentials_file` and ``api_key`` are mutually exclusive. + + .. 
warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials scopes (Optional[Sequence[str]]): OAuth access token override scopes. + api_key (Optional[str]): Google API key. ``credentials_file`` and + ``api_key`` are mutually exclusive. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the service endpoint value will be used as a default. + An example of a valid ``api_audience`` is: "https://language.googleapis.com". + universe_domain (Optional[str]): The desired universe domain. This must match + the one in credentials. If not set, the default universe domain is + `googleapis.com`. If both `api_endpoint` and `universe_domain` are set, + then `api_endpoint` is used as the service endpoint. If `api_endpoint` is + not specified, the format will be `{service}.{universe_domain}`. Raises: ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source`` - are provided. + are provided, or both ``credentials_file`` and ``api_key`` are provided. 
""" def __init__( self, - api_endpoint=None, - client_cert_source=None, - client_encrypted_cert_source=None, - quota_project_id=None, - credentials_file=None, - scopes=None, + api_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + client_encrypted_cert_source: Optional[ + Callable[[], Tuple[str, str, bytes]] + ] = None, + quota_project_id: Optional[str] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + api_key: Optional[str] = None, + api_audience: Optional[str] = None, + universe_domain: Optional[str] = None, ): if client_cert_source and client_encrypted_cert_source: raise ValueError( "client_cert_source and client_encrypted_cert_source are mutually exclusive" ) + if api_key and credentials_file: + raise ValueError("api_key and credentials_file are mutually exclusive") self.api_endpoint = api_endpoint self.client_cert_source = client_cert_source self.client_encrypted_cert_source = client_encrypted_cert_source self.quota_project_id = quota_project_id self.credentials_file = credentials_file self.scopes = scopes + self.api_key = api_key + self.api_audience = api_audience + self.universe_domain = universe_domain - def __repr__(self): + def __repr__(self) -> str: return "ClientOptions: " + repr(self.__dict__) -def from_dict(options): +def from_dict(options: Mapping[str, object]) -> ClientOptions: """Construct a client options object from a mapping object. 
Args: diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py index 78268efc..c3792300 100644 --- a/google/api_core/datetime_helpers.py +++ b/google/api_core/datetime_helpers.py @@ -42,7 +42,7 @@ def utcnow(): """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests.""" - return datetime.datetime.utcnow() + return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None) def to_milliseconds(value): @@ -151,7 +151,7 @@ def from_rfc3339(value): micros = 0 else: scale = 9 - len(fraction) - nanos = int(fraction) * (10 ** scale) + nanos = int(fraction) * (10**scale) micros = nanos // 1000 return bare_seconds.replace(microsecond=micros, tzinfo=datetime.timezone.utc) @@ -170,7 +170,7 @@ def to_rfc3339(value, ignore_zone=True): datetime object is ignored and the datetime is treated as UTC. Returns: - str: The RFC3339 formated string representing the datetime. + str: The RFC3339 formatted string representing the datetime. """ if not ignore_zone and value.tzinfo is not None: # Convert to UTC and remove the time zone info. 
@@ -245,7 +245,7 @@ def from_rfc3339(cls, stamp): nanos = 0 else: scale = 9 - len(fraction) - nanos = int(fraction) * (10 ** scale) + nanos = int(fraction) * (10**scale) return cls( bare.year, bare.month, diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py index 13be917e..e3eb696c 100644 --- a/google/api_core/exceptions.py +++ b/google/api_core/exceptions.py @@ -22,17 +22,35 @@ from __future__ import unicode_literals import http.client +from typing import Optional, Dict +from typing import Union +import warnings + +from google.rpc import error_details_pb2 + + +def _warn_could_not_import_grpcio_status(): + warnings.warn( + "Please install grpcio-status to obtain helpful grpc error messages.", + ImportWarning, + ) # pragma: NO COVER + try: import grpc + try: + from grpc_status import rpc_status + except ImportError: # pragma: NO COVER + _warn_could_not_import_grpcio_status() + rpc_status = None except ImportError: # pragma: NO COVER grpc = None # Lookup tables for mapping exceptions from HTTP and gRPC transports. # Populated by _GoogleAPICallErrorMeta -_HTTP_CODE_TO_EXCEPTION = {} -_GRPC_CODE_TO_EXCEPTION = {} +_HTTP_CODE_TO_EXCEPTION: Dict[int, Exception] = {} +_GRPC_CODE_TO_EXCEPTION: Dict[int, Exception] = {} # Additional lookup table to map integer status codes to grpc status code # grpc does not currently support initializing enums from ints @@ -60,7 +78,7 @@ class RetryError(GoogleAPIError): Args: message (str): The exception message. - cause (Exception): The last exception raised when retring the + cause (Exception): The last exception raised when retrying the function. """ @@ -96,11 +114,14 @@ class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta): Args: message (str): The exception message. errors (Sequence[Any]): An optional list of error details. + details (Sequence[Any]): An optional list of objects defined in google.rpc.error_details. 
response (Union[requests.Request, grpc.Call]): The response or gRPC call metadata. + error_info (Union[error_details_pb2.ErrorInfo, None]): An optional object containing error info + (google.rpc.error_details.ErrorInfo). """ - code = None + code: Union[int, None] = None """Optional[int]: The HTTP status code associated with this error. This may be ``None`` if the exception does not have a direct mapping @@ -116,15 +137,67 @@ class GoogleAPICallError(GoogleAPIError, metaclass=_GoogleAPICallErrorMeta): This may be ``None`` if the exception does not match up to a gRPC error. """ - def __init__(self, message, errors=(), response=None): + def __init__(self, message, errors=(), details=(), response=None, error_info=None): super(GoogleAPICallError, self).__init__(message) self.message = message """str: The exception message.""" self._errors = errors + self._details = details self._response = response + self._error_info = error_info def __str__(self): - return "{} {}".format(self.code, self.message) + error_msg = "{} {}".format(self.code, self.message) + if self.details: + error_msg = "{} {}".format(error_msg, self.details) + # Note: This else condition can be removed once proposal A from + # b/284179390 is implemented. + else: + if self.errors: + errors = [ + f"{error.code}: {error.message}" + for error in self.errors + if hasattr(error, "code") and hasattr(error, "message") + ] + if errors: + error_msg = "{} {}".format(error_msg, "\n".join(errors)) + return error_msg + + @property + def reason(self): + """The reason of the error. + + Reference: + https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112 + + Returns: + Union[str, None]: An optional string containing reason of the error. + """ + return self._error_info.reason if self._error_info else None + + @property + def domain(self): + """The logical grouping to which the "reason" belongs. 
+ + Reference: + https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112 + + Returns: + Union[str, None]: An optional string containing a logical grouping to which the "reason" belongs. + """ + return self._error_info.domain if self._error_info else None + + @property + def metadata(self): + """Additional structured details about this error. + + Reference: + https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto#L112 + + Returns: + Union[Dict[str, str], None]: An optional object containing structured details about the error. + """ + return self._error_info.metadata if self._error_info else None @property def errors(self): @@ -135,6 +208,19 @@ def errors(self): """ return list(self._errors) + @property + def details(self): + """Information contained in google.rpc.status.details. + + Reference: + https://github.com/googleapis/googleapis/blob/master/google/rpc/status.proto + https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto + + Returns: + Sequence[Any]: A list of structured objects from error_details.proto + """ + return list(self._details) + @property def response(self): """Optional[Union[requests.Request, grpc.Call]]: The response or @@ -293,8 +379,7 @@ class Cancelled(ClientError): """Exception mapping a :attr:`grpc.StatusCode.CANCELLED` error.""" # This maps to HTTP status code 499. 
See - # https://github.com/googleapis/googleapis/blob/master/google/rpc\ - # /code.proto + # https://github.com/googleapis/googleapis/blob/master/google/rpc/code.proto code = 499 grpc_status_code = grpc.StatusCode.CANCELLED if grpc is not None else None @@ -357,6 +442,12 @@ class DeadlineExceeded(GatewayTimeout): grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None +class AsyncRestUnsupportedParameterError(NotImplementedError): + """Raised when an unsupported parameter is configured against async rest transport.""" + + pass + + def exception_class_for_http_status(status_code): """Return the exception class for a specific HTTP status code. @@ -391,6 +482,62 @@ def from_http_status(status_code, message, **kwargs): return error +def _format_rest_error_message(error, method, url): + method = method.upper() if method else None + message = "{method} {url}: {error}".format( + method=method, + url=url, + error=error, + ) + return message + + +# NOTE: We're moving away from `from_http_status` because it expects an aiohttp response compared +# to `format_http_response_error` which expects a more abstract response from google.auth and is +# compatible with both sync and async response types. +# TODO(https://github.com/googleapis/python-api-core/issues/691): Add type hint for response. +def format_http_response_error( + response, method: str, url: str, payload: Optional[Dict] = None +): + """Create a :class:`GoogleAPICallError` from a google auth rest response. + + Args: + response Union[google.auth.transport.Response, google.auth.aio.transport.Response]: The HTTP response. + method Optional(str): The HTTP request method. + url Optional(str): The HTTP request url. + payload Optional(dict): The HTTP response payload. If not passed in, it is read from response for a response type of google.auth.transport.Response. 
+ + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`, with the message and errors populated + from the response. + """ + payload = {} if not payload else payload + error_message = payload.get("error", {}).get("message", "unknown error") + errors = payload.get("error", {}).get("errors", ()) + # In JSON, details are already formatted in developer-friendly way. + details = payload.get("error", {}).get("details", ()) + error_info_list = list( + filter( + lambda detail: detail.get("@type", "") + == "type.googleapis.com/google.rpc.ErrorInfo", + details, + ) + ) + error_info = error_info_list[0] if error_info_list else None + message = _format_rest_error_message(error_message, method, url) + + exception = from_http_status( + response.status_code, + message, + errors=errors, + details=details, + response=response, + error_info=error_info, + ) + return exception + + def from_http_response(response): """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`. @@ -406,19 +553,10 @@ def from_http_response(response): payload = response.json() except ValueError: payload = {"error": {"message": response.text or "unknown error"}} - - error_message = payload.get("error", {}).get("message", "unknown error") - errors = payload.get("error", {}).get("errors", ()) - - message = "{method} {url}: {error}".format( - method=response.request.method, url=response.request.url, error=error_message + return format_http_response_error( + response, response.request.method, response.request.url, payload ) - exception = from_http_status( - response.status_code, message, errors=errors, response=response - ) - return exception - def exception_class_for_grpc_status(status_code): """Return the exception class for a specific :class:`grpc.StatusCode`. 
@@ -462,6 +600,48 @@ def _is_informative_grpc_error(rpc_exc): return hasattr(rpc_exc, "code") and hasattr(rpc_exc, "details") +def _parse_grpc_error_details(rpc_exc): + if not rpc_status: # pragma: NO COVER + _warn_could_not_import_grpcio_status() + return [], None + try: + status = rpc_status.from_call(rpc_exc) + except NotImplementedError: # workaround + return [], None + + if not status: + return [], None + + possible_errors = [ + error_details_pb2.BadRequest, + error_details_pb2.PreconditionFailure, + error_details_pb2.QuotaFailure, + error_details_pb2.ErrorInfo, + error_details_pb2.RetryInfo, + error_details_pb2.ResourceInfo, + error_details_pb2.RequestInfo, + error_details_pb2.DebugInfo, + error_details_pb2.Help, + error_details_pb2.LocalizedMessage, + ] + error_info = None + error_details = [] + for detail in status.details: + matched_detail_cls = list( + filter(lambda x: detail.Is(x.DESCRIPTOR), possible_errors) + ) + # If nothing matched, use detail directly. + if len(matched_detail_cls) == 0: + info = detail + else: + info = matched_detail_cls[0]() + detail.Unpack(info) + error_details.append(info) + if isinstance(info, error_details_pb2.ErrorInfo): + error_info = info + return error_details, error_info + + def from_grpc_error(rpc_exc): """Create a :class:`GoogleAPICallError` from a :class:`grpc.RpcError`. @@ -474,9 +654,17 @@ def from_grpc_error(rpc_exc): """ # NOTE(lidiz) All gRPC error shares the parent class grpc.RpcError. # However, check for grpc.RpcError breaks backward compatibility. 
- if isinstance(rpc_exc, grpc.Call) or _is_informative_grpc_error(rpc_exc): + if ( + grpc is not None and isinstance(rpc_exc, grpc.Call) + ) or _is_informative_grpc_error(rpc_exc): + details, err_info = _parse_grpc_error_details(rpc_exc) return from_grpc_status( - rpc_exc.code(), rpc_exc.details(), errors=(rpc_exc,), response=rpc_exc + rpc_exc.code(), + rpc_exc.details(), + errors=(rpc_exc,), + details=details, + response=rpc_exc, + error_info=err_info, ) else: return GoogleAPICallError(str(rpc_exc), errors=(rpc_exc,), response=rpc_exc) diff --git a/google/api_core/extended_operation.py b/google/api_core/extended_operation.py new file mode 100644 index 00000000..d474632b --- /dev/null +++ b/google/api_core/extended_operation.py @@ -0,0 +1,225 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Futures for extended long-running operations returned from Google Cloud APIs. + +These futures can be used to synchronously wait for the result of a +long-running operations using :meth:`ExtendedOperation.result`: + +.. code-block:: python + + extended_operation = my_api_client.long_running_method() + + extended_operation.result() + +Or asynchronously using callbacks and :meth:`Operation.add_done_callback`: + +.. 
code-block:: python + + extended_operation = my_api_client.long_running_method() + + def my_callback(ex_op): + print(f"Operation {ex_op.name} completed") + + extended_operation.add_done_callback(my_callback) + +""" + +import threading + +from google.api_core import exceptions +from google.api_core.future import polling + + +class ExtendedOperation(polling.PollingFuture): + """An ExtendedOperation future for interacting with a Google API Long-Running Operation. + + Args: + extended_operation (proto.Message): The initial operation. + refresh (Callable[[], type(extended_operation)]): A callable that returns + the latest state of the operation. + cancel (Callable[[], None]): A callable that tries to cancel the operation. + polling Optional(google.api_core.retry.Retry): The configuration used + for polling. This can be used to control how often :meth:`done` + is polled. If the ``timeout`` argument to :meth:`result` is + specified it will override the ``polling.timeout`` property. + retry Optional(google.api_core.retry.Retry): DEPRECATED use ``polling`` + instead. If specified it will override ``polling`` parameter to + maintain backward compatibility. + + Note: Most long-running API methods use google.api_core.operation.Operation + This class is a wrapper for a subset of methods that use alternative + Long-Running Operation (LRO) semantics. + + Note: there is not a concrete type the extended operation must be. + It MUST have fields that correspond to the following, POSSIBLY WITH DIFFERENT NAMES: + * name: str + * status: Union[str, bool, enum.Enum] + * error_code: int + * error_message: str + """ + + def __init__( + self, + extended_operation, + refresh, + cancel, + polling=polling.DEFAULT_POLLING, + **kwargs, + ): + super().__init__(polling=polling, **kwargs) + self._extended_operation = extended_operation + self._refresh = refresh + self._cancel = cancel + # Note: the extended operation does not give a good way to indicate cancellation. 
+ # We make do with manually tracking cancellation and checking for doneness. + self._cancelled = False + self._completion_lock = threading.Lock() + # Invoke in case the operation came back already complete. + self._handle_refreshed_operation() + + # Note: the following four properties MUST be overridden in a subclass + # if, and only if, the fields in the corresponding extended operation message + # have different names. + # + # E.g. we have an extended operation class that looks like + # + # class MyOperation(proto.Message): + # moniker = proto.Field(proto.STRING, number=1) + # status_msg = proto.Field(proto.STRING, number=2) + # optional http_error_code = proto.Field(proto.INT32, number=3) + # optional http_error_msg = proto.Field(proto.STRING, number=4) + # + # the ExtendedOperation subclass would provide property overrides that map + # to these (poorly named) fields. + @property + def name(self): + return self._extended_operation.name + + @property + def status(self): + return self._extended_operation.status + + @property + def error_code(self): + return self._extended_operation.error_code + + @property + def error_message(self): + return self._extended_operation.error_message + + def __getattr__(self, name): + return getattr(self._extended_operation, name) + + def done(self, retry=None): + self._refresh_and_update(retry) + return self._extended_operation.done + + def cancel(self): + if self.done(): + return False + + self._cancel() + self._cancelled = True + return True + + def cancelled(self): + # TODO(dovs): there is not currently a good way to determine whether the + # operation has been cancelled. + # The best we can do is manually keep track of cancellation + # and check for doneness. 
+ if not self._cancelled: + return False + + self._refresh_and_update() + return self._extended_operation.done + + def _refresh_and_update(self, retry=None): + if not self._extended_operation.done: + self._extended_operation = ( + self._refresh(retry=retry) if retry else self._refresh() + ) + self._handle_refreshed_operation() + + def _handle_refreshed_operation(self): + with self._completion_lock: + if not self._extended_operation.done: + return + + if self.error_code and self.error_message: + # Note: `errors` can be removed once proposal A from + # b/284179390 is implemented. + errors = [] + if hasattr(self, "error") and hasattr(self.error, "errors"): + errors = self.error.errors + exception = exceptions.from_http_status( + status_code=self.error_code, + message=self.error_message, + response=self._extended_operation, + errors=errors, + ) + self.set_exception(exception) + elif self.error_code or self.error_message: + exception = exceptions.GoogleAPICallError( + f"Unexpected error {self.error_code}: {self.error_message}" + ) + self.set_exception(exception) + else: + # Extended operations have no payload. + self.set_result(None) + + @classmethod + def make(cls, refresh, cancel, extended_operation, **kwargs): + """ + Return an instantiated ExtendedOperation (or child) that wraps + * a refresh callable + * a cancel callable (can be a no-op) + * an initial result + + .. note:: + It is the caller's responsibility to set up refresh and cancel + with their correct request argument. 
+ The reason for this is that the services that use Extended Operations + have rpcs that look something like the following: + + // service.proto + service MyLongService { + rpc StartLongTask(StartLongTaskRequest) returns (ExtendedOperation) { + option (google.cloud.operation_service) = "CustomOperationService"; + } + } + + service CustomOperationService { + rpc Get(GetOperationRequest) returns (ExtendedOperation) { + option (google.cloud.operation_polling_method) = true; + } + } + + Any info needed for the poll, e.g. a name, path params, etc. + is held in the request, which the initial client method is in a much + better position to make because the caller made the initial request. + + TL;DR: the caller sets up closures for refresh and cancel that carry + the properly configured requests. + + Args: + refresh (Callable[Optional[Retry]][type(extended_operation)]): A callable that + returns the latest state of the operation. + cancel (Callable[][Any]): A callable that tries to cancel the operation + on a best effort basis. + extended_operation (Any): The initial response of the long running method. + See the docstring for ExtendedOperation.__init__ for requirements on + the type and fields of extended_operation + """ + return cls(extended_operation, refresh, cancel, **kwargs) diff --git a/google/api_core/future/async_future.py b/google/api_core/future/async_future.py index 0343fbe2..325ee9cd 100644 --- a/google/api_core/future/async_future.py +++ b/google/api_core/future/async_future.py @@ -43,8 +43,10 @@ class AsyncFuture(base.Future): The :meth:`done` method should be implemented by subclasses. The polling behavior will repeatedly call ``done`` until it returns True. - .. note: Privacy here is intended to prevent the final class from - overexposing, not to prevent subclasses from accessing methods. + .. note:: + + Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods.
Args: retry (google.api_core.retry.Retry): The retry configuration used @@ -93,7 +95,7 @@ async def _blocking_poll(self, timeout=None): if self._future.done(): return - retry_ = self._retry.with_deadline(timeout) + retry_ = self._retry.with_timeout(timeout) try: await retry_(self._done_or_raise)() diff --git a/google/api_core/future/polling.py b/google/api_core/future/polling.py index 2f80efb5..f1e2a188 100644 --- a/google/api_core/future/polling.py +++ b/google/api_core/future/polling.py @@ -18,7 +18,7 @@ import concurrent.futures from google.api_core import exceptions -from google.api_core import retry +from google.api_core import retry as retries from google.api_core.future import _helpers from google.api_core.future import base @@ -29,14 +29,37 @@ class _OperationNotComplete(Exception): pass -RETRY_PREDICATE = retry.if_exception_type( +# DEPRECATED as it conflates RPC retry and polling concepts into one. +# Use POLLING_PREDICATE instead to configure polling. +RETRY_PREDICATE = retries.if_exception_type( _OperationNotComplete, exceptions.TooManyRequests, exceptions.InternalServerError, exceptions.BadGateway, exceptions.ServiceUnavailable, ) -DEFAULT_RETRY = retry.Retry(predicate=RETRY_PREDICATE) + +# DEPRECATED: use DEFAULT_POLLING to configure LRO polling logic. Construct +# Retry object using its default values as a baseline for any custom retry logic +# (not to be confused with polling logic). +DEFAULT_RETRY = retries.Retry(predicate=RETRY_PREDICATE) + +# POLLING_PREDICATE is supposed to poll only on _OperationNotComplete. +# Any RPC-specific errors (like ServiceUnavailable) will be handled +# by retry logic (not to be confused with polling logic) which is triggered for +# every polling RPC independently of polling logic but within its context. 
+POLLING_PREDICATE = retries.if_exception_type( + _OperationNotComplete, +) + +# Default polling configuration +DEFAULT_POLLING = retries.Retry( + predicate=POLLING_PREDICATE, + initial=1.0, # seconds + maximum=20.0, # seconds + multiplier=1.5, + timeout=900, # seconds +) class PollingFuture(base.Future): @@ -45,19 +68,29 @@ class PollingFuture(base.Future): The :meth:`done` method should be implemented by subclasses. The polling behavior will repeatedly call ``done`` until it returns True. - .. note: Privacy here is intended to prevent the final class from - overexposing, not to prevent subclasses from accessing methods. + The actual polling logic is encapsulated in :meth:`result` method. See + documentation for that method for details on how polling works. + + .. note:: + + Privacy here is intended to prevent the final class from + overexposing, not to prevent subclasses from accessing methods. Args: - retry (google.api_core.retry.Retry): The retry configuration used - when polling. This can be used to control how often :meth:`done` - is polled. Regardless of the retry's ``deadline``, it will be - overridden by the ``timeout`` argument to :meth:`result`. + polling (google.api_core.retry.Retry): The configuration used for polling. + This parameter controls how often :meth:`done` is polled. If the + ``timeout`` argument is specified in :meth:`result` method it will + override the ``polling.timeout`` property. + retry (google.api_core.retry.Retry): DEPRECATED use ``polling`` instead. + If set, it will override ``polling`` parameter for backward + compatibility. 
""" - def __init__(self, retry=DEFAULT_RETRY): + _DEFAULT_VALUE = object() + + def __init__(self, polling=DEFAULT_POLLING, **kwargs): super(PollingFuture, self).__init__() - self._retry = retry + self._polling = kwargs.get("retry", polling) self._result = None self._exception = None self._result_set = False @@ -67,11 +100,13 @@ def __init__(self, retry=DEFAULT_RETRY): self._done_callbacks = [] @abc.abstractmethod - def done(self, retry=DEFAULT_RETRY): + def done(self, retry=None): """Checks to see if the operation is complete. Args: - retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the + polling RPC (to not be confused with polling configuration. See + the documentation for :meth:`result` for details). Returns: bool: True if the operation is complete, False otherwise. @@ -79,45 +114,136 @@ def done(self, retry=DEFAULT_RETRY): # pylint: disable=redundant-returns-doc, missing-raises-doc raise NotImplementedError() - def _done_or_raise(self, retry=DEFAULT_RETRY): + def _done_or_raise(self, retry=None): """Check if the future is done and raise if it's not.""" - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - - if not self.done(**kwargs): + if not self.done(retry=retry): raise _OperationNotComplete() def running(self): """True if the operation is currently running.""" return not self.done() - def _blocking_poll(self, timeout=None, retry=DEFAULT_RETRY): - """Poll and wait for the Future to be resolved. + def _blocking_poll(self, timeout=_DEFAULT_VALUE, retry=None, polling=None): + """Poll and wait for the Future to be resolved.""" - Args: - timeout (int): - How long (in seconds) to wait for the operation to complete. - If None, wait indefinitely. 
- """ if self._result_set: return - retry_ = self._retry.with_deadline(timeout) + polling = polling or self._polling + if timeout is not PollingFuture._DEFAULT_VALUE: + polling = polling.with_timeout(timeout) try: - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - retry_(self._done_or_raise)(**kwargs) + polling(self._done_or_raise)(retry=retry) except exceptions.RetryError: raise concurrent.futures.TimeoutError( - "Operation did not complete within the designated " "timeout." + f"Operation did not complete within the designated timeout of " + f"{polling.timeout} seconds." ) - def result(self, timeout=None, retry=DEFAULT_RETRY): - """Get the result of the operation, blocking if necessary. + def result(self, timeout=_DEFAULT_VALUE, retry=None, polling=None): + """Get the result of the operation. + + This method will poll for operation status periodically, blocking if + necessary. If you just want to make sure that this method does not block + for more than X seconds and you do not care about the nitty-gritty of + how this method operates, just call it with ``result(timeout=X)``. The + other parameters are for advanced use only. + + Every call to this method is controlled by the following three + parameters, each of which has a specific, distinct role, even though all three + may look very similar: ``timeout``, ``retry`` and ``polling``. In most + cases users do not need to specify any custom values for any of these + parameters and may simply rely on default ones instead. + + If you choose to specify custom parameters, please make sure you've + read the documentation below carefully. + + First, please check :class:`google.api_core.retry.Retry` + class documentation for the proper definition of timeout and deadline + terms and for the definition the three different types of timeouts. + This class operates in terms of Retry Timeout and Polling Timeout. It + does not let customizing RPC timeout and the user is expected to rely on + default behavior for it. 
+
+        The roles of each argument of this method are as follows:
+
+        ``timeout`` (int): (Optional) The Polling Timeout as defined in
+        :class:`google.api_core.retry.Retry`. If the operation does not complete
+        within this timeout an exception will be thrown. This parameter affects
+        neither Retry Timeout nor RPC Timeout.
+
+        ``retry`` (google.api_core.retry.Retry): (Optional) How to retry the
+        polling RPC. The ``retry.timeout`` property of this parameter is the
+        Retry Timeout as defined in :class:`google.api_core.retry.Retry`.
+        This parameter defines ONLY how the polling RPC call is retried
+        (i.e. what to do if the RPC we used for polling returned an error). It
+        does NOT define how the polling is done (i.e. how frequently and for
+        how long to call the polling RPC); use the ``polling`` parameter for that.
+        If a polling RPC throws an error and retrying it fails, the whole
+        future fails with the corresponding exception. If you want to tune which
+        server response error codes are not fatal for operation polling, use this
+        parameter to control that (``retry.predicate`` in particular).
+
+        ``polling`` (google.api_core.retry.Retry): (Optional) How often and
+        for how long to call the polling RPC periodically (i.e. what to do if
+        a polling rpc returned successfully but its returned result indicates
+        that the long running operation is not completed yet, so we need to
+        check it again at some point in future). This parameter does NOT define
+        how to retry each individual polling RPC in case of an error; use the
+        ``retry`` parameter for that. The ``polling.timeout`` of this parameter
+        is Polling Timeout as defined in
+        :class:`google.api_core.retry.Retry`.
+
+        For each of the arguments, there are also default values in place, which
+        will be used if a user does not specify their own.
The default values
+        for the three parameters are not to be confused with the default values
+        for the corresponding arguments in this method (those serve as "not set"
+        markers for the resolution logic).
+
+        If ``timeout`` is provided (i.e. ``timeout is not _DEFAULT_VALUE``; note
+        the ``None`` value means "infinite timeout"), it will be used to control
+        the actual Polling Timeout. Otherwise, the ``polling.timeout`` value
+        will be used instead (see below for how the ``polling`` config itself
+        gets resolved). In other words, this parameter effectively overrides
+        the ``polling.timeout`` value if specified. This is so to preserve
+        backward compatibility.
+
+        If ``retry`` is provided (i.e. ``retry is not None``) it will be used to
+        control retry behavior for the polling RPC and the ``retry.timeout``
+        will determine the Retry Timeout. If not provided, the
+        polling RPC will be called with whichever default retry config was
+        specified for the polling RPC at the moment of the construction of the
+        polling RPC's client. For example, if the polling RPC is
+        ``operations_client.get_operation()``, the ``retry`` parameter will be
+        controlling its retry behavior (not polling behavior) and, if not
+        specified, that specific method (``operations_client.get_operation()``)
+        will be retried according to the default retry config provided during
+        creation of ``operations_client`` client instead. This argument exists
+        mainly for backward compatibility; users are very unlikely to ever need
+        to set this parameter explicitly.
+
+        If ``polling`` is provided (i.e. ``polling is not None``), it will be used
+        to control the overall polling behavior and ``polling.timeout`` will
+        control Polling Timeout unless it is overridden by ``timeout`` parameter
+        as described above. If not provided, the ``polling`` parameter specified
+        during construction of this future (the ``polling`` argument in the
+        constructor) will be used instead.
Note: since the ``timeout`` argument may + override ``polling.timeout`` value, this parameter should be viewed as + coupled with the ``timeout`` parameter as described above. Args: - timeout (int): - How long (in seconds) to wait for the operation to complete. - If None, wait indefinitely. + timeout (int): (Optional) How long (in seconds) to wait for the + operation to complete. If None, wait indefinitely. + retry (google.api_core.retry.Retry): (Optional) How to retry the + polling RPC. This defines ONLY how the polling RPC call is + retried (i.e. what to do if the RPC we used for polling returned + an error). It does NOT define how the polling is done (i.e. how + frequently and for how long to call the polling RPC). + polling (google.api_core.retry.Retry): (Optional) How often and + for how long to call polling RPC periodically. This parameter + does NOT define how to retry each individual polling RPC call + (use the ``retry`` parameter for that). Returns: google.protobuf.Message: The Operation's result. @@ -126,8 +252,8 @@ def result(self, timeout=None, retry=DEFAULT_RETRY): google.api_core.GoogleAPICallError: If the operation errors or if the timeout is reached before the operation completes. """ - kwargs = {} if retry is DEFAULT_RETRY else {"retry": retry} - self._blocking_poll(timeout=timeout, **kwargs) + + self._blocking_poll(timeout=timeout, retry=retry, polling=polling) if self._exception is not None: # pylint: disable=raising-bad-type @@ -136,12 +262,18 @@ def result(self, timeout=None, retry=DEFAULT_RETRY): return self._result - def exception(self, timeout=None): + def exception(self, timeout=_DEFAULT_VALUE): """Get the exception from the operation, blocking if necessary. + See the documentation for the :meth:`result` method for details on how + this method operates, as both ``result`` and this method rely on the + exact same polling logic. 
The only difference is that this method does + not accept ``retry`` and ``polling`` arguments but relies on the default ones + instead. + Args: timeout (int): How long to wait for the operation to complete. - If None, wait indefinitely. + If None, wait indefinitely. Returns: Optional[google.api_core.GoogleAPICallError]: The operation's diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py index fab0f542..4516f339 100644 --- a/google/api_core/gapic_v1/client_info.py +++ b/google/api_core/gapic_v1/client_info.py @@ -36,7 +36,7 @@ class ClientInfo(client_info.ClientInfo): ``'3.9.6'``. grpc_version (Optional[str]): The gRPC library version. api_core_version (str): The google-api-core library version. - gapic_version (Optional[str]): The sversion of gapic-generated client + gapic_version (Optional[str]): The version of gapic-generated client library, if the library was generated by gapic. client_library_version (Optional[str]): The version of the client library, generally used if the client library was not generated @@ -45,6 +45,8 @@ class ClientInfo(client_info.ClientInfo): user_agent (Optional[str]): Prefix to the user agent header. This is used to supply information such as application name or partner tool. Recommended format: ``application-or-tool-ID/major.minor.version``. + rest_version (Optional[str]): A string with labeled versions of the + dependencies used for REST transport. """ def to_grpc_metadata(self): diff --git a/google/api_core/gapic_v1/config.py b/google/api_core/gapic_v1/config.py index 9c722871..36b50d9f 100644 --- a/google/api_core/gapic_v1/config.py +++ b/google/api_core/gapic_v1/config.py @@ -33,6 +33,9 @@ def _exception_class_for_grpc_status_name(name): """Returns the Google API exception class for a gRPC error code name. + DEPRECATED: use ``exceptions.exception_class_for_grpc_status`` method + directly instead. + Args: name (str): The name of the gRPC status code, for example, ``UNAVAILABLE``. 
@@ -47,6 +50,8 @@ def _exception_class_for_grpc_status_name(name): def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry): """Creates a Retry object given a gapic retry configuration. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: retry_params (dict): The retry parameter values, for example:: @@ -81,6 +86,8 @@ def _retry_from_retry_config(retry_params, retry_codes, retry_impl=retry.Retry): def _timeout_from_retry_config(retry_params): """Creates a ExponentialTimeout object given a gapic retry configuration. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: retry_params (dict): The retry parameter values, for example:: @@ -113,6 +120,8 @@ def parse_method_configs(interface_config, retry_impl=retry.Retry): """Creates default retry and timeout objects for each method in a gapic interface config. + DEPRECATED: instantiate retry and timeout classes directly instead. + Args: interface_config (Mapping): The interface config section of the full gapic library config. For example, If the full configuration has diff --git a/google/api_core/gapic_v1/method.py b/google/api_core/gapic_v1/method.py index 79722c07..0f14ea9c 100644 --- a/google/api_core/gapic_v1/method.py +++ b/google/api_core/gapic_v1/method.py @@ -15,18 +15,30 @@ """Helpers for wrapping low-level gRPC methods with common functionality. This is used by gapic clients to provide common error mapping, retry, timeout, -pagination, and long-running operations to gRPC methods. +compression, pagination, and long-running operations to gRPC methods. 
""" +import enum import functools from google.api_core import grpc_helpers -from google.api_core import timeout from google.api_core.gapic_v1 import client_info +from google.api_core.timeout import TimeToDeadlineTimeout USE_DEFAULT_METADATA = object() -DEFAULT = object() -"""Sentinel value indicating that a retry or timeout argument was unspecified, + + +class _MethodDefault(enum.Enum): + # Uses enum so that pytype/mypy knows that this is the only possible value. + # https://stackoverflow.com/a/60605919/101923 + # + # Literal[_DEFAULT_VALUE] is an alternative, but only added in Python 3.8. + # https://docs.python.org/3/library/typing.html#typing.Literal + _DEFAULT_VALUE = object() + + +DEFAULT = _MethodDefault._DEFAULT_VALUE +"""Sentinel value indicating that a retry, timeout, or compression argument was unspecified, so the default should be used.""" @@ -40,55 +52,14 @@ def _apply_decorators(func, decorators): ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. """ - decorators = filter(_is_not_none_or_false, reversed(decorators)) + filtered_decorators = filter(_is_not_none_or_false, reversed(decorators)) - for decorator in decorators: + for decorator in filtered_decorators: func = decorator(func) return func -def _determine_timeout(default_timeout, specified_timeout, retry): - """Determines how timeout should be applied to a wrapped method. - - Args: - default_timeout (Optional[Timeout]): The default timeout specified - at method creation time. - specified_timeout (Optional[Timeout]): The timeout specified at - invocation time. If :attr:`DEFAULT`, this will be set to - the ``default_timeout``. - retry (Optional[Retry]): The retry specified at invocation time. - - Returns: - Optional[Timeout]: The timeout to apply to the method or ``None``. - """ - # If timeout is specified as a number instead of a Timeout instance, - # convert it to a ConstantTimeout. 
- if isinstance(specified_timeout, (int, float)): - specified_timeout = timeout.ConstantTimeout(specified_timeout) - if isinstance(default_timeout, (int, float)): - default_timeout = timeout.ConstantTimeout(default_timeout) - - if specified_timeout is DEFAULT: - specified_timeout = default_timeout - - if specified_timeout is default_timeout: - # If timeout is the default and the default timeout is exponential and - # a non-default retry is specified, make sure the timeout's deadline - # matches the retry's. This handles the case where the user leaves - # the timeout default but specifies a lower deadline via the retry. - if ( - retry - and retry is not DEFAULT - and isinstance(default_timeout, timeout.ExponentialTimeout) - ): - return default_timeout.with_deadline(retry._deadline) - else: - return default_timeout - - return specified_timeout - - class _GapicCallable(object): """Callable that applies retry, timeout, and metadata logic. @@ -96,35 +67,51 @@ class _GapicCallable(object): target (Callable): The low-level RPC method. retry (google.api_core.retry.Retry): The default retry for the callable. If ``None``, this callable will not retry by default - timeout (google.api_core.timeout.Timeout): The default timeout - for the callable. If ``None``, this callable will not specify - a timeout argument to the low-level RPC method by default. + timeout (google.api_core.timeout.Timeout): The default timeout for the + callable (i.e. duration of time within which an RPC must terminate + after its start, not to be confused with deadline). If ``None``, + this callable will not specify a timeout argument to the low-level + RPC method. + compression (grpc.Compression): The default compression for the callable. + If ``None``, this callable will not specify a compression argument + to the low-level RPC method. metadata (Sequence[Tuple[str, str]]): Additional metadata that is provided to the RPC method on every invocation. 
This is merged with any metadata specified during invocation. If ``None``, no additional metadata will be passed to the RPC method. """ - def __init__(self, target, retry, timeout, metadata=None): + def __init__( + self, + target, + retry, + timeout, + compression, + metadata=None, + ): self._target = target self._retry = retry self._timeout = timeout + self._compression = compression self._metadata = metadata - def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs): - """Invoke the low-level RPC with retry, timeout, and metadata.""" - timeout = _determine_timeout( - self._timeout, - timeout, - # Use only the invocation-specified retry only for this, as we only - # want to adjust the timeout deadline if the *user* specified - # a different retry. - retry, - ) + def __call__( + self, *args, timeout=DEFAULT, retry=DEFAULT, compression=DEFAULT, **kwargs + ): + """Invoke the low-level RPC with retry, timeout, compression, and metadata.""" if retry is DEFAULT: retry = self._retry + if timeout is DEFAULT: + timeout = self._timeout + + if compression is DEFAULT: + compression = self._compression + + if isinstance(timeout, (int, float)): + timeout = TimeToDeadlineTimeout(timeout=timeout) + # Apply all applicable decorators. wrapped_func = _apply_decorators(self._target, [retry, timeout]) @@ -138,6 +125,8 @@ def __call__(self, *args, timeout=DEFAULT, retry=DEFAULT, **kwargs): metadata = list(metadata) metadata.extend(self._metadata) kwargs["metadata"] = metadata + if self._compression is not None: + kwargs["compression"] = compression return wrapped_func(*args, **kwargs) @@ -146,12 +135,15 @@ def wrap_method( func, default_retry=None, default_timeout=None, + default_compression=None, client_info=client_info.DEFAULT_CLIENT_INFO, + *, + with_call=False, ): """Wrap an RPC method with common behavior. - This applies common error wrapping, retry, and timeout behavior a function. 
- The wrapped function will take optional ``retry`` and ``timeout`` + This applies common error wrapping, retry, timeout, and compression behavior to a function. + The wrapped function will take optional ``retry``, ``timeout``, and ``compression`` arguments. For example:: @@ -159,6 +151,7 @@ def wrap_method( import google.api_core.gapic_v1.method from google.api_core import retry from google.api_core import timeout + from grpc import Compression # The original RPC method. def get_topic(name, timeout=None): @@ -167,6 +160,7 @@ def get_topic(name, timeout=None): default_retry = retry.Retry(deadline=60) default_timeout = timeout.Timeout(deadline=60) + default_compression = Compression.NoCompression wrapped_get_topic = google.api_core.gapic_v1.method.wrap_method( get_topic, default_retry) @@ -215,20 +209,34 @@ def get_topic(name, timeout=None): default_timeout (Optional[google.api_core.Timeout]): The default timeout strategy. Can also be specified as an int or float. If ``None``, the method will not have timeout specified by default. + default_compression (Optional[grpc.Compression]): The default + grpc.Compression. If ``None``, the method will not have + compression specified by default. client_info (Optional[google.api_core.gapic_v1.client_info.ClientInfo]): Client information used to create a user-agent string that's passed as gRPC metadata to the method. If unspecified, then a sane default will be used. If ``None``, then no user agent metadata will be provided to the RPC method. + with_call (bool): If True, wrapped grpc.UnaryUnaryMulticallables will + return a tuple of (response, grpc.Call) instead of just the response. + This is useful for extracting trailing metadata from unary calls. + Defaults to False. 
Returns: - Callable: A new callable that takes optional ``retry`` and ``timeout`` - arguments and applies the common error mapping, retry, timeout, + Callable: A new callable that takes optional ``retry``, ``timeout``, + and ``compression`` + arguments and applies the common error mapping, retry, timeout, compression, and metadata behavior to the low-level RPC method. """ + if with_call: + try: + func = func.with_call + except AttributeError as exc: + raise ValueError( + "with_call=True is only supported for unary calls." + ) from exc func = grpc_helpers.wrap_errors(func) - if client_info is not None: user_agent_metadata = [client_info.to_grpc_metadata()] else: @@ -236,6 +244,10 @@ def get_topic(name, timeout=None): return functools.wraps(func)( _GapicCallable( - func, default_retry, default_timeout, metadata=user_agent_metadata + func, + default_retry, + default_timeout, + default_compression, + metadata=user_agent_metadata, ) ) diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py index 84c99aa2..c0f38c0e 100644 --- a/google/api_core/gapic_v1/method_async.py +++ b/google/api_core/gapic_v1/method_async.py @@ -14,7 +14,7 @@ """AsyncIO helpers for wrapping gRPC methods with common functionality. This is used by gapic clients to provide common error mapping, retry, timeout, -pagination, and long-running operations to gRPC methods. +compression, pagination, and long-running operations to gRPC methods. """ import functools @@ -25,24 +25,35 @@ from google.api_core.gapic_v1.method import DEFAULT # noqa: F401 from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401 +_DEFAULT_ASYNC_TRANSPORT_KIND = "grpc_asyncio" + def wrap_method( func, default_retry=None, default_timeout=None, + default_compression=None, client_info=client_info.DEFAULT_CLIENT_INFO, + kind=_DEFAULT_ASYNC_TRANSPORT_KIND, ): """Wrap an async RPC method with common behavior. 
Returns: - Callable: A new callable that takes optional ``retry`` and ``timeout`` - arguments and applies the common error mapping, retry, timeout, - and metadata behavior to the low-level RPC method. + Callable: A new callable that takes optional ``retry``, ``timeout``, + and ``compression`` arguments and applies the common error mapping, + retry, timeout, metadata, and compression behavior to the low-level RPC method. """ - func = grpc_helpers_async.wrap_errors(func) + if kind == _DEFAULT_ASYNC_TRANSPORT_KIND: + func = grpc_helpers_async.wrap_errors(func) metadata = [client_info.to_grpc_metadata()] if client_info is not None else None return functools.wraps(func)( - _GapicCallable(func, default_retry, default_timeout, metadata=metadata) + _GapicCallable( + func, + default_retry, + default_timeout, + default_compression, + metadata=metadata, + ) ) diff --git a/google/api_core/gapic_v1/routing_header.py b/google/api_core/gapic_v1/routing_header.py index a7bcb5a8..c0c6f648 100644 --- a/google/api_core/gapic_v1/routing_header.py +++ b/google/api_core/gapic_v1/routing_header.py @@ -20,38 +20,68 @@ Generally, these headers are specified as gRPC metadata. """ +import functools +from enum import Enum from urllib.parse import urlencode ROUTING_METADATA_KEY = "x-goog-request-params" +# This is the value for the `maxsize` argument of @functools.lru_cache +# https://docs.python.org/3/library/functools.html#functools.lru_cache +# This represents the number of recent function calls to store. +ROUTING_PARAM_CACHE_SIZE = 32 -def to_routing_header(params): +def to_routing_header(params, qualified_enums=True): """Returns a routing header string for the given request parameters. Args: - params (Mapping[str, Any]): A dictionary containing the request + params (Mapping[str, str | bytes | Enum]): A dictionary containing the request parameters used for routing. 
+ qualified_enums (bool): Whether to represent enum values + as their type-qualified symbol names instead of as their + unqualified symbol names. Returns: str: The routing header string. """ - return urlencode( - params, - # Per Google API policy (go/api-url-encoding), / is not encoded. - safe="/", - ) + tuples = params.items() if isinstance(params, dict) else params + if not qualified_enums: + tuples = [(x[0], x[1].name) if isinstance(x[1], Enum) else x for x in tuples] + return "&".join([_urlencode_param(*t) for t in tuples]) -def to_grpc_metadata(params): +def to_grpc_metadata(params, qualified_enums=True): """Returns the gRPC metadata containing the routing headers for the given request parameters. Args: - params (Mapping[str, Any]): A dictionary containing the request + params (Mapping[str, str | bytes | Enum]): A dictionary containing the request parameters used for routing. + qualified_enums (bool): Whether to represent enum values + as their type-qualified symbol names instead of as their + unqualified symbol names. Returns: Tuple(str, str): The gRPC metadata containing the routing header key and value. """ - return (ROUTING_METADATA_KEY, to_routing_header(params)) + return (ROUTING_METADATA_KEY, to_routing_header(params, qualified_enums)) + + +# use caching to avoid repeated computation +@functools.lru_cache(maxsize=ROUTING_PARAM_CACHE_SIZE) +def _urlencode_param(key, value): + """Cacheable wrapper over urlencode + + Args: + key (str): The key of the parameter to encode. + value (str | bytes | Enum): The value of the parameter to encode. + + Returns: + str: The encoded parameter. + """ + return urlencode( + {key: value}, + # Per Google API policy (go/api-url-encoding), / is not encoded. 
+ safe="/", + ) diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py index fba78026..a6af45b7 100644 --- a/google/api_core/general_helpers.py +++ b/google/api_core/general_helpers.py @@ -12,5 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. -# This import for backward compatibiltiy only. +# This import for backward compatibility only. from functools import wraps # noqa: F401 pragma: NO COVER diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py index 594df987..07963024 100644 --- a/google/api_core/grpc_helpers.py +++ b/google/api_core/grpc_helpers.py @@ -13,38 +13,48 @@ # limitations under the License. """Helpers for :mod:`grpc`.""" +from typing import Generic, Iterator, Optional, TypeVar import collections import functools +import warnings import grpc -import pkg_resources from google.api_core import exceptions import google.auth import google.auth.credentials import google.auth.transport.grpc import google.auth.transport.requests +import google.protobuf -try: - import grpc_gcp +PROTOBUF_VERSION = google.protobuf.__version__ - HAS_GRPC_GCP = True -except ImportError: +# The grpcio-gcp package only has support for protobuf < 4 +if PROTOBUF_VERSION[0:2] == "3.": # pragma: NO COVER + try: + import grpc_gcp + + warnings.warn( + """Support for grpcio-gcp is deprecated. This feature will be + removed from `google-api-core` after January 1, 2024. 
If you need to + continue to use this feature, please pin to a specific version of + `google-api-core`.""", + DeprecationWarning, + ) + HAS_GRPC_GCP = True + except ImportError: + HAS_GRPC_GCP = False +else: HAS_GRPC_GCP = False -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None # The list of gRPC Callable interfaces that return iterators. _STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable) +# denotes the proto response type for grpc calls +P = TypeVar("P") + def _patch_callable_name(callable_): """Fix-up gRPC callable attributes. @@ -70,7 +80,7 @@ def error_remapped_callable(*args, **kwargs): return error_remapped_callable -class _StreamingResponseIterator(grpc.Call): +class _StreamingResponseIterator(Generic[P], grpc.Call): def __init__(self, wrapped, prefetch_first_result=True): self._wrapped = wrapped @@ -88,11 +98,11 @@ def __init__(self, wrapped, prefetch_first_result=True): # ignore stop iteration at this time. This should be handled outside of retry. pass - def __iter__(self): + def __iter__(self) -> Iterator[P]: """This iterator is also an iterable that returns itself.""" return self - def __next__(self): + def __next__(self) -> P: """Get the next response from the stream. Returns: @@ -135,6 +145,10 @@ def trailing_metadata(self): return self._wrapped.trailing_metadata() +# public type alias denoting the return type of streaming gapic calls +GrpcStream = _StreamingResponseIterator[P] + + def _wrap_stream_errors(callable_): """Wrap errors for Unary-Stream and Stream-Stream gRPC callables. 
@@ -202,6 +216,18 @@ def _create_composite_credentials( credentials_file (str): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials default_scopes (Sequence[str]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -246,17 +272,32 @@ def _create_composite_credentials( # Create the metadata plugin for inserting the authorization header. metadata_plugin = google.auth.transport.grpc.AuthMetadataPlugin( - credentials, request, default_host=default_host, + credentials, + request, + default_host=default_host, ) # Create a set of grpc.CallCredentials using the metadata plugin. google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin) - if ssl_credentials is None: - ssl_credentials = grpc.ssl_channel_credentials() - - # Combine the ssl credentials and the authorization credentials. - return grpc.composite_channel_credentials(ssl_credentials, google_auth_credentials) + # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of + # `grpc.compute_engine_channel_credentials` as the former supports passing + # `ssl_credentials` via `channel_credentials` which is needed for mTLS. 
+ if ssl_credentials: + # Combine the ssl credentials and the authorization credentials. + # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials + return grpc.composite_channel_credentials( + ssl_credentials, google_auth_credentials + ) + else: + # Use grpc.compute_engine_channel_credentials in order to support Direct Path. + # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials + # TODO(https://github.com/googleapis/python-api-core/issues/598): + # Although `grpc.compute_engine_channel_credentials` returns channel credentials + # outside of a Google Compute Engine environment (GCE), we should determine if + # there is a way to reliably detect a GCE environment so that + # `grpc.compute_engine_channel_credentials` is not called outside of GCE. + return grpc.compute_engine_channel_credentials(google_auth_credentials) def create_channel( @@ -268,7 +309,9 @@ def create_channel( quota_project_id=None, default_scopes=None, default_host=None, - **kwargs + compression=None, + attempt_direct_path: Optional[bool] = False, + **kwargs, ): """Create a secure channel with credentials. @@ -285,20 +328,58 @@ def create_channel( credentials_file (str): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. 
_Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials quota_project_id (str): An optional project to use for billing and quota. default_scopes (Sequence[str]): Default scopes passed by a Google client library. Use 'scopes' for user-defined scopes. default_host (str): The default endpoint. e.g., "pubsub.googleapis.com". + compression (grpc.Compression): An optional value indicating the + compression method to be used over the lifetime of the channel. + attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted + when the request is made. Direct Path is only available within a Google + Compute Engine (GCE) environment and provides a proxyless connection + which increases the available throughput, reduces latency, and increases + reliability. Note: + + - This argument should only be set in a GCE environment and for Services + that are known to support Direct Path. + - If this argument is set outside of GCE, then this request will fail + unless the back-end service happens to have configured fall-back to DNS. + - If the request causes a `ServiceUnavailable` response, it is recommended + that the client repeat the request with `attempt_direct_path` set to + `False` as the Service may not support Direct Path. + - Using `ssl_credentials` with `attempt_direct_path` set to `True` will + result in `ValueError` as this combination is not yet supported. + kwargs: Additional key-word args passed to :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`. + Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0. Returns: grpc.Channel: The created channel. Raises: google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed. + ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`. 
""" + # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`, + # raise ValueError as this is not yet supported. + # See https://github.com/googleapis/python-api-core/issues/590 + if ssl_credentials and attempt_direct_path: + raise ValueError("Using ssl_credentials with Direct Path is not supported") + composite_credentials = _create_composite_credentials( credentials=credentials, credentials_file=credentials_file, @@ -309,16 +390,60 @@ def create_channel( default_host=default_host, ) - if HAS_GRPC_GCP: - # If grpc_gcp module is available use grpc_gcp.secure_channel, - # otherwise, use grpc.secure_channel to create grpc channel. + # Note that grpcio-gcp is deprecated + if HAS_GRPC_GCP: # pragma: NO COVER + if compression is not None and compression != grpc.Compression.NoCompression: + warnings.warn( + "The `compression` argument is ignored for grpc_gcp.secure_channel creation.", + DeprecationWarning, + ) + if attempt_direct_path: + warnings.warn( + """The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation.""", + DeprecationWarning, + ) return grpc_gcp.secure_channel(target, composite_credentials, **kwargs) - else: - return grpc.secure_channel(target, composite_credentials, **kwargs) + + if attempt_direct_path: + target = _modify_target_for_direct_path(target) + + return grpc.secure_channel( + target, composite_credentials, compression=compression, **kwargs + ) + + +def _modify_target_for_direct_path(target: str) -> str: + """ + Given a target, return a modified version which is compatible with Direct Path. + + Args: + target (str): The target service address in the format 'hostname[:port]' or + 'dns://hostname[:port]'. + + Returns: + target (str): The target service address which is converted into a format compatible with Direct Path. 
+ If the target contains `dns:///` or does not contain `:///`, the target will be converted in + a format compatible with Direct Path; otherwise the original target will be returned as the + original target may already denote Direct Path. + """ + + # A DNS prefix may be included with the target to indicate the endpoint is living in the Internet, + # outside of Google Cloud Platform. + dns_prefix = "dns:///" + # Remove "dns:///" if `attempt_direct_path` is set to True as + # the Direct Path prefix `google-c2p:///` will be used instead. + target = target.replace(dns_prefix, "") + + direct_path_separator = ":///" + if direct_path_separator not in target: + target_without_port = target.split(":")[0] + # Modify the target to use Direct Path by adding the `google-c2p:///` prefix + target = f"google-c2p{direct_path_separator}{target_without_port}" + return target _MethodCall = collections.namedtuple( - "_MethodCall", ("request", "timeout", "metadata", "credentials") + "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression") ) _ChannelRequest = collections.namedtuple("_ChannelRequest", ("method", "request")) @@ -345,11 +470,15 @@ def __init__(self, method, channel): """List[protobuf.Message]: All requests sent to this callable.""" self.calls = [] """List[Tuple]: All invocations of this callable. 
Each tuple is the - request, timeout, metadata, and credentials.""" + request, timeout, metadata, compression, and credentials.""" - def __call__(self, request, timeout=None, metadata=None, credentials=None): + def __call__( + self, request, timeout=None, metadata=None, credentials=None, compression=None + ): self._channel.requests.append(_ChannelRequest(self._method, request)) - self.calls.append(_MethodCall(request, timeout, metadata, credentials)) + self.calls.append( + _MethodCall(request, timeout, metadata, credentials, compression) + ) self.requests.append(request) response = self.response @@ -464,20 +593,42 @@ def __getattr__(self, key): except KeyError: raise AttributeError - def unary_unary(self, method, request_serializer=None, response_deserializer=None): + def unary_unary( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.unary_unary implementation.""" return self._stub_for_method(method) - def unary_stream(self, method, request_serializer=None, response_deserializer=None): + def unary_stream( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.unary_stream implementation.""" return self._stub_for_method(method) - def stream_unary(self, method, request_serializer=None, response_deserializer=None): + def stream_unary( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.stream_unary implementation.""" return self._stub_for_method(method) def stream_stream( - self, method, request_serializer=None, response_deserializer=None + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, ): """grpc.Channel.stream_stream implementation.""" return self._stub_for_method(method) diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py index 92df645b..af661430 100644 --- 
a/google/api_core/grpc_helpers_async.py +++ b/google/api_core/grpc_helpers_async.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""AsyncIO helpers for :mod:`grpc` supporting 3.6+. +"""AsyncIO helpers for :mod:`grpc` supporting 3.7+. Please combine more detailed docstring in grpc_helpers.py to use following functions. This module is implementing the same surface with AsyncIO semantics. @@ -21,14 +21,15 @@ import asyncio import functools +from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar + import grpc -from grpc.experimental import aio +from grpc import aio from google.api_core import exceptions, grpc_helpers - -# TODO(lidiz) Support gRPC GCP wrapper -HAS_GRPC_GCP = False +# denotes the proto response type for grpc calls +P = TypeVar("P") # NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform # automatic patching for us. But that means the overhead of creating an @@ -78,8 +79,8 @@ async def wait_for_connection(self): raise exceptions.from_grpc_error(rpc_error) from rpc_error -class _WrappedUnaryResponseMixin(_WrappedCall): - def __await__(self): +class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall): + def __await__(self) -> Iterator[P]: try: response = yield from self._call.__await__() return response @@ -87,17 +88,17 @@ def __await__(self): raise exceptions.from_grpc_error(rpc_error) from rpc_error -class _WrappedStreamResponseMixin(_WrappedCall): +class _WrappedStreamResponseMixin(Generic[P], _WrappedCall): def __init__(self): self._wrapped_async_generator = None - async def read(self): + async def read(self) -> P: try: return await self._call.read() except grpc.RpcError as rpc_error: raise exceptions.from_grpc_error(rpc_error) from rpc_error - async def _wrapped_aiter(self): + async def _wrapped_aiter(self) -> AsyncGenerator[P, None]: try: # NOTE(lidiz) coverage doesn't understand the exception raised from # __anext__ method. 
It is covered by test case: @@ -107,7 +108,7 @@ async def _wrapped_aiter(self): except grpc.RpcError as rpc_error: raise exceptions.from_grpc_error(rpc_error) from rpc_error - def __aiter__(self): + def __aiter__(self) -> AsyncGenerator[P, None]: if not self._wrapped_async_generator: self._wrapped_async_generator = self._wrapped_aiter() return self._wrapped_async_generator @@ -130,29 +131,34 @@ async def done_writing(self): # NOTE(lidiz) Implementing each individual class separately, so we don't # expose any API that should not be seen. E.g., __aiter__ in unary-unary # RPC, or __await__ in stream-stream RPC. -class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall): +class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall): """Wrapped UnaryUnaryCall to map exceptions.""" -class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall): +class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall): """Wrapped UnaryStreamCall to map exceptions.""" class _WrappedStreamUnaryCall( - _WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall + _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall ): """Wrapped StreamUnaryCall to map exceptions.""" class _WrappedStreamStreamCall( - _WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall + _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall ): """Wrapped StreamStreamCall to map exceptions.""" +# public type alias denoting the return type of async streaming gapic calls +GrpcAsyncStream = _WrappedStreamResponseMixin +# public type alias denoting the return type of unary gapic calls +AwaitableGrpcCall = _WrappedUnaryResponseMixin + + def _wrap_unary_errors(callable_): """Map errors for Unary-Unary async callables.""" - grpc_helpers._patch_callable_name(callable_) @functools.wraps(callable_) def error_remapped_callable(*args, **kwargs): @@ -162,23 +168,13 
@@ def error_remapped_callable(*args, **kwargs): return error_remapped_callable -def _wrap_stream_errors(callable_): +def _wrap_stream_errors(callable_, wrapper_type): """Map errors for streaming RPC async callables.""" - grpc_helpers._patch_callable_name(callable_) @functools.wraps(callable_) async def error_remapped_callable(*args, **kwargs): call = callable_(*args, **kwargs) - - if isinstance(call, aio.UnaryStreamCall): - call = _WrappedUnaryStreamCall().with_call(call) - elif isinstance(call, aio.StreamUnaryCall): - call = _WrappedStreamUnaryCall().with_call(call) - elif isinstance(call, aio.StreamStreamCall): - call = _WrappedStreamStreamCall().with_call(call) - else: - raise TypeError("Unexpected type of call %s" % type(call)) - + call = wrapper_type().with_call(call) await call.wait_for_connection() return call @@ -200,10 +196,16 @@ def wrap_errors(callable_): Returns: Callable: The wrapped gRPC callable. """ - if isinstance(callable_, aio.UnaryUnaryMultiCallable): - return _wrap_unary_errors(callable_) + grpc_helpers._patch_callable_name(callable_) + + if isinstance(callable_, aio.UnaryStreamMultiCallable): + return _wrap_stream_errors(callable_, _WrappedUnaryStreamCall) + elif isinstance(callable_, aio.StreamUnaryMultiCallable): + return _wrap_stream_errors(callable_, _WrappedStreamUnaryCall) + elif isinstance(callable_, aio.StreamStreamMultiCallable): + return _wrap_stream_errors(callable_, _WrappedStreamStreamCall) else: - return _wrap_stream_errors(callable_) + return _wrap_unary_errors(callable_) def create_channel( @@ -215,6 +217,8 @@ def create_channel( quota_project_id=None, default_scopes=None, default_host=None, + compression=None, + attempt_direct_path: Optional[bool] = False, **kwargs ): """Create an AsyncIO secure channel with credentials. @@ -232,10 +236,40 @@ def create_channel( credentials_file (str): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is mutually exclusive with credentials. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials quota_project_id (str): An optional project to use for billing and quota. default_scopes (Sequence[str]): Default scopes passed by a Google client library. Use 'scopes' for user-defined scopes. default_host (str): The default endpoint. e.g., "pubsub.googleapis.com". + compression (grpc.Compression): An optional value indicating the + compression method to be used over the lifetime of the channel. + attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted + when the request is made. Direct Path is only available within a Google + Compute Engine (GCE) environment and provides a proxyless connection + which increases the available throughput, reduces latency, and increases + reliability. Note: + + - This argument should only be set in a GCE environment and for Services + that are known to support Direct Path. + - If this argument is set outside of GCE, then this request will fail + unless the back-end service happens to have configured fall-back to DNS. + - If the request causes a `ServiceUnavailable` response, it is recommended + that the client repeat the request with `attempt_direct_path` set to + `False` as the Service may not support Direct Path. 
+ - Using `ssl_credentials` with `attempt_direct_path` set to `True` will + result in `ValueError` as this combination is not yet supported. + kwargs: Additional key-word args passed to :func:`aio.secure_channel`. Returns: @@ -243,8 +277,15 @@ def create_channel( Raises: google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed. + ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`. """ + # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`, + # raise ValueError as this is not yet supported. + # See https://github.com/googleapis/python-api-core/issues/590 + if ssl_credentials and attempt_direct_path: + raise ValueError("Using ssl_credentials with Direct Path is not supported") + composite_credentials = grpc_helpers._create_composite_credentials( credentials=credentials, credentials_file=credentials_file, @@ -255,7 +296,12 @@ def create_channel( default_host=default_host, ) - return aio.secure_channel(target, composite_credentials, **kwargs) + if attempt_direct_path: + target = grpc_helpers._modify_target_for_direct_path(target) + + return aio.secure_channel( + target, composite_credentials, compression=compression, **kwargs + ) class FakeUnaryUnaryCall(_WrappedUnaryUnaryCall): diff --git a/google/api_core/operation.py b/google/api_core/operation.py index b17f753b..4b9c9a58 100644 --- a/google/api_core/operation.py +++ b/google/api_core/operation.py @@ -61,10 +61,13 @@ class Operation(polling.PollingFuture): result. metadata_type (func:`type`): The protobuf type for the operation's metadata. - retry (google.api_core.retry.Retry): The retry configuration used - when polling. This can be used to control how often :meth:`done` - is polled. Regardless of the retry's ``deadline``, it will be - overridden by the ``timeout`` argument to :meth:`result`. + polling (google.api_core.retry.Retry): The configuration used for polling. 
+ This parameter controls how often :meth:`done` is polled. If the + ``timeout`` argument is specified in the :meth:`result` method, it will + override the ``polling.timeout`` property. + retry (google.api_core.retry.Retry): DEPRECATED: use ``polling`` instead. + If specified it will override ``polling`` parameter to maintain + backward compatibility. """ def __init__( @@ -74,9 +77,10 @@ def __init__( cancel, result_type, metadata_type=None, - retry=polling.DEFAULT_RETRY, + polling=polling.DEFAULT_POLLING, + **kwargs ): - super(Operation, self).__init__(retry=retry) + super(Operation, self).__init__(polling=polling, **kwargs) self._operation = operation self._refresh = refresh self._cancel = cancel @@ -146,7 +150,7 @@ def _set_result_from_operation(self): ) self.set_exception(exception) - def _refresh_and_update(self, retry=polling.DEFAULT_RETRY): + def _refresh_and_update(self, retry=None): """Refresh the operation and update the result if needed. Args: @@ -155,10 +159,10 @@ def _refresh_and_update(self, retry=polling.DEFAULT_RETRY): # If the currently cached operation is done, no need to make another # RPC as it will not change once done. if not self._operation.done: - self._operation = self._refresh(retry=retry) + self._operation = self._refresh(retry=retry) if retry else self._refresh() self._set_result_from_operation() - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): """Checks to see if the operation is complete. Args: @@ -311,10 +315,16 @@ def from_grpc(operation, operations_stub, result_type, grpc_metadata=None, **kwa operation. 
""" refresh = functools.partial( - _refresh_grpc, operations_stub, operation.name, metadata=grpc_metadata + _refresh_grpc, + operations_stub, + operation.name, + metadata=grpc_metadata, ) cancel = functools.partial( - _cancel_grpc, operations_stub, operation.name, metadata=grpc_metadata + _cancel_grpc, + operations_stub, + operation.name, + metadata=grpc_metadata, ) return Operation(operation, refresh, cancel, result_type, **kwargs) @@ -343,9 +353,13 @@ def from_gapic(operation, operations_client, result_type, grpc_metadata=None, ** operation. """ refresh = functools.partial( - operations_client.get_operation, operation.name, metadata=grpc_metadata + operations_client.get_operation, + operation.name, + metadata=grpc_metadata, ) cancel = functools.partial( - operations_client.cancel_operation, operation.name, metadata=grpc_metadata + operations_client.cancel_operation, + operation.name, + metadata=grpc_metadata, ) return Operation(operation, refresh, cancel, result_type, **kwargs) diff --git a/google/api_core/operation_async.py b/google/api_core/operation_async.py index 6bae8654..2fd341d9 100644 --- a/google/api_core/operation_async.py +++ b/google/api_core/operation_async.py @@ -213,9 +213,13 @@ def from_gapic(operation, operations_client, result_type, grpc_metadata=None, ** operation. 
""" refresh = functools.partial( - operations_client.get_operation, operation.name, metadata=grpc_metadata + operations_client.get_operation, + operation.name, + metadata=grpc_metadata, ) cancel = functools.partial( - operations_client.cancel_operation, operation.name, metadata=grpc_metadata + operations_client.cancel_operation, + operation.name, + metadata=grpc_metadata, ) return AsyncOperation(operation, refresh, cancel, result_type, **kwargs) diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py index d7f963e2..4db32a4c 100644 --- a/google/api_core/operations_v1/__init__.py +++ b/google/api_core/operations_v1/__init__.py @@ -14,7 +14,27 @@ """Package for interacting with the google.longrunning.operations meta-API.""" +from google.api_core.operations_v1.abstract_operations_client import AbstractOperationsClient from google.api_core.operations_v1.operations_async_client import OperationsAsyncClient from google.api_core.operations_v1.operations_client import OperationsClient +from google.api_core.operations_v1.transports.rest import OperationsRestTransport -__all__ = ["OperationsAsyncClient", "OperationsClient"] +__all__ = [ + "AbstractOperationsClient", + "OperationsAsyncClient", + "OperationsClient", + "OperationsRestTransport" +] + +try: + from google.api_core.operations_v1.transports.rest_asyncio import ( + AsyncOperationsRestTransport, + ) + from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient + + __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"] +except ImportError: + # This import requires the `async_rest` extra. + # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported + # as other transports are still available. 
+ pass diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py new file mode 100644 index 00000000..160c2a88 --- /dev/null +++ b/google/api_core/operations_v1/abstract_operations_base_client.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import OrderedDict +import os +import re +from typing import Dict, Optional, Type, Union + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core.operations_v1.transports.base import ( + DEFAULT_CLIENT_INFO, + OperationsTransport, +) +from google.api_core.operations_v1.transports.rest import OperationsRestTransport + +try: + from google.api_core.operations_v1.transports.rest_asyncio import ( + AsyncOperationsRestTransport, + ) + + HAS_ASYNC_REST_DEPENDENCIES = True +except ImportError as e: + HAS_ASYNC_REST_DEPENDENCIES = False + ASYNC_REST_EXCEPTION = e + +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore + + +class AbstractOperationsBaseClientMeta(type): + """Metaclass for the Operations Base client. + + This provides base class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]] + _transport_registry["rest"] = OperationsRestTransport + if HAS_ASYNC_REST_DEPENDENCIES: + _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[OperationsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if ( + label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES + ): # pragma: NO COVER + raise ASYNC_REST_EXCEPTION + + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta): + """Manages long-running operations with an API service. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. 
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "longrunning.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_info(cls, info: dict, *args, **kwargs):
+        """
+        This class method should be overridden by the subclasses.
+
+        Args:
+            info (dict): The service account private key info.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Raises:
+            NotImplementedError: If the method is called on the base class.
+        """
+        raise NotImplementedError("`from_service_account_info` is not implemented.")
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """
+        This class method should be overridden by the subclasses.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Raises:
+            NotImplementedError: If the method is called on the base class.
+        """
+        raise NotImplementedError("`from_service_account_file` is not implemented.")
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> OperationsTransport:
+        """Returns the transport used by the client instance.
+
+        Returns:
+            OperationsTransport: The transport used by the client
+                instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def common_billing_account_path(
+        billing_account: str,
+    ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(
+        folder: str,
+    ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(
+            folder=folder,
+        )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(
+        organization: str,
+    ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(
+            organization=organization,
+        )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(
+        project: str,
+    ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(
+            project=project,
+        )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(
+        project: str,
+        location: str,
+    ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project,
+            location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[ga_credentials.Credentials] = None,
+        transport: Union[str, OperationsTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiates the operations client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, OperationsTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport.
If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + client_cert_source_func = None + is_mtls = False + if use_client_cert == "true": + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, OperationsTransport): + # transport is a OperationsTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py new file mode 100644 index 00000000..fc445362 --- /dev/null +++ b/google/api_core/operations_v1/abstract_operations_client.py @@ -0,0 +1,387 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Optional, Sequence, Tuple, Union + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core.operations_v1 import pagers +from google.api_core.operations_v1.transports.base import ( + DEFAULT_CLIENT_INFO, + OperationsTransport, +) +from google.api_core.operations_v1.abstract_operations_base_client import ( + AbstractOperationsBaseClient, +) +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore +import grpc + +OptionalRetry = Union[retries.Retry, object] + + +class AbstractOperationsClient(AbstractOperationsBaseClient): + """Manages long-running operations with an API service. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + """ + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OperationsTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, OperationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + super().__init__( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AbstractOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AbstractOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + def list_operations( + self, + name: str, + filter_: Optional[str] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOperationsPager: + r"""Lists operations that match the specified filter in the request. + If the server doesn't support this method, it returns + ``UNIMPLEMENTED``. 
+ + NOTE: the ``name`` binding allows API services to override the + binding to use different resource name schemes, such as + ``users/*/operations``. To override the binding, API services + can add a binding such as ``"/v1/{name=users/*}/operations"`` to + their service configuration. For backwards compatibility, the + default name includes the operations collection id, however + overriding users must ensure the name binding is the parent + resource, without the operations collection id. + + Args: + name (str): + The name of the operation's parent + resource. + filter_ (str): + The standard list filter. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operations_v1.pagers.ListOperationsPager: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create a protobuf request object. + request = operations_pb2.ListOperationsRequest(name=name, filter=filter_) + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOperationsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_operation( + self, + name: str, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + Clients can use this method to poll the operation result + at intervals as recommended by the API service. + + Args: + name (str): + The name of the operation resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.longrunning.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. + + """ + + request = operations_pb2.GetOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_operation( + self, + name: str, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. This method indicates that the + client is no longer interested in the operation result. It does + not cancel the operation. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. + + Args: + name (str): + The name of the operation resource to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.DeleteOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) + + def cancel_operation( + self, + name: Optional[str] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ The server makes a best effort to cancel the operation, but + success is not guaranteed. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients + can use + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation] + or other methods to check whether the cancellation succeeded or + whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; instead, + it becomes an operation with an + [Operation.error][google.api_core.operations_v1.Operation.error] value with + a [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + + Args: + name (str): + The name of the operation resource to + be cancelled. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.CancelOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) diff --git a/google/api_core/operations_v1/operations_async_client.py b/google/api_core/operations_v1/operations_async_client.py index 5a5e5562..a60c7177 100644 --- a/google/api_core/operations_v1/operations_async_client.py +++ b/google/api_core/operations_v1/operations_async_client.py @@ -24,9 +24,12 @@ import functools +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1, page_iterator_async -from google.api_core.operations_v1 import operations_client_config +from google.api_core import retry_async as retries +from google.api_core import timeout as timeouts from google.longrunning import operations_pb2 +from grpc import Compression class OperationsAsyncClient: @@ -41,39 +44,50 @@ class OperationsAsyncClient: the default configuration is used. """ - def __init__(self, channel, client_config=operations_client_config.config): + def __init__(self, channel, client_config=None): # Create the gRPC client stub with gRPC AsyncIO channel. self.operations_stub = operations_pb2.OperationsStub(channel) - # Create all wrapped methods using the interface configuration. - # The interface config contains all of the default settings for retry - # and timeout for each RPC method. 
- interfaces = client_config["interfaces"] - interface_config = interfaces["google.longrunning.Operations"] - method_configs = gapic_v1.config_async.parse_method_configs(interface_config) + default_retry = retries.AsyncRetry( + initial=0.1, # seconds + maximum=60.0, # seconds + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + timeout=600.0, # seconds + ) + default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0) + + default_compression = Compression.NoCompression self._get_operation = gapic_v1.method_async.wrap_method( self.operations_stub.GetOperation, - default_retry=method_configs["GetOperation"].retry, - default_timeout=method_configs["GetOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._list_operations = gapic_v1.method_async.wrap_method( self.operations_stub.ListOperations, - default_retry=method_configs["ListOperations"].retry, - default_timeout=method_configs["ListOperations"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._cancel_operation = gapic_v1.method_async.wrap_method( self.operations_stub.CancelOperation, - default_retry=method_configs["CancelOperation"].retry, - default_timeout=method_configs["CancelOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._delete_operation = gapic_v1.method_async.wrap_method( self.operations_stub.DeleteOperation, - default_retry=method_configs["DeleteOperation"].retry, - default_timeout=method_configs["DeleteOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) async def get_operation( @@ -81,6 +95,7 @@ async def get_operation( name, retry=gapic_v1.method_async.DEFAULT, 
timeout=gapic_v1.method_async.DEFAULT, + compression=gapic_v1.method_async.DEFAULT, metadata=None, ): """Gets the latest state of a long-running operation. @@ -107,6 +122,8 @@ async def get_operation( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -126,7 +143,11 @@ async def get_operation( metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) return await self._get_operation( - request, retry=retry, timeout=timeout, metadata=metadata + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) async def list_operations( @@ -135,6 +156,7 @@ async def list_operations( filter_, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT, + compression=gapic_v1.method_async.DEFAULT, metadata=None, ): """ @@ -171,6 +193,8 @@ async def list_operations( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -195,7 +219,11 @@ async def list_operations( # Create the method used to fetch pages method = functools.partial( - self._list_operations, retry=retry, timeout=timeout, metadata=metadata + self._list_operations, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) iterator = page_iterator_async.AsyncGRPCIterator( @@ -214,6 +242,7 @@ async def cancel_operation( name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT, + compression=gapic_v1.method_async.DEFAULT, metadata=None, ): """Starts asynchronous cancellation on a long-running operation. 
@@ -254,6 +283,8 @@ async def cancel_operation( google.api_core.exceptions.GoogleAPICallError: If an error occurred while invoking the RPC, the appropriate ``GoogleAPICallError`` subclass will be raised. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. """ @@ -265,7 +296,11 @@ async def cancel_operation( metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) await self._cancel_operation( - request, retry=retry, timeout=timeout, metadata=metadata + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) async def delete_operation( @@ -273,6 +308,7 @@ async def delete_operation( name, retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT, + compression=gapic_v1.method_async.DEFAULT, metadata=None, ): """Deletes a long-running operation. @@ -299,6 +335,8 @@ async def delete_operation( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. 
@@ -318,5 +356,9 @@ async def delete_operation( metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) await self._delete_operation( - request, retry=retry, timeout=timeout, metadata=metadata + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) diff --git a/google/api_core/operations_v1/operations_client.py b/google/api_core/operations_v1/operations_client.py index e48eac01..d1d3fd55 100644 --- a/google/api_core/operations_v1/operations_client.py +++ b/google/api_core/operations_v1/operations_client.py @@ -37,10 +37,13 @@ import functools +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import page_iterator -from google.api_core.operations_v1 import operations_client_config +from google.api_core import retry as retries +from google.api_core import timeout as timeouts from google.longrunning import operations_pb2 +from grpc import Compression class OperationsClient(object): @@ -54,39 +57,50 @@ class OperationsClient(object): the default configuration is used. """ - def __init__(self, channel, client_config=operations_client_config.config): + def __init__(self, channel, client_config=None): # Create the gRPC client stub. self.operations_stub = operations_pb2.OperationsStub(channel) - # Create all wrapped methods using the interface configuration. - # The interface config contains all of the default settings for retry - # and timeout for each RPC method. 
- interfaces = client_config["interfaces"] - interface_config = interfaces["google.longrunning.Operations"] - method_configs = gapic_v1.config.parse_method_configs(interface_config) + default_retry = retries.Retry( + initial=0.1, # seconds + maximum=60.0, # seconds + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + timeout=600.0, # seconds + ) + default_timeout = timeouts.TimeToDeadlineTimeout(timeout=600.0) + + default_compression = Compression.NoCompression self._get_operation = gapic_v1.method.wrap_method( self.operations_stub.GetOperation, - default_retry=method_configs["GetOperation"].retry, - default_timeout=method_configs["GetOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._list_operations = gapic_v1.method.wrap_method( self.operations_stub.ListOperations, - default_retry=method_configs["ListOperations"].retry, - default_timeout=method_configs["ListOperations"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._cancel_operation = gapic_v1.method.wrap_method( self.operations_stub.CancelOperation, - default_retry=method_configs["CancelOperation"].retry, - default_timeout=method_configs["CancelOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) self._delete_operation = gapic_v1.method.wrap_method( self.operations_stub.DeleteOperation, - default_retry=method_configs["DeleteOperation"].retry, - default_timeout=method_configs["DeleteOperation"].timeout, + default_retry=default_retry, + default_timeout=default_timeout, + default_compression=default_compression, ) # Service calls @@ -95,6 +109,7 @@ def get_operation( name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, + compression=gapic_v1.method.DEFAULT, metadata=None, ): 
"""Gets the latest state of a long-running operation. @@ -121,6 +136,8 @@ def get_operation( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -140,7 +157,11 @@ def get_operation( metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) return self._get_operation( - request, retry=retry, timeout=timeout, metadata=metadata + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) def list_operations( @@ -149,6 +170,7 @@ def list_operations( filter_, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, + compression=gapic_v1.method.DEFAULT, metadata=None, ): """ @@ -185,6 +207,8 @@ def list_operations( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -209,7 +233,11 @@ def list_operations( # Create the method used to fetch pages method = functools.partial( - self._list_operations, retry=retry, timeout=timeout, metadata=metadata + self._list_operations, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, ) iterator = page_iterator.GRPCIterator( @@ -228,6 +256,7 @@ def cancel_operation( name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, + compression=gapic_v1.method.DEFAULT, metadata=None, ): """Starts asynchronous cancellation on a long-running operation. @@ -260,6 +289,8 @@ def cancel_operation( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. 
+ compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -278,13 +309,20 @@ def cancel_operation( metadata = metadata or [] metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) - self._cancel_operation(request, retry=retry, timeout=timeout, metadata=metadata) + self._cancel_operation( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) def delete_operation( self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT, + compression=gapic_v1.method.DEFAULT, metadata=None, ): """Deletes a long-running operation. @@ -311,6 +349,8 @@ def delete_operation( unspecified, the the default timeout in the client configuration is used. If ``None``, then the RPC method will not time out. + compression (grpc.Compression): An element of grpc.compression + e.g. grpc.compression.Gzip. metadata (Optional[List[Tuple[str, str]]]): Additional gRPC metadata. @@ -329,4 +369,10 @@ def delete_operation( metadata = metadata or [] metadata.append(gapic_v1.routing_header.to_grpc_metadata({"name": name})) - self._delete_operation(request, retry=retry, timeout=timeout, metadata=metadata) + self._delete_operation( + request, + retry=retry, + timeout=timeout, + compression=compression, + metadata=metadata, + ) diff --git a/google/api_core/operations_v1/operations_client_config.py b/google/api_core/operations_v1/operations_client_config.py index 6cf95753..3ad3548c 100644 --- a/google/api_core/operations_v1/operations_client_config.py +++ b/google/api_core/operations_v1/operations_client_config.py @@ -12,8 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""gapic configuration for the googe.longrunning.operations client.""" +"""gapic configuration for the google.longrunning.operations client.""" +# DEPRECATED: retry and timeout classes are instantiated directly config = { "interfaces": { "google.longrunning.Operations": { diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py new file mode 100644 index 00000000..7ab0cd36 --- /dev/null +++ b/google/api_core/operations_v1/operations_rest_client_async.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Optional, Sequence, Tuple, Union + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core.operations_v1 import pagers_async as pagers +from google.api_core.operations_v1.transports.base import ( + DEFAULT_CLIENT_INFO, + OperationsTransport, +) +from google.api_core.operations_v1.abstract_operations_base_client import ( + AbstractOperationsBaseClient, +) +from google.longrunning import operations_pb2 + +try: + from google.auth.aio import credentials as ga_credentials # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError( + "The `async_rest` extra of `google-api-core` is required to use long-running operations. 
Install it by running " + "`pip install google-api-core[async_rest]`." + ) from e + + +class AsyncOperationsRestClient(AbstractOperationsBaseClient): + """Manages long-running operations with a REST API service for the asynchronous client. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + """ + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OperationsTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the operations client. + + Args: + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, OperationsTransport]): The + transport to use. If set to None, this defaults to 'rest_asyncio'. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + super().__init__( + credentials=credentials, # type: ignore + # NOTE: If a transport is not provided, we force the client to use the async + # REST transport. + transport=transport or "rest_asyncio", + client_options=client_options, + client_info=client_info, + ) + + async def get_operation( + self, + name: str, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + Clients can use this method to poll the operation result + at intervals as recommended by the API service. + + Args: + name (str): + The name of the operation resource. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.longrunning.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. + + """ + + request = operations_pb2.GetOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + name: str, + filter_: Optional[str] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOperationsAsyncPager: + r"""Lists operations that match the specified filter in the request. + If the server doesn't support this method, it returns + ``UNIMPLEMENTED``. + + NOTE: the ``name`` binding allows API services to override the + binding to use different resource name schemes, such as + ``users/*/operations``. To override the binding, API services + can add a binding such as ``"/v1/{name=users/*}/operations"`` to + their service configuration. 
For backwards compatibility, the + default name includes the operations collection id, however + overriding users must ensure the name binding is the parent + resource, without the operations collection id. + + Args: + name (str): + The name of the operation's parent + resource. + filter_ (str): + The standard list filter. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operations_v1.pagers.ListOperationsPager: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create a protobuf request object. + request = operations_pb2.ListOperationsRequest(name=name, filter=filter_) + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOperationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + name: str, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. This method indicates that the + client is no longer interested in the operation result. It does + not cancel the operation. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. + + Args: + name (str): + The name of the operation resource to + be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.DeleteOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + name: Optional[str] = None, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. 
+ The server makes a best effort to cancel the operation, but + success is not guaranteed. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients + can use + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation] + or other methods to check whether the cancellation succeeded or + whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; instead, + it becomes an operation with an + [Operation.error][google.api_core.operations_v1.Operation.error] value with + a [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + + Args: + name (str): + The name of the operation resource to + be cancelled. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.CancelOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py new file mode 100644 index 00000000..132f1c66 --- /dev/null +++ b/google/api_core/operations_v1/pagers.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Callable, + Iterator, + Sequence, + Tuple, +) + +from google.longrunning import operations_pb2 +from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase + + +class ListOperationsPager(ListOperationsPagerBase): + """A pager for iterating through ``list_operations`` requests. + + This class thinly wraps an initial + :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., operations_pb2.ListOperationsResponse], + request: operations_pb2.ListOperationsRequest, + response: operations_pb2.ListOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + super().__init__( + method=method, request=request, response=response, metadata=metadata + ) + + @property + def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[operations_pb2.Operation]: + for page in self.pages: + yield from page.operations diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py new file mode 100644 index 00000000..e2909dd5 --- /dev/null +++ b/google/api_core/operations_v1/pagers_async.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Callable, + AsyncIterator, + Sequence, + Tuple, +) + +from google.longrunning import operations_pb2 +from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase + + +class ListOperationsAsyncPager(ListOperationsPagerBase): + """A pager for iterating through ``list_operations`` requests. 
+
+    This class thinly wraps an initial
+    :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``operations`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListOperations`` requests and continue to iterate
+    through the ``operations`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+
+    def __init__(
+        self,
+        method: Callable[..., operations_pb2.ListOperationsResponse],
+        request: operations_pb2.ListOperationsRequest,
+        response: operations_pb2.ListOperationsResponse,
+        *,
+        metadata: Sequence[Tuple[str, str]] = ()
+    ):
+        super().__init__(
+            method=method, request=request, response=response, metadata=metadata
+        )
+
+    @property
+    async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]:
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]:
+        async def async_generator():
+            async for page in self.pages:
+                for operation in page.operations:
+                    yield operation
+
+        return async_generator()
diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py
new file mode 100644
index 00000000..24caf74f
--- /dev/null
+++ b/google/api_core/operations_v1/pagers_base.py
@@ -0,0 +1,73 @@
+# -*- coding: utf-8 -*-
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + Callable, + Sequence, + Tuple, +) + +from google.longrunning import operations_pb2 + + +class ListOperationsPagerBase: + """A pager for iterating through ``list_operations`` requests. + + This class thinly wraps an initial + :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., operations_pb2.ListOperationsResponse], + request: operations_pb2.ListOperationsRequest, + response: operations_pb2.ListOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.longrunning.operations_pb2.ListOperationsRequest): + The initial request object. + response (google.longrunning.operations_pb2.ListOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = request + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py new file mode 100644 index 00000000..8c24ce6e --- /dev/null +++ b/google/api_core/operations_v1/transports/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import cast, Dict, Tuple + +from .base import OperationsTransport +from .rest import OperationsRestTransport + +# Compile a registry of transports. +_transport_registry: Dict[str, OperationsTransport] = OrderedDict() +_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport) + +__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport") + +try: + from .rest_asyncio import AsyncOperationsRestTransport + + __all__ += ("AsyncOperationsRestTransport",) + _transport_registry["rest_asyncio"] = cast( + OperationsTransport, AsyncOperationsRestTransport + ) +except ImportError: + # This import requires the `async_rest` extra. 
+ # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported + # as other transports are still available. + pass diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py new file mode 100644 index 00000000..71764c1e --- /dev/null +++ b/google/api_core/operations_v1/transports/base.py @@ -0,0 +1,294 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +import re +from typing import Awaitable, Callable, Optional, Sequence, Union + +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.api_core import version +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 +from google.oauth2 import service_account # type: ignore +import google.protobuf +from google.protobuf import empty_pb2, json_format # type: ignore +from grpc import Compression + + +PROTOBUF_VERSION = google.protobuf.__version__ + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=version.__version__, +) + + +class OperationsTransport(abc.ABC): + """Abstract transport class for Operations.""" + + AUTH_SCOPES = () + + DEFAULT_HOST: str = "longrunning.googleapis.com" + + def 
__init__( + self, + *, + host: str = DEFAULT_HOST, + # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`. + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme="https", + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ":" not in host:
+            host += ":443"  # pragma: NO COVER
+        self._host = host
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs(
+                "'credentials_file' and 'credentials' are mutually exclusive"
+            )
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(
+                **scopes_kwargs, quota_project_id=quota_project_id
+            )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+ if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_retry=retries.Retry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + default_compression=Compression.NoCompression, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_retry=retries.Retry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + default_compression=Compression.NoCompression, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_retry=retries.Retry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + default_compression=Compression.NoCompression, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_retry=retries.Retry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + default_compression=Compression.NoCompression, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + def _convert_protobuf_message_to_dict( + self, message: google.protobuf.message.Message + ): + r"""Converts protobuf message to a dictionary. + + When the dictionary is encoded to JSON, it conforms to proto3 JSON spec. + + Args: + message(google.protobuf.message.Message): The protocol buffers message + instance to serialize. + + Returns: + A dict representation of the protocol buffer message. + """ + # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility + # with protobuf 3.x 4.x, Remove once support for protobuf 3.x and 4.x is dropped. + if PROTOBUF_VERSION[0:2] in ["3.", "4."]: + result = json_format.MessageToDict( + message, + preserving_proto_field_name=True, + including_default_value_fields=True, # type: ignore # backward compatibility + ) + else: + result = json_format.MessageToDict( + message, + preserving_proto_field_name=True, + always_print_fields_with_no_presence=True, + ) + + return result + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + +__all__ = 
("OperationsTransport",) diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py new file mode 100644 index 00000000..0705c518 --- /dev/null +++ b/google/api_core/operations_v1/transports/rest.py @@ -0,0 +1,485 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from requests import __version__ as requests_version + +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import path_template # type: ignore +from google.api_core import rest_helpers # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format # type: ignore +import google.protobuf + +import grpc +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport + +PROTOBUF_VERSION = google.protobuf.__version__ + +OptionalRetry = Union[retries.Retry, object] + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=f"requests@{requests_version}", +) + + +class OperationsRestTransport(OperationsTransport): + """REST backend transport for Operations. + + Manages long-running operations with an API service. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "longrunning.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + http_options: Optional[Dict] = None, + path_prefix: str = "v1", + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + http_options: a dictionary of http_options for transcoding, to override + the defaults from operations.proto. Each method has an entry + with the corresponding http rules as value. + path_prefix: path prefix (usually represents API version). Set to + "v1" by default. 
+ + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables. + self._prep_wrapped_messages(client_info) + self._http_options = http_options or {} + self._path_prefix = path_prefix + + def _list_operations( + self, + request: operations_pb2.ListOperationsRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (~.operations_pb2.ListOperationsRequest): + The request object. The request message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.ListOperationsResponse: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. 
+ + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**}}/operations".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.ListOperations" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.ListOperations" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.ListOperationsRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + api_response = operations_pb2.ListOperationsResponse() + json_format.Parse(response.content, api_response, ignore_unknown_fields=False) + return api_response + + def _get_operation( + self, + request: operations_pb2.GetOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. 
+ retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (~.operations_pb2.GetOperationRequest): + The request object. The request message for + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. + + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.GetOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.GetOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.GetOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. 
+ response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + api_response = operations_pb2.Operation() + json_format.Parse(response.content, api_response, ignore_unknown_fields=False) + return api_response + + def _delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> empty_pb2.Empty: + r"""Call the delete operation method over HTTP. + + Args: + request (~.operations_pb2.DeleteOperationRequest): + The request object. The request message for + [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options = [ + { + "method": "delete", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.DeleteOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.DeleteOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.DeleteOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return empty_pb2.Empty() + + def _cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> empty_pb2.Empty: + r"""Call the cancel operation method over HTTP. 
+ + Args: + request (~.operations_pb2.CancelOperationRequest): + The request object. The request message for + [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = [ + { + "method": "post", + "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix), + "body": "*", + }, + ] + if "google.longrunning.Operations.CancelOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.CancelOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + # Jsonify the request body + body_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["body"], body_request) + body = json_format.MessageToDict( + body_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. 
+ response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return empty_pb2.Empty() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + return self._list_operations + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + return self._get_operation + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], empty_pb2.Empty]: + return self._delete_operation + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], empty_pb2.Empty]: + return self._cancel_operation + + +__all__ = ("OperationsRestTransport",) diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py new file mode 100644 index 00000000..71c20eb8 --- /dev/null +++ b/google/api_core/operations_v1/transports/rest_asyncio.py @@ -0,0 +1,560 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import json +from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple + +from google.auth import __version__ as auth_version + +try: + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError( + "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running " + "`pip install google-api-core[async_rest]`." + ) from e + +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import path_template # type: ignore +from google.api_core import rest_helpers # type: ignore +from google.api_core import retry_async as retries_async # type: ignore +from google.auth.aio import credentials as ga_credentials_async # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format # type: ignore + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"google-auth@{auth_version}", +) + + +class AsyncOperationsRestTransport(OperationsTransport): + """Asynchronous REST backend transport for Operations. + + Manages async long-running operations with an API service. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. 
Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "longrunning.googleapis.com", + credentials: Optional[ga_credentials_async.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + http_options: Optional[Dict] = None, + path_prefix: str = "v1", + # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport. + # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport. + # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport. + # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport. + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + http_options: a dictionary of http_options for transcoding, to override + the defaults from operations.proto. Each method has an entry + with the corresponding http rules as value. + path_prefix: path prefix (usually represents API version). Set to + "v1" by default. + + """ + unsupported_params = { + # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport. + "google.api_core.client_options.ClientOptions.credentials_file": credentials_file, + # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport. + "google.api_core.client_options.ClientOptions.scopes": scopes, + # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport. + "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id, + # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport. + "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls, + # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport. 
+ "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls, + } + provided_unsupported_params = [ + name for name, value in unsupported_params.items() if value is not None + ] + if provided_unsupported_params: + raise core_exceptions.AsyncRestUnsupportedParameterError( + f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}" + ) + + super().__init__( + host=host, + # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved. + credentials=credentials, # type: ignore + client_info=client_info, + # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported. + always_use_jwt_access=False, + ) + # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for + # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous + # code. + # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved. + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables. + self._prep_wrapped_messages(client_info) + self._http_options = http_options or {} + self._path_prefix = path_prefix + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.list_operations: gapic_v1.method_async.wrap_method( + self.list_operations, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.get_operation: gapic_v1.method_async.wrap_method( + self.get_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.delete_operation: gapic_v1.method_async.wrap_method( + self.delete_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.cancel_operation: gapic_v1.method_async.wrap_method( + self.cancel_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + } + + async def _list_operations( + self, + request: operations_pb2.ListOperationsRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Asynchronously call the list operations method over HTTP. 
+ + Args: + request (~.operations_pb2.ListOperationsRequest): + The request object. The request message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.ListOperationsResponse: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**}}/operations".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.ListOperations" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.ListOperations" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.ListOperationsRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + content = await response.read() + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + api_response = operations_pb2.ListOperationsResponse() + json_format.Parse(content, api_response, ignore_unknown_fields=False) + return api_response + + async def _get_operation( + self, + request: operations_pb2.GetOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Asynchronously call the get operation method over HTTP. + + Args: + request (~.operations_pb2.GetOperationRequest): + The request object. The request message for + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. 
+ + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.GetOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.GetOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.GetOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + content = await response.read() + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + api_response = operations_pb2.Operation() + json_format.Parse(content, api_response, ignore_unknown_fields=False) + return api_response + + async def _delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> empty_pb2.Empty: + r"""Asynchronously call the delete operation method over HTTP. + + Args: + request (~.operations_pb2.DeleteOperationRequest): + The request object. The request message for + [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options = [ + { + "method": "delete", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.DeleteOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.DeleteOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.DeleteOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return empty_pb2.Empty() + + async def _cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. 
+ retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + # TODO(https://github.com/googleapis/python-api-core/issues/722): Add `retry` parameter + # to allow configuring retryable error codes. + ) -> empty_pb2.Empty: + r"""Asynchronously call the cancel operation method over HTTP. + + Args: + request (~.operations_pb2.CancelOperationRequest): + The request object. The request message for + [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation]. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options = [ + { + "method": "post", + "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix), + "body": "*", + }, + ] + if "google.longrunning.Operations.CancelOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.CancelOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + # Jsonify the request body + body_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["body"], body_request) + body = json_format.MessageToDict( + body_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # 
TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return empty_pb2.Empty() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Coroutine[Any, Any, operations_pb2.ListOperationsResponse], + ]: + return self._list_operations + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Coroutine[Any, Any, operations_pb2.Operation], + ]: + return self._get_operation + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty] + ]: + return self._delete_operation + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty] + ]: + return self._cancel_operation + + +__all__ = ("AsyncOperationsRestTransport",) diff --git a/google/api_core/page_iterator.py b/google/api_core/page_iterator.py index 7ddc5cbc..23761ec4 100644 --- a/google/api_core/page_iterator.py +++ b/google/api_core/page_iterator.py @@ -448,7 +448,7 @@ class _GAXIterator(Iterator): page_iter (google.gax.PageIterator): A GAX page iterator to be wrapped to conform to the :class:`Iterator` interface. 
item_to_value (Callable[Iterator, Any]): Callable to convert an item - from the the protobuf response into a native object. Will + from the protobuf response into a native object. Will be called with the iterator and a single item. max_results (int): The maximum number of results to fetch. diff --git a/google/api_core/path_template.py b/google/api_core/path_template.py index c5969c14..b8ebb2af 100644 --- a/google/api_core/path_template.py +++ b/google/api_core/path_template.py @@ -25,6 +25,8 @@ from __future__ import unicode_literals +from collections import deque +import copy import functools import re @@ -64,7 +66,7 @@ def _expand_variable_match(positional_vars, named_vars, match): """Expand a matched variable with its value. Args: - positional_vars (list): A list of positonal variables. This list will + positional_vars (list): A list of positional variables. This list will be modified. named_vars (dict): A dictionary of named variables. match (re.Match): A regular expression match. @@ -170,6 +172,56 @@ def _generate_pattern_for_template(tmpl): return _VARIABLE_RE.sub(_replace_variable_with_pattern, tmpl) +def get_field(request, field): + """Get the value of a field from a given dictionary. + + Args: + request (dict | Message): A dictionary or a Message object. + field (str): The key to the request in dot notation. + + Returns: + The value of the field. + """ + parts = field.split(".") + value = request + + for part in parts: + if not isinstance(value, dict): + value = getattr(value, part, None) + else: + value = value.get(part) + if isinstance(value, dict): + return + return value + + +def delete_field(request, field): + """Delete the value of a field from a given dictionary. + + Args: + request (dict | Message): A dictionary object or a Message. + field (str): The key to the request in dot notation. 
+ """ + parts = deque(field.split(".")) + while len(parts) > 1: + part = parts.popleft() + if not isinstance(request, dict): + if hasattr(request, part): + request = getattr(request, part, None) + else: + return + else: + request = request.get(part) + part = parts.popleft() + if not isinstance(request, dict): + if hasattr(request, part): + request.ClearField(part) + else: + return + else: + request.pop(part, None) + + def validate(tmpl, path): """Validate a path against the path template. @@ -193,3 +245,102 @@ def validate(tmpl, path): """ pattern = _generate_pattern_for_template(tmpl) + "$" return True if re.match(pattern, path) is not None else False + + +def transcode(http_options, message=None, **request_kwargs): + """Transcodes a grpc request pattern into a proper HTTP request following the rules outlined here, + https://github.com/googleapis/googleapis/blob/master/google/api/http.proto#L44-L312 + + Args: + http_options (list(dict)): A list of dicts which consist of these keys, + 'method' (str): The http method + 'uri' (str): The path template + 'body' (str): The body field name (optional) + (This is a simplified representation of the proto option `google.api.http`) + + message (Message) : A request object (optional) + request_kwargs (dict) : A dict representing the request object + + Returns: + dict: The transcoded request with these keys, + 'method' (str) : The http method + 'uri' (str) : The expanded uri + 'body' (dict | Message) : A dict or a Message representing the body (optional) + 'query_params' (dict | Message) : A dict or Message mapping query parameter variables and values + + Raises: + ValueError: If the request does not match the given template. 
+ """ + transcoded_value = message or request_kwargs + bindings = [] + for http_option in http_options: + request = {} + + # Assign path + uri_template = http_option["uri"] + fields = [ + (m.group("name"), m.group("template")) + for m in _VARIABLE_RE.finditer(uri_template) + ] + bindings.append((uri_template, fields)) + + path_args = {field: get_field(transcoded_value, field) for field, _ in fields} + request["uri"] = expand(uri_template, **path_args) + + if not validate(uri_template, request["uri"]) or not all(path_args.values()): + continue + + # Remove fields used in uri path from request + leftovers = copy.deepcopy(transcoded_value) + for path_field, _ in fields: + delete_field(leftovers, path_field) + + # Assign body and query params + body = http_option.get("body") + + if body: + if body == "*": + request["body"] = leftovers + if message: + request["query_params"] = message.__class__() + else: + request["query_params"] = {} + else: + try: + if message: + request["body"] = getattr(leftovers, body) + delete_field(leftovers, body) + else: + request["body"] = leftovers.pop(body) + except (KeyError, AttributeError): + continue + request["query_params"] = leftovers + else: + request["query_params"] = leftovers + request["method"] = http_option["method"] + return request + + bindings_description = [ + '\n\tURI: "{}"' + "\n\tRequired request fields:\n\t\t{}".format( + uri, + "\n\t\t".join( + [ + 'field: "{}", pattern: "{}"'.format(n, p if p else "*") + for n, p in fields + ] + ), + ) + for uri, fields in bindings + ] + + raise ValueError( + "Invalid request." + "\nSome of the fields of the request message are either not initialized or " + "initialized with an invalid value." + "\nPlease make sure your request matches at least one accepted HTTP binding." 
+ "\nTo match a binding the request message must have all the required fields " + "initialized with values matching their patterns as listed below:{}".format( + "\n".join(bindings_description) + ) + ) diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py index 896e89c1..30cd7c85 100644 --- a/google/api_core/protobuf_helpers.py +++ b/google/api_core/protobuf_helpers.py @@ -63,9 +63,7 @@ def from_any_pb(pb_type, any_pb): # Unpack the Any object and populate the protobuf message instance. if not any_pb.Unpack(msg_pb): raise TypeError( - "Could not convert {} to {}".format( - any_pb.__class__.__name__, pb_type.__name__ - ) + f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`" ) # Done; return the message. @@ -288,10 +286,10 @@ def field_mask(original, modified): Args: original (~google.protobuf.message.Message): the original message. - If set to None, this field will be interpretted as an empty + If set to None, this field will be interpreted as an empty message. modified (~google.protobuf.message.Message): the modified message. - If set to None, this field will be interpretted as an empty + If set to None, this field will be interpreted as an empty message. Returns: @@ -313,7 +311,7 @@ def field_mask(original, modified): modified = copy.deepcopy(original) modified.Clear() - if type(original) != type(modified): + if not isinstance(original, type(modified)): raise ValueError( "expected that both original and modified should be of the " 'same type, received "{!r}" and "{!r}".'.format( diff --git a/google/api_core/py.typed b/google/api_core/py.typed new file mode 100644 index 00000000..1d5517b1 --- /dev/null +++ b/google/api_core/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-api-core package uses inline types. 
diff --git a/google/api_core/rest_helpers.py b/google/api_core/rest_helpers.py new file mode 100644 index 00000000..a78822f1 --- /dev/null +++ b/google/api_core/rest_helpers.py @@ -0,0 +1,109 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for rest transports.""" + +import functools +import operator + + +def flatten_query_params(obj, strict=False): + """Flatten a dict into a list of (name,value) tuples. + + The result is suitable for setting query params on an http request. + + .. code-block:: python + + >>> obj = {'a': + ... {'b': + ... {'c': ['x', 'y', 'z']} }, + ... 'd': 'uvw', + ... 'e': True, } + >>> flatten_query_params(obj, strict=True) + [('a.b.c', 'x'), ('a.b.c', 'y'), ('a.b.c', 'z'), ('d', 'uvw'), ('e', 'true')] + + Note that, as described in + https://github.com/googleapis/googleapis/blob/48d9fb8c8e287c472af500221c6450ecd45d7d39/google/api/http.proto#L117, + repeated fields (i.e. list-valued fields) may only contain primitive types (not lists or dicts). + This is enforced in this function. + + Args: + obj: a possibly nested dictionary (from json), or None + strict: a bool, defaulting to False, to enforce that all values in the + result tuples be strings and, if boolean, lower-cased. + + Returns: a list of tuples, with each tuple having a (possibly) multi-part name + and a scalar value. + + Raises: + TypeError if obj is not a dict or None + ValueError if obj contains a list of non-primitive values. 
+ """ + + if obj is not None and not isinstance(obj, dict): + raise TypeError("flatten_query_params must be called with dict object") + + return _flatten(obj, key_path=[], strict=strict) + + +def _flatten(obj, key_path, strict=False): + if obj is None: + return [] + if isinstance(obj, dict): + return _flatten_dict(obj, key_path=key_path, strict=strict) + if isinstance(obj, list): + return _flatten_list(obj, key_path=key_path, strict=strict) + return _flatten_value(obj, key_path=key_path, strict=strict) + + +def _is_primitive_value(obj): + if obj is None: + return False + + if isinstance(obj, (list, dict)): + raise ValueError("query params may not contain repeated dicts or lists") + + return True + + +def _flatten_value(obj, key_path, strict=False): + return [(".".join(key_path), _canonicalize(obj, strict=strict))] + + +def _flatten_dict(obj, key_path, strict=False): + items = ( + _flatten(value, key_path=key_path + [key], strict=strict) + for key, value in obj.items() + ) + return functools.reduce(operator.concat, items, []) + + +def _flatten_list(elems, key_path, strict=False): + # Only lists of scalar values are supported. + # The name (key_path) is repeated for each value. + items = ( + _flatten_value(elem, key_path=key_path, strict=strict) + for elem in elems + if _is_primitive_value(elem) + ) + return functools.reduce(operator.concat, items, []) + + +def _canonicalize(obj, strict=False): + if strict: + value = str(obj) + if isinstance(obj, bool): + value = value.lower() + return value + return obj diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py new file mode 100644 index 00000000..84aa270c --- /dev/null +++ b/google/api_core/rest_streaming.py @@ -0,0 +1,66 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for server-side streaming in REST.""" + +from typing import Union + +import proto +import requests +import google.protobuf.message +from google.api_core._rest_streaming_base import BaseResponseIterator + + +class ResponseIterator(BaseResponseIterator): + """Iterator over REST API responses. + + Args: + response (requests.Response): An API response object. + response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response + class expected to be returned from an API. + + Raises: + ValueError: + - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`. + """ + + def __init__( + self, + response: requests.Response, + response_message_cls: Union[proto.Message, google.protobuf.message.Message], + ): + self._response = response + # Inner iterator over HTTP response's content. 
+ self._response_itr = self._response.iter_content(decode_unicode=True) + super(ResponseIterator, self).__init__( + response_message_cls=response_message_cls + ) + + def cancel(self): + """Cancel existing streaming operation.""" + self._response.close() + + def __next__(self): + while not self._ready_objs: + try: + chunk = next(self._response_itr) + self._process_chunk(chunk) + except StopIteration as e: + if self._level > 0: + raise ValueError("Unfinished stream: %s" % self._obj) + raise e + return self._grab() + + def __iter__(self): + return self diff --git a/google/api_core/rest_streaming_async.py b/google/api_core/rest_streaming_async.py new file mode 100644 index 00000000..370c2b53 --- /dev/null +++ b/google/api_core/rest_streaming_async.py @@ -0,0 +1,89 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for asynchronous server-side streaming in REST.""" + +from typing import Union + +import proto + +try: + import google.auth.aio.transport +except ImportError as e: # pragma: NO COVER + raise ImportError( + "`google-api-core[async_rest]` is required to use asynchronous rest streaming. " + "Install the `async_rest` extra of `google-api-core` using " + "`pip install google-api-core[async_rest]`." + ) from e + +import google.protobuf.message +from google.api_core._rest_streaming_base import BaseResponseIterator + + +class AsyncResponseIterator(BaseResponseIterator): + """Asynchronous Iterator over REST API responses. 
+
+    Args:
+        response (google.auth.aio.transport.Response): An API response object.
+        response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response
+            class expected to be returned from an API.
+
+    Raises:
+        ValueError:
+            - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`.
+    """
+
+    def __init__(
+        self,
+        response: google.auth.aio.transport.Response,
+        response_message_cls: Union[proto.Message, google.protobuf.message.Message],
+    ):
+        self._response = response
+        self._chunk_size = 1024
+        # TODO(https://github.com/googleapis/python-api-core/issues/703): mypy does not recognize the abstract content
+        # method as an async generator as it looks for the `yield` keyword in the implementation.
+        # Given that the abstract method is not implemented, mypy fails to recognize it as an async generator.
+        # mypy warnings are silenced until the linked issue is resolved.
+        self._response_itr = self._response.content(self._chunk_size).__aiter__()  # type: ignore
+        super(AsyncResponseIterator, self).__init__(
+            response_message_cls=response_message_cls
+        )
+
+    async def __aenter__(self):
+        return self
+
+    async def cancel(self):
+        """Cancel existing streaming operation."""
+        await self._response.close()
+
+    async def __anext__(self):
+        while not self._ready_objs:
+            try:
+                chunk = await self._response_itr.__anext__()
+                chunk = chunk.decode("utf-8")
+                self._process_chunk(chunk)
+            except StopAsyncIteration as e:
+                if self._level > 0:
+                    raise ValueError("Unfinished stream: %s" % self._obj)
+                raise e
+            except ValueError as e:
+                raise e
+        return self._grab()
+
+    def __aiter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """Cancel existing async streaming operation."""
+        await self._response.close()
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
deleted file mode 100644
index d39f97c1..00000000
--- a/google/api_core/retry.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# 
Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helpers for retrying functions with exponential back-off. - -The :class:`Retry` decorator can be used to retry functions that raise -exceptions using exponential backoff. Because a exponential sleep algorithm is -used, the retry is limited by a `deadline`. The deadline is the maxmimum amount -of time a method can block. This is used instead of total number of retries -because it is difficult to ascertain the amount of time a function can block -when using total number of retries and exponential backoff. - -By default, this decorator will retry transient -API errors (see :func:`if_transient_error`). For example: - -.. code-block:: python - - @retry.Retry() - def call_flaky_rpc(): - return client.flaky_rpc() - - # Will retry flaky_rpc() if it raises transient API errors. - result = call_flaky_rpc() - -You can pass a custom predicate to retry on different exceptions, such as -waiting for an eventually consistent item to be available: - -.. code-block:: python - - @retry.Retry(predicate=if_exception_type(exceptions.NotFound)) - def check_if_exists(): - return client.does_thing_exist() - - is_available = check_if_exists() - -Some client library methods apply retry automatically. These methods can accept -a ``retry`` parameter that allows you to configure the behavior: - -.. 
code-block:: python - - my_retry = retry.Retry(deadline=60) - result = client.some_method(retry=my_retry) - -""" - -from __future__ import unicode_literals - -import datetime -import functools -import logging -import random -import time - -import requests.exceptions - -from google.api_core import datetime_helpers -from google.api_core import exceptions -from google.auth import exceptions as auth_exceptions - -_LOGGER = logging.getLogger(__name__) -_DEFAULT_INITIAL_DELAY = 1.0 # seconds -_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds -_DEFAULT_DELAY_MULTIPLIER = 2.0 -_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds - - -def if_exception_type(*exception_types): - """Creates a predicate to check if the exception is of a given type. - - Args: - exception_types (Sequence[:func:`type`]): The exception types to check - for. - - Returns: - Callable[Exception]: A predicate that returns True if the provided - exception is of the given type(s). - """ - - def if_exception_type_predicate(exception): - """Bound predicate for checking an exception type.""" - return isinstance(exception, exception_types) - - return if_exception_type_predicate - - -# pylint: disable=invalid-name -# Pylint sees this as a constant, but it is also an alias that should be -# considered a function. -if_transient_error = if_exception_type( - exceptions.InternalServerError, - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - requests.exceptions.ConnectionError, - requests.exceptions.ChunkedEncodingError, - auth_exceptions.TransportError, -) -"""A predicate that checks if an exception is a transient API error. - -The following server errors are considered transient: - -- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC - ``INTERNAL(13)`` and its subclasses. 
-- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 -- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 -- :class:`google.api_core.exceptions.ResourceExhausted` - gRPC - ``RESOURCE_EXHAUSTED(8)`` -""" -# pylint: enable=invalid-name - - -def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): - """Generates sleep intervals based on the exponential back-off algorithm. - - This implements the `Truncated Exponential Back-off`_ algorithm. - - .. _Truncated Exponential Back-off: - https://cloud.google.com/storage/docs/exponential-backoff - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Yields: - float: successive sleep intervals. - """ - delay = initial - while True: - # Introduce jitter by yielding a delay that is uniformly distributed - # to average out to the delay time. - yield min(random.uniform(0.0, delay * 2.0), maximum) - delay = delay * multiplier - - -def retry_target(target, predicate, sleep_generator, deadline, on_error=None): - """Call a function and retry if it fails. - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable): The function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - deadline (float): How long to keep retrying the target. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). 
- on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - - Returns: - Any: the return value of the target function. - - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. - """ - if deadline is not None: - deadline_datetime = datetime_helpers.utcnow() + datetime.timedelta( - seconds=deadline - ) - else: - deadline_datetime = None - - last_exc = None - - for sleep in sleep_generator: - try: - return target() - - # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. - except Exception as exc: - if not predicate(exc): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_datetime is not None: - if deadline_datetime <= now: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling {}".format( - deadline, target - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_datetime - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - - -class Retry(object): - """Exponential retry decorator. - - This class is a decorator used to add exponential back-off retry behavior - to an RPC call. - - Although the default behavior is to retry transient API errors, a - different predicate can be provided to retry other exceptions. - - Args: - predicate (Callable[Exception]): A callable that should return ``True`` - if the given exception is retryable. - initial (float): The minimum amount of time to delay in seconds. This - must be greater than 0. 
- maximum (float): The maximum amount of time to delay in seconds. - multiplier (float): The multiplier applied to the delay. - deadline (float): How long to keep retrying in seconds. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). - """ - - def __init__( - self, - predicate=if_transient_error, - initial=_DEFAULT_INITIAL_DELAY, - maximum=_DEFAULT_MAXIMUM_DELAY, - multiplier=_DEFAULT_DELAY_MULTIPLIER, - deadline=_DEFAULT_DEADLINE, - on_error=None, - ): - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._deadline = deadline - self._on_error = on_error - - def __call__(self, func, on_error=None): - """Wrap a callable with retry behavior. - - Args: - func (Callable): The callable to add retry behavior to. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - - Returns: - Callable: A callable that will invoke ``func`` with retry - behavior. - """ - if self._on_error is not None: - on_error = self._on_error - - @functools.wraps(func) - def retry_wrapped_func(*args, **kwargs): - """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) - sleep_generator = exponential_sleep_generator( - self._initial, self._maximum, multiplier=self._multiplier - ) - return retry_target( - target, - self._predicate, - sleep_generator, - self._deadline, - on_error=on_error, - ) - - return retry_wrapped_func - - @property - def deadline(self): - return self._deadline - - def with_deadline(self, deadline): - """Return a copy of this retry with the given deadline. - - Args: - deadline (float): How long to keep retrying. - - Returns: - Retry: A new retry instance with the given deadline. 
- """ - return Retry( - predicate=self._predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - deadline=deadline, - on_error=self._on_error, - ) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - Retry: A new retry instance with the given predicate. - """ - return Retry( - predicate=predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - deadline=self._deadline, - on_error=self._on_error, - ) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - Retry: A new retry instance with the given predicate. - """ - return Retry( - predicate=self._predicate, - initial=initial if initial is not None else self._initial, - maximum=maximum if maximum is not None else self._maximum, - multiplier=multiplier if multiplier is not None else self._multiplier, - deadline=self._deadline, - on_error=self._on_error, - ) - - def __str__(self): - return ( - "".format( - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._deadline, - self._on_error, - ) - ) diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py new file mode 100644 index 00000000..1724fdbd --- /dev/null +++ b/google/api_core/retry/__init__.py @@ -0,0 +1,52 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Retry implementation for Google API client libraries.""" + +from .retry_base import exponential_sleep_generator +from .retry_base import if_exception_type +from .retry_base import if_transient_error +from .retry_base import build_retry_error +from .retry_base import RetryFailureReason +from .retry_unary import Retry +from .retry_unary import retry_target +from .retry_unary_async import AsyncRetry +from .retry_unary_async import retry_target as retry_target_async +from .retry_streaming import StreamingRetry +from .retry_streaming import retry_target_stream +from .retry_streaming_async import AsyncStreamingRetry +from .retry_streaming_async import retry_target_stream as retry_target_stream_async + +# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py +# +# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576) +from google.api_core import datetime_helpers # noqa: F401 +from google.api_core import exceptions # noqa: F401 +from google.auth import exceptions as auth_exceptions # noqa: F401 + +__all__ = ( + "exponential_sleep_generator", + "if_exception_type", + "if_transient_error", + "build_retry_error", + "RetryFailureReason", + "Retry", + "AsyncRetry", + "StreamingRetry", + "AsyncStreamingRetry", + "retry_target", + "retry_target_async", + "retry_target_stream", + "retry_target_stream_async", +) diff --git a/google/api_core/retry/retry_base.py 
b/google/api_core/retry/retry_base.py new file mode 100644 index 00000000..263b4ccf --- /dev/null +++ b/google/api_core/retry/retry_base.py @@ -0,0 +1,370 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared classes and functions for retrying requests. + +:class:`_BaseRetry` is the base class for :class:`Retry`, +:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`. +""" + +from __future__ import annotations + +import logging +import random +import time + +from enum import Enum +from typing import Any, Callable, Optional, Iterator, TYPE_CHECKING + +import requests.exceptions + +from google.api_core import exceptions +from google.auth import exceptions as auth_exceptions + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 11): + from typing import Self + else: + from typing_extensions import Self + +_DEFAULT_INITIAL_DELAY = 1.0 # seconds +_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds + +_LOGGER = logging.getLogger("google.api_core.retry") + + +def if_exception_type( + *exception_types: type[Exception], +) -> Callable[[Exception], bool]: + """Creates a predicate to check if the exception is of a given type. + + Args: + exception_types (Sequence[:func:`type`]): The exception types to check + for. + + Returns: + Callable[Exception]: A predicate that returns True if the provided + exception is of the given type(s). 
+ """ + + def if_exception_type_predicate(exception: Exception) -> bool: + """Bound predicate for checking an exception type.""" + return isinstance(exception, exception_types) + + return if_exception_type_predicate + + +# pylint: disable=invalid-name +# Pylint sees this as a constant, but it is also an alias that should be +# considered a function. +if_transient_error = if_exception_type( + exceptions.InternalServerError, + exceptions.TooManyRequests, + exceptions.ServiceUnavailable, + requests.exceptions.ConnectionError, + requests.exceptions.ChunkedEncodingError, + auth_exceptions.TransportError, +) +"""A predicate that checks if an exception is a transient API error. + +The following server errors are considered transient: + +- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC + ``INTERNAL(13)`` and its subclasses. +- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 +- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 +- :class:`requests.exceptions.ConnectionError` +- :class:`requests.exceptions.ChunkedEncodingError` - The server declared + chunked encoding but sent an invalid chunk. +- :class:`google.auth.exceptions.TransportError` - Used to indicate an + error occurred during an HTTP request. +""" +# pylint: enable=invalid-name + + +def exponential_sleep_generator( + initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER +): + """Generates sleep intervals based on the exponential back-off algorithm. + + This implements the `Truncated Exponential Back-off`_ algorithm. + + .. _Truncated Exponential Back-off: + https://cloud.google.com/storage/docs/exponential-backoff + + Args: + initial (float): The minimum amount of time to delay. This must + be greater than 0. + maximum (float): The maximum amount of time to delay. + multiplier (float): The multiplier applied to the delay. + + Yields: + float: successive sleep intervals. 
+ """ + max_delay = min(initial, maximum) + while True: + yield random.uniform(0.0, max_delay) + max_delay = min(max_delay * multiplier, maximum) + + +class RetryFailureReason(Enum): + """ + The cause of a failed retry, used when building exceptions + """ + + TIMEOUT = 0 + NON_RETRYABLE_ERROR = 1 + + +def build_retry_error( + exc_list: list[Exception], + reason: RetryFailureReason, + timeout_val: float | None, + **kwargs: Any, +) -> tuple[Exception, Exception | None]: + """ + Default exception_factory implementation. + + Returns a RetryError if the failure is due to a timeout, otherwise + returns the last exception encountered. + + Args: + - exc_list: list of exceptions that occurred during the retry + - reason: reason for the retry failure. + Can be TIMEOUT or NON_RETRYABLE_ERROR + - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message + + Returns: + - tuple: a tuple of the exception to be raised, and the cause exception if any + """ + if reason == RetryFailureReason.TIMEOUT: + # return RetryError with the most recent exception as the cause + src_exc = exc_list[-1] if exc_list else None + timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else "" + return ( + exceptions.RetryError( + f"Timeout {timeout_val_str}exceeded", + src_exc, + ), + src_exc, + ) + elif exc_list: + # return most recent exception encountered + return exc_list[-1], None + else: + # no exceptions were given in exc_list. 
Raise generic RetryError + return exceptions.RetryError("Unknown error", None), None + + +def _retry_error_helper( + exc: Exception, + deadline: float | None, + sleep_iterator: Iterator[float], + error_list: list[Exception], + predicate_fn: Callable[[Exception], bool], + on_error_fn: Callable[[Exception], None] | None, + exc_factory_fn: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ], + original_timeout: float | None, +) -> float: + """ + Shared logic for handling an error for all retry implementations + + - Raises an error on timeout or non-retryable error + - Calls on_error_fn if provided + - Logs the error + + Args: + - exc: the exception that was raised + - deadline: the deadline for the retry, calculated as a diff from time.monotonic() + - sleep_iterator: iterator to draw the next backoff value from + - error_list: the list of exceptions that have been raised so far + - predicate_fn: takes `exc` and returns true if the operation should be retried + - on_error_fn: callback to execute when a retryable error occurs + - exc_factory_fn: callback used to build the exception to be raised on terminal failure + - original_timeout_val: the original timeout value for the retry (in seconds), + to be passed to the exception factory for building an error message + Returns: + - the sleep value chosen before the next attempt + """ + error_list.append(exc) + if not predicate_fn(exc): + final_exc, source_exc = exc_factory_fn( + error_list, + RetryFailureReason.NON_RETRYABLE_ERROR, + original_timeout, + ) + raise final_exc from source_exc + if on_error_fn is not None: + on_error_fn(exc) + # next_sleep is fetched after the on_error callback, to allow clients + # to update sleep_iterator values dynamically in response to errors + try: + next_sleep = next(sleep_iterator) + except StopIteration: + raise ValueError("Sleep generator stopped yielding sleep values.") from exc + if deadline is not None and time.monotonic() + 
next_sleep > deadline: + final_exc, source_exc = exc_factory_fn( + error_list, + RetryFailureReason.TIMEOUT, + original_timeout, + ) + raise final_exc from source_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep) + ) + return next_sleep + + +class _BaseRetry(object): + """ + Base class for retry configuration objects. This class is intended to capture retry + and backoff configuration that is common to both synchronous and asynchronous retries, + for both unary and streaming RPCs. It is not intended to be instantiated directly, + but rather to be subclassed by the various retry configuration classes. + """ + + def __init__( + self, + predicate: Callable[[Exception], bool] = if_transient_error, + initial: float = _DEFAULT_INITIAL_DELAY, + maximum: float = _DEFAULT_MAXIMUM_DELAY, + multiplier: float = _DEFAULT_DELAY_MULTIPLIER, + timeout: Optional[float] = _DEFAULT_DEADLINE, + on_error: Optional[Callable[[Exception], Any]] = None, + **kwargs: Any, + ) -> None: + self._predicate = predicate + self._initial = initial + self._multiplier = multiplier + self._maximum = maximum + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout + self._on_error = on_error + + def __call__(self, *args, **kwargs) -> Any: + raise NotImplementedError("Not implemented in base class") + + @property + def deadline(self) -> float | None: + """ + DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class + documentation for details. + """ + return self._timeout + + @property + def timeout(self) -> float | None: + return self._timeout + + def with_deadline(self, deadline: float | None) -> Self: + """Return a copy of this retry with the given timeout. + + DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class + documentation for details. + + Args: + deadline (float|None): How long to keep retrying, in seconds. If None, + no timeout is enforced. 
+ + Returns: + Retry: A new retry instance with the given timeout. + """ + return self.with_timeout(deadline) + + def with_timeout(self, timeout: float | None) -> Self: + """Return a copy of this retry with the given timeout. + + Args: + timeout (float): How long to keep retrying, in seconds. If None, + no timeout will be enforced. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return type(self)( + predicate=self._predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + timeout=timeout, + on_error=self._on_error, + ) + + def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self: + """Return a copy of this retry with the given predicate. + + Args: + predicate (Callable[Exception]): A callable that should return + ``True`` if the given exception is retryable. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return type(self)( + predicate=predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + timeout=self._timeout, + on_error=self._on_error, + ) + + def with_delay( + self, + initial: Optional[float] = None, + maximum: Optional[float] = None, + multiplier: Optional[float] = None, + ) -> Self: + """Return a copy of this retry with the given delay options. + + Args: + initial (float): The minimum amount of time to delay (in seconds). This must + be greater than 0. If None, the current value is used. + maximum (float): The maximum amount of time to delay (in seconds). If None, the + current value is used. + multiplier (float): The multiplier applied to the delay. If None, the current + value is used. + + Returns: + Retry: A new retry instance with the given delay options. 
+ """ + return type(self)( + predicate=self._predicate, + initial=initial if initial is not None else self._initial, + maximum=maximum if maximum is not None else self._maximum, + multiplier=multiplier if multiplier is not None else self._multiplier, + timeout=self._timeout, + on_error=self._on_error, + ) + + def __str__(self) -> str: + return ( + "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " + "multiplier={:.1f}, timeout={}, on_error={}>".format( + type(self).__name__, + self._predicate, + self._initial, + self._maximum, + self._multiplier, + self._timeout, # timeout can be None, thus no {:.1f} + self._on_error, + ) + ) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py new file mode 100644 index 00000000..e4474c8a --- /dev/null +++ b/google/api_core/retry/retry_streaming.py @@ -0,0 +1,264 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Generator wrapper for retryable streaming RPCs. 
+""" +from __future__ import annotations + +from typing import ( + Callable, + Optional, + List, + Tuple, + Iterable, + Generator, + TypeVar, + Any, + TYPE_CHECKING, +) + +import sys +import time +import functools + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import build_retry_error +from google.api_core.retry import RetryFailureReason + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _Y = TypeVar("_Y") # yielded values + + +def retry_target_stream( + target: Callable[_P, Iterable[_Y]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Callable[ + [List[Exception], RetryFailureReason, Optional[float]], + Tuple[Exception, Optional[Exception]], + ] = build_retry_error, + init_args: tuple = (), + init_kwargs: dict = {}, + **kwargs, +) -> Generator[_Y, Any, None]: + """Create a generator wrapper that retries the wrapped stream if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target: The generator function to call and retry. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. 
+ on_error: If given, the on_error callback will be called with each + retryable exception raised by the target. Any error raised by this + function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. + + Returns: + Generator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. + """ + + timeout = kwargs.get("deadline", timeout) + deadline: Optional[float] = ( + time.monotonic() + timeout if timeout is not None else None + ) + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + # Start a new retry loop + try: + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. 
+ subgenerator = target(*init_args, **init_kwargs) + return (yield from subgenerator) + # handle exceptions raised by the subgenerator + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. + except Exception as exc: + # defer to shared logic for handling errors + next_sleep = _retry_error_helper( + exc, + deadline, + sleep_iter, + error_list, + predicate, + on_error, + exception_factory, + timeout, + ) + # if exception not raised, sleep before next attempt + time.sleep(next_sleep) + + +class StreamingRetry(_BaseRetry): + """Exponential retry decorator for streaming synchronous RPCs. + + This class returns a Generator when called, which wraps the target + stream in retry logic. If any exception is raised by the target, the + entire stream will be retried within the wrapper. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + .. 
code-block:: python + + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + .. code-block:: python + + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = StreamingRetry(...)(target) + # keep track of what has been yielded out of filter + seen_items = [] + for item in retryable_gen(): + if stream_idx >= len(seen_items): + seen_items.append(item) + yield item + elif item != seen_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (float): How long to keep retrying, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. 
+ on_error (Callable[Exception]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + deadline (float): DEPRECATED: use `timeout` instead. For backward + compatibility, if specified it will override the ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[_P, Iterable[_Y]], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, Generator[_Y, Any, None]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. + """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> Generator[_Y, Any, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + func, + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + init_args=args, + init_kwargs=kwargs, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py new file mode 100644 index 00000000..5e5fa240 --- /dev/null +++ b/google/api_core/retry/retry_streaming_async.py @@ -0,0 +1,328 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Generator wrapper for retryable async streaming RPCs. +""" +from __future__ import annotations + +from typing import ( + cast, + Any, + Callable, + Iterable, + AsyncIterator, + AsyncIterable, + Awaitable, + TypeVar, + AsyncGenerator, + TYPE_CHECKING, +) + +import asyncio +import time +import sys +import functools + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import build_retry_error +from google.api_core.retry import RetryFailureReason + + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _Y = TypeVar("_Y") # yielded values + + +async def retry_target_stream( + target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] = build_retry_error, + init_args: tuple = (), + init_kwargs: dict = {}, + **kwargs, +) -> AsyncGenerator[_Y, None]: + """Create a generator wrapper that retries the wrapped stream if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`AsyncRetry`. 
+ + Args: + target: The generator function to call and retry. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error: If given, the on_error callback will be called with each + retryable exception raised by the target. Any error raised by this + function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. + + Returns: + AsyncGenerator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. 
+ """ + target_iterator: AsyncIterator[_Y] | None = None + timeout = kwargs.get("deadline", timeout) + deadline = time.monotonic() + timeout if timeout else None + # keep track of retryable exceptions we encounter to pass in to exception_factory + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + target_is_generator: bool | None = None + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + # Start a new retry loop + try: + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. + target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target( + *init_args, **init_kwargs + ) + try: + # gapic functions return the generator behind an awaitable + # unwrap the awaitable so we can work with the generator directly + target_output = await target_output # type: ignore + except TypeError: + # was not awaitable, continue + pass + target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__() + + if target_is_generator is None: + # Check if target supports generator features (asend, athrow, aclose) + target_is_generator = bool(getattr(target_iterator, "asend", None)) + + sent_in = None + while True: + ## Read from target_iterator + # If the target is a generator, we will advance it with `asend` + # otherwise, we will use `anext` + if target_is_generator: + next_value = await target_iterator.asend(sent_in) # type: ignore + else: + next_value = await target_iterator.__anext__() + ## Yield from Wrapper to caller + try: + # yield latest value from target + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose` while waiting on yield, + # it will raise GeneratorExit here + if target_is_generator: + 
                        # pass to inner target_iterator for handling
+                        await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+                    else:
+                        raise
+                    return
+                except:  # noqa: E722
+                    # bare except catches any exception passed to `athrow`
+                    if target_is_generator:
+                        # delegate error handling to target_iterator
+                        await cast(AsyncGenerator["_Y", None], target_iterator).athrow(
+                            cast(BaseException, sys.exc_info()[1])
+                        )
+                    else:
+                        raise
+                    return
+        except StopAsyncIteration:
+            # if iterator exhausted, return
+            return
+        # handle exceptions raised by the target_iterator
+        # pylint: disable=broad-except
+        # This function explicitly must deal with broad exceptions.
+        except Exception as exc:
+            # defer to shared logic for handling errors
+            next_sleep = _retry_error_helper(
+                exc,
+                deadline,
+                sleep_iter,
+                error_list,
+                predicate,
+                on_error,
+                exception_factory,
+                timeout,
+            )
+            # if exception not raised, sleep before next attempt
+            await asyncio.sleep(next_sleep)
+
+        finally:
+            if target_is_generator and target_iterator is not None:
+                await cast(AsyncGenerator["_Y", None], target_iterator).aclose()
+
+
+class AsyncStreamingRetry(_BaseRetry):
+    """Exponential retry decorator for async streaming rpcs.
+
+    This class returns an AsyncGenerator when called, which wraps the target
+    stream in retry logic. If any exception is raised by the target, the
+    entire stream will be retried within the wrapper.
+
+    Although the default behavior is to retry transient API errors, a
+    different predicate can be provided to retry other exceptions.
+
+    Important Note: when a stream encounters a retryable error, it will
+    silently construct a fresh iterator instance in the background
+    and continue yielding (likely duplicate) values as if no error occurred.
+    This is the most general way to retry a stream, but it often is not the
+    desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...]
+
+    There are two ways to build more advanced retry logic for streams:
+
+    1. Wrap the target
+        Use a ``target`` that maintains state between retries, and creates a
+        different generator on each retry call. For example, you can wrap a
+        grpc call in a function that modifies the request based on what has
+        already been returned:
+
+        .. code-block:: python
+
+            async def attempt_with_modified_request(target, request, seen_items=[]):
+                # remove seen items from request on each attempt
+                new_request = modify_request(request, seen_items)
+                new_generator = await target(new_request)
+                async for item in new_generator:
+                    yield item
+                    seen_items.append(item)
+
+            retry_wrapped = AsyncRetry(is_stream=True,...)(attempt_with_modified_request, target, request, [])
+
+    2. Wrap the retry generator
+        Alternatively, you can wrap the retryable generator itself before
+        passing it to the end-user to add a filter on the stream. For
+        example, you can keep track of the items that were successfully yielded
+        in previous retry attempts, and only yield new items when the
+        new attempt surpasses the previous ones:
+
+        .. code-block:: python
+
+            async def retryable_with_filter(target):
+                stream_idx = 0
+                # reset stream_idx when the stream is retried
+                def on_error(e):
+                    nonlocal stream_idx
+                    stream_idx = 0
+                # build retryable
+                retryable_gen = AsyncRetry(is_stream=True, ...)(target)
+                # keep track of what has been yielded out of filter
+                seen_items = []
+                async for item in retryable_gen:
+                    if stream_idx >= len(seen_items):
+                        yield item
+                        seen_items.append(item)
+                    elif item != seen_items[stream_idx]:
+                        raise ValueError("Stream differs from last attempt")
+                    stream_idx += 1
+
+            filter_retry_wrapped = retryable_with_filter(target)
+
+    Args:
+        predicate (Callable[Exception]): A callable that should return ``True``
+            if the given exception is retryable.
+        initial (float): The minimum amount of time to delay in seconds. This
+            must be greater than 0.
+        maximum (float): The maximum amount of time to delay in seconds.
+ multiplier (float): The multiplier applied to the delay. + timeout (Optional[float]): How long to keep retrying in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Optional[Callable[Exception]]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + is_stream (bool): Indicates whether the input function + should be treated as a stream function (i.e. an AsyncGenerator, + or function or coroutine that returns an AsyncIterable). + If True, the iterable will be wrapped with retry logic, and any + failed outputs will restart the stream. If False, only the input + function call itself will be retried. Defaults to False. + To avoid duplicate values, retryable streams should typically be + wrapped in additional filter logic before use. + deadline (float): DEPRECATED use ``timeout`` instead. If set it will + override ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable or stream to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
+ """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + async def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> AsyncGenerator[_Y, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + func, + self._predicate, + sleep_generator, + self._timeout, + on_error, + init_args=args, + init_kwargs=kwargs, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py new file mode 100644 index 00000000..6d36bc7d --- /dev/null +++ b/google/api_core/retry/retry_unary.py @@ -0,0 +1,302 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for retrying functions with exponential back-off. + +The :class:`Retry` decorator can be used to retry functions that raise +exceptions using exponential backoff. Because a exponential sleep algorithm is +used, the retry is limited by a `timeout`. The timeout determines the window +in which retries will be attempted. This is used instead of total number of retries +because it is difficult to ascertain the amount of time a function can block +when using total number of retries and exponential backoff. + +By default, this decorator will retry transient +API errors (see :func:`if_transient_error`). For example: + +.. 
code-block:: python + + @retry.Retry() + def call_flaky_rpc(): + return client.flaky_rpc() + + # Will retry flaky_rpc() if it raises transient API errors. + result = call_flaky_rpc() + +You can pass a custom predicate to retry on different exceptions, such as +waiting for an eventually consistent item to be available: + +.. code-block:: python + + @retry.Retry(predicate=if_exception_type(exceptions.NotFound)) + def check_if_exists(): + return client.does_thing_exist() + + is_available = check_if_exists() + +Some client library methods apply retry automatically. These methods can accept +a ``retry`` parameter that allows you to configure the behavior: + +.. code-block:: python + + my_retry = retry.Retry(timeout=60) + result = client.some_method(retry=my_retry) + +""" + +from __future__ import annotations + +import functools +import sys +import time +import inspect +import warnings +from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry.retry_base import exponential_sleep_generator +from google.api_core.retry.retry_base import build_retry_error +from google.api_core.retry.retry_base import RetryFailureReason + + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _R = TypeVar("_R") # target function returned value + +_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." 
def retry_target(
    target: Callable[[], _R],
    predicate: Callable[[Exception], bool],
    sleep_generator: Iterable[float],
    timeout: float | None = None,
    on_error: Callable[[Exception], None] | None = None,
    exception_factory: Callable[
        [list[Exception], RetryFailureReason, float | None],
        tuple[Exception, Exception | None],
    ] = build_retry_error,
    **kwargs,
):
    """Call a function and retry if it fails.

    This is the lowest-level retry helper. Generally, you'll use the
    higher-level retry helper :class:`Retry`.

    Args:
        target(Callable): The function to call and retry. This must be a
            nullary function - apply arguments with `functools.partial`.
        predicate (Callable[Exception]): A callable used to determine if an
            exception raised by the target should be considered retryable.
            It should return True to retry or False otherwise.
        sleep_generator (Iterable[float]): An infinite iterator that determines
            how long to sleep between retries.
        timeout (Optional[float]): How long to keep retrying the target.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Optional[Callable[Exception]]): If given, the on_error
            callback will be called with each retryable exception raised by the
            target. Any error raised by this function will *not* be caught.
        exception_factory: A function that is called when the retryable reaches
            a terminal failure state, used to construct an exception to be raised.
            It takes a list of all exceptions encountered, a retry.RetryFailureReason
            enum indicating the failure cause, and the original timeout value
            as arguments. It should return a tuple of the exception to be raised,
            along with the cause exception if any. The default implementation will raise
            a RetryError on timeout, or the last exception encountered otherwise.
        deadline (float): DEPRECATED: use ``timeout`` instead. For backward
            compatibility, if specified it will override ``timeout`` parameter.

    Returns:
        Any: the return value of the target function.

    Raises:
        ValueError: If the sleep generator stops yielding values.
        Exception: a custom exception specified by the exception_factory if provided.
            If no exception_factory is provided:
            google.api_core.RetryError: If the timeout is exceeded while retrying.
            Exception: If the target raises an error that isn't retryable.
    """

    # The deprecated ``deadline`` keyword, when supplied, overrides ``timeout``.
    timeout = kwargs.get("deadline", timeout)

    # A monotonic clock is used so the computed deadline is unaffected by
    # wall-clock adjustments.
    deadline = time.monotonic() + timeout if timeout is not None else None
    error_list: list[Exception] = []
    sleep_iter = iter(sleep_generator)

    # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
    # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
    while True:
        try:
            result = target()
            if inspect.isawaitable(result):
                # The target returned a coroutine/awaitable: this synchronous
                # Retry cannot await it, so warn the caller to use AsyncRetry.
                warnings.warn(_ASYNC_RETRY_WARNING)
            return result

        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            # defer to shared logic for handling errors
            next_sleep = _retry_error_helper(
                exc,
                deadline,
                sleep_iter,
                error_list,
                predicate,
                on_error,
                exception_factory,
                timeout,
            )
            # if exception not raised, sleep before next attempt
            time.sleep(next_sleep)


class Retry(_BaseRetry):
    """Exponential retry decorator for unary synchronous RPCs.

    This class is a decorator used to add retry or polling behavior to an RPC
    call.

    Although the default behavior is to retry transient API errors, a
    different predicate can be provided to retry other exceptions.

    There are two important concepts that retry/polling behavior may operate on,
    Deadline and Timeout, which need to be properly defined for the correct
    usage of this class and the rest of the library.

    Deadline: a fixed point in time by which a certain operation must
    terminate. For example, if a certain operation has a deadline
    "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an
    error) by that time, regardless of when it was started or whether it
    was started at all.

    Timeout: the maximum duration of time after which a certain operation
    must terminate (successfully or with an error). The countdown begins right
    after an operation was started. For example, if an operation was started at
    09:24:00 with timeout of 75 seconds, it must terminate no later than
    09:25:15.

    Unfortunately, in the past this class (and the api-core library as a whole) has not
    been properly distinguishing the concepts of "timeout" and "deadline", and the
    ``deadline`` parameter has meant ``timeout``. That is why
    ``deadline`` has been deprecated and ``timeout`` should be used instead. If the
    ``deadline`` parameter is set, it will override the ``timeout`` parameter.
    In other words, ``retry.deadline`` should be treated as just a deprecated alias for
    ``retry.timeout``.

    Said another way, it is safe to assume that this class and the rest of this
    library operate in terms of timeouts (not deadlines) unless explicitly
    noted the usage of deadline semantics.

    It is also important to
    understand the three most common applications of the Timeout concept in the
    context of this library.

    Usually the generic Timeout term may stand for one of the following actual
    timeouts: RPC Timeout, Retry Timeout, or Polling Timeout.

    RPC Timeout: a value supplied by the client to the server so
    that the server side knows the maximum amount of time it is expected to
    spend handling that specific RPC. For example, in the case of gRPC transport,
    RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2
    request. The `timeout` property of this class normally never represents the
    RPC Timeout as it is handled separately by the ``google.api_core.timeout``
    module of this library.

    Retry Timeout: this is the most common meaning of the ``timeout`` property
    of this class, and defines how long a certain RPC may be retried in case
    the server returns an error.

    Polling Timeout: defines how long the
    client side is allowed to call the polling RPC repeatedly to check a status of a
    long-running operation. Each polling RPC is
    expected to succeed (its errors are supposed to be handled by the retry
    logic). The decision as to whether a new polling attempt needs to be made is based
    not on the RPC status code but on the status of the returned
    status of an operation. In other words: we will poll a long-running operation until
    the operation is done or the polling timeout expires. Each poll will inform us of
    the status of the operation. The poll consists of an RPC to the server that may
    itself be retried as per the poll-specific retry settings in case of errors. The
    operation-level retry settings do NOT apply to polling-RPC retries.

    With the actual timeout types being defined above, the client libraries
    often refer to just Timeout without clarifying which type specifically
    that is. In that case the actual timeout type (sometimes also referred to as
    Logical Timeout) can be determined from the context. If it is a unary rpc
    call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if
    provided directly as a standalone value) or Retry Timeout (if provided as
    ``retry.timeout`` property of the unary RPC's retry config). For
    ``Operation`` or ``PollingFuture`` in general Timeout stands for
    Polling Timeout.

    Args:
        predicate (Callable[Exception]): A callable that should return ``True``
            if the given exception is retryable.
        initial (float): The minimum amount of time to delay in seconds. This
            must be greater than 0.
        maximum (float): The maximum amount of time to delay in seconds.
        multiplier (float): The multiplier applied to the delay.
        timeout (Optional[float]): How long to keep retrying, in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Callable[Exception]): A function to call while processing
            a retryable exception. Any error raised by this function will
            *not* be caught.
        deadline (float): DEPRECATED: use `timeout` instead. For backward
            compatibility, if specified it will override the ``timeout`` parameter.
    """

    def __call__(
        self,
        func: Callable[_P, _R],
        on_error: Callable[[Exception], Any] | None = None,
    ) -> Callable[_P, _R]:
        """Wrap a callable with retry behavior.

        Args:
            func (Callable): The callable to add retry behavior to.
            on_error (Optional[Callable[Exception]]): If given, the
                on_error callback will be called with each retryable exception
                raised by the wrapped function. Any error raised by this
                function will *not* be caught. If on_error was specified in the
                constructor, this value will be ignored.

        Returns:
            Callable: A callable that will invoke ``func`` with retry
                behavior.
        """
        # The on_error supplied at construction time takes precedence over the
        # decorator-time argument.
        if self._on_error is not None:
            on_error = self._on_error

        @functools.wraps(func)
        def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
            """A wrapper that calls target function with retry."""
            target = functools.partial(func, *args, **kwargs)
            sleep_generator = exponential_sleep_generator(
                self._initial, self._maximum, multiplier=self._multiplier
            )
            return retry_target(
                target,
                self._predicate,
                sleep_generator,
                timeout=self._timeout,
                on_error=on_error,
            )

        return retry_wrapped_func
    result = await call_flaky_rpc()

You can pass a custom predicate to retry on different exceptions, such as
waiting for an eventually consistent item to be available:

.. code-block:: python

    @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound))
    async def check_if_exists():
        return await client.does_thing_exist()

    is_available = await check_if_exists()

Some client library methods apply retry automatically. These methods can accept
a ``retry`` parameter that allows you to configure the behavior:

.. code-block:: python

    my_retry = retry_async.AsyncRetry(timeout=60)
    result = await client.some_method(retry=my_retry)

"""

from __future__ import annotations

import asyncio
import time
import functools
from typing import (
    Awaitable,
    Any,
    Callable,
    Iterable,
    TypeVar,
    TYPE_CHECKING,
)

from google.api_core.retry.retry_base import _BaseRetry
from google.api_core.retry.retry_base import _retry_error_helper
from google.api_core.retry.retry_base import exponential_sleep_generator
from google.api_core.retry.retry_base import build_retry_error
from google.api_core.retry.retry_base import RetryFailureReason

# for backwards compatibility, expose helpers in this module
from google.api_core.retry.retry_base import if_exception_type  # noqa
from google.api_core.retry.retry_base import if_transient_error  # noqa

if TYPE_CHECKING:
    import sys

    # ParamSpec landed in the stdlib in 3.10; fall back to typing_extensions
    # for older interpreters (only needed by static type checkers).
    if sys.version_info >= (3, 10):
        from typing import ParamSpec
    else:
        from typing_extensions import ParamSpec

    _P = ParamSpec("_P")  # target function call parameters
    _R = TypeVar("_R")  # target function returned value

_DEFAULT_INITIAL_DELAY = 1.0  # seconds
_DEFAULT_MAXIMUM_DELAY = 60.0  # seconds
_DEFAULT_DELAY_MULTIPLIER = 2.0
_DEFAULT_DEADLINE = 60.0 * 2.0  # seconds
_DEFAULT_TIMEOUT = 60.0 * 2.0  # seconds


async def retry_target(
    target: Callable[[], Awaitable[_R]],
    predicate: Callable[[Exception], bool],
    sleep_generator: Iterable[float],
    timeout: float | None = None,
    on_error: Callable[[Exception], None] | None = None,
    exception_factory: Callable[
        [list[Exception], RetryFailureReason, float | None],
        tuple[Exception, Exception | None],
    ] = build_retry_error,
    **kwargs,
):
    """Await a coroutine and retry if it fails.

    This is the lowest-level retry helper. Generally, you'll use the
    higher-level retry helper :class:`Retry`.

    Args:
        target(Callable[[], Any]): The function to call and retry. This must be a
            nullary function - apply arguments with `functools.partial`.
        predicate (Callable[Exception]): A callable used to determine if an
            exception raised by the target should be considered retryable.
            It should return True to retry or False otherwise.
        sleep_generator (Iterable[float]): An infinite iterator that determines
            how long to sleep between retries.
        timeout (Optional[float]): How long to keep retrying the target, in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Optional[Callable[Exception]]): If given, the on_error
            callback will be called with each retryable exception raised by the
            target. Any error raised by this function will *not* be caught.
        exception_factory: A function that is called when the retryable reaches
            a terminal failure state, used to construct an exception to be raised.
            It takes a list of all exceptions encountered, a retry.RetryFailureReason
            enum indicating the failure cause, and the original timeout value
            as arguments. It should return a tuple of the exception to be raised,
            along with the cause exception if any. The default implementation will raise
            a RetryError on timeout, or the last exception encountered otherwise.
        deadline (float): DEPRECATED: use ``timeout`` instead. For backward
            compatibility, if set it will override the ``timeout`` parameter.

    Returns:
        Any: the return value of the target function.

    Raises:
        ValueError: If the sleep generator stops yielding values.
        Exception: a custom exception specified by the exception_factory if provided.
            If no exception_factory is provided:
            google.api_core.RetryError: If the timeout is exceeded while retrying.
            Exception: If the target raises an error that isn't retryable.
    """

    # The deprecated ``deadline`` keyword, when supplied, overrides ``timeout``.
    timeout = kwargs.get("deadline", timeout)

    # A monotonic clock is used so the computed deadline is unaffected by
    # wall-clock adjustments.
    deadline = time.monotonic() + timeout if timeout is not None else None
    error_list: list[Exception] = []
    sleep_iter = iter(sleep_generator)

    # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper
    # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535
    while True:
        try:
            return await target()
        # pylint: disable=broad-except
        # This function explicitly must deal with broad exceptions.
        except Exception as exc:
            # defer to shared logic for handling errors
            next_sleep = _retry_error_helper(
                exc,
                deadline,
                sleep_iter,
                error_list,
                predicate,
                on_error,
                exception_factory,
                timeout,
            )
            # if exception not raised, sleep before next attempt
            await asyncio.sleep(next_sleep)


class AsyncRetry(_BaseRetry):
    """Exponential retry decorator for async coroutines.

    This class is a decorator used to add exponential back-off retry behavior
    to an RPC call.

    Although the default behavior is to retry transient API errors, a
    different predicate can be provided to retry other exceptions.

    Args:
        predicate (Callable[Exception]): A callable that should return ``True``
            if the given exception is retryable.
        initial (float): The minimum amount of time to delay in seconds. This
            must be greater than 0.
        maximum (float): The maximum amount of time to delay in seconds.
        multiplier (float): The multiplier applied to the delay.
        timeout (Optional[float]): How long to keep retrying in seconds.
            Note: timeout is only checked before initiating a retry, so the target may
            run past the timeout value as long as it is healthy.
        on_error (Optional[Callable[Exception]]): A function to call while processing
            a retryable exception. Any error raised by this function will
            *not* be caught.
        deadline (float): DEPRECATED: use ``timeout`` instead. If set it will
            override ``timeout`` parameter.
    """

    def __call__(
        self,
        func: Callable[..., Awaitable[_R]],
        on_error: Callable[[Exception], Any] | None = None,
    ) -> Callable[_P, Awaitable[_R]]:
        """Wrap a callable with retry behavior.

        Args:
            func (Callable): The callable or stream to add retry behavior to.
            on_error (Optional[Callable[Exception]]): If given, the
                on_error callback will be called with each retryable exception
                raised by the wrapped function. Any error raised by this
                function will *not* be caught. If on_error was specified in the
                constructor, this value will be ignored.

        Returns:
            Callable: A callable that will invoke ``func`` with retry
                behavior.
        """
        # The on_error supplied at construction time takes precedence over the
        # decorator-time argument.
        if self._on_error is not None:
            on_error = self._on_error

        @functools.wraps(func)
        async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R:
            """A wrapper that calls target function with retry."""
            sleep_generator = exponential_sleep_generator(
                self._initial, self._maximum, multiplier=self._multiplier
            )
            return await retry_target(
                functools.partial(func, *args, **kwargs),
                predicate=self._predicate,
                sleep_generator=sleep_generator,
                timeout=self._timeout,
                on_error=on_error,
            )

        return retry_wrapped_func
code-block:: python - - @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound)) - async def check_if_exists(): - return await client.does_thing_exist() - - is_available = await check_if_exists() - -Some client library methods apply retry automatically. These methods can accept -a ``retry`` parameter that allows you to configure the behavior: - -.. code-block:: python - - my_retry = retry_async.AsyncRetry(deadline=60) - result = await client.some_method(retry=my_retry) - -""" - -import asyncio -import datetime -import functools -import logging - -from google.api_core import datetime_helpers -from google.api_core import exceptions -from google.api_core.retry import exponential_sleep_generator +# +# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py +# +# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576) +from google.api_core import datetime_helpers # noqa: F401 +from google.api_core import exceptions # noqa: F401 +from google.api_core.retry import exponential_sleep_generator # noqa: F401 from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error - - -_LOGGER = logging.getLogger(__name__) -_DEFAULT_INITIAL_DELAY = 1.0 # seconds -_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds -_DEFAULT_DELAY_MULTIPLIER = 2.0 -_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds - - -async def retry_target(target, predicate, sleep_generator, deadline, on_error=None): - """Call a function and retry if it fails. - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable): The function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. 
- predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - deadline (float): How long to keep retrying the target. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - - Returns: - Any: the return value of the target function. - - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. - """ - deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=deadline)) - if deadline - else None - ) - - last_exc = None - - for sleep in sleep_generator: - try: - if not deadline_dt: - return await target() - else: - return await asyncio.wait_for( - target(), - timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), - ) - # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. - except Exception as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. 
- raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling {}".format( - deadline, target - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - - -class AsyncRetry: - """Exponential retry decorator for async functions. - - This class is a decorator used to add exponential back-off retry behavior - to an RPC call. - - Although the default behavior is to retry transient API errors, a - different predicate can be provided to retry other exceptions. - - Args: - predicate (Callable[Exception]): A callable that should return ``True`` - if the given exception is retryable. - initial (float): The minimum a,out of time to delay in seconds. This - must be greater than 0. - maximum (float): The maximum amout of time to delay in seconds. - multiplier (float): The multiplier applied to the delay. - deadline (float): How long to keep retrying in seconds. The last sleep - period is shortened as necessary, so that the last retry runs at - ``deadline`` (and not considerably beyond it). - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - """ - - def __init__( - self, - predicate=if_transient_error, - initial=_DEFAULT_INITIAL_DELAY, - maximum=_DEFAULT_MAXIMUM_DELAY, - multiplier=_DEFAULT_DELAY_MULTIPLIER, - deadline=_DEFAULT_DEADLINE, - on_error=None, - ): - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._deadline = deadline - self._on_error = on_error - - def __call__(self, func, on_error=None): - """Wrap a callable with retry behavior. - - Args: - func (Callable): The callable to add retry behavior to. 
- on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - - Returns: - Callable: A callable that will invoke ``func`` with retry - behavior. - """ - if self._on_error is not None: - on_error = self._on_error - - @functools.wraps(func) - async def retry_wrapped_func(*args, **kwargs): - """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) - sleep_generator = exponential_sleep_generator( - self._initial, self._maximum, multiplier=self._multiplier - ) - return await retry_target( - target, - self._predicate, - sleep_generator, - self._deadline, - on_error=on_error, - ) - - return retry_wrapped_func - - def _replace( - self, - predicate=None, - initial=None, - maximum=None, - multiplier=None, - deadline=None, - on_error=None, - ): - return AsyncRetry( - predicate=predicate or self._predicate, - initial=initial or self._initial, - maximum=maximum or self._maximum, - multiplier=multiplier or self._multiplier, - deadline=deadline or self._deadline, - on_error=on_error or self._on_error, - ) - - def with_deadline(self, deadline): - """Return a copy of this retry with the given deadline. - - Args: - deadline (float): How long to keep retrying. - - Returns: - AsyncRetry: A new retry instance with the given deadline. - """ - return self._replace(deadline=deadline) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(predicate=predicate) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amout of time to delay. 
This must - be greater than 0. - maximum (float): The maximum amout of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) - - def __str__(self): - return ( - "".format( - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._deadline, - self._on_error, - ) - ) +from google.api_core.retry import if_transient_error # noqa: F401 +from google.api_core.retry.retry_unary_async import AsyncRetry +from google.api_core.retry.retry_unary_async import retry_target + +__all__ = ( + "AsyncRetry", + "datetime_helpers", + "exceptions", + "exponential_sleep_generator", + "if_exception_type", + "if_transient_error", + "retry_target", +) diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py index 73232180..55b195e9 100644 --- a/google/api_core/timeout.py +++ b/google/api_core/timeout.py @@ -14,8 +14,9 @@ """Decorators for applying timeout arguments to functions. -These decorators are used to wrap API methods to apply either a constant -or exponential timeout argument. +These decorators are used to wrap API methods to apply either a +Deadline-dependent (recommended), constant (DEPRECATED) or exponential +(DEPRECATED) timeout argument. For example, imagine an API method that can take a while to return results, such as one that might block until a resource is ready: @@ -66,9 +67,79 @@ def is_thing_ready(timeout=None): _DEFAULT_DEADLINE = None +class TimeToDeadlineTimeout(object): + """A decorator that decreases timeout set for an RPC based on how much time + has left till its deadline. The deadline is calculated as + ``now + initial_timeout`` when this decorator is first called for an rpc. + + In other words this decorator implements deadline semantics in terms of a + sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0. 
class TimeToDeadlineTimeout(object):
    """A decorator that decreases timeout set for an RPC based on how much time
    has left till its deadline. The deadline is calculated as
    ``now + initial_timeout`` when this decorator is first called for an rpc.

    In other words this decorator implements deadline semantics in terms of a
    sequence of decreasing timeouts t0 > t1 > t2 ... tn >= 0.

    Args:
        timeout (Optional[float]): the timeout (in seconds) to be applied to the
            wrapped function. If `None`, the target function is expected to
            never timeout.
    """

    def __init__(self, timeout=None, clock=datetime_helpers.utcnow):
        self._timeout = timeout
        self._clock = clock

    def __call__(self, func):
        """Apply the timeout decorator.

        Args:
            func (Callable): The function to apply the timeout argument to.
                This function must accept a timeout keyword argument.

        Returns:
            Callable: The wrapped function.
        """

        # The deadline is anchored to the first time the decorator is invoked
        # for this RPC, not to each individual attempt.
        first_attempt_timestamp = self._clock().timestamp()

        @functools.wraps(func)
        def func_with_timeout(*args, **kwargs):
            """Wrapped function that adds timeout."""

            if self._timeout is not None:
                # All calculations are in seconds
                now_timestamp = self._clock().timestamp()

                # To avoid usage of nonlocal but still have round timeout
                # numbers for first attempt (in most cases the only attempt made
                # for an RPC).
                if now_timestamp - first_attempt_timestamp < 0.001:
                    now_timestamp = first_attempt_timestamp

                time_since_first_attempt = now_timestamp - first_attempt_timestamp
                remaining_timeout = self._timeout - time_since_first_attempt

                # Although the `deadline` parameter in `google.api_core.retry.Retry`
                # is deprecated, and should be treated the same as the `timeout`,
                # it is still possible for the `deadline` argument in
                # `google.api_core.retry.Retry` to be larger than the `timeout`.
                # See https://github.com/googleapis/python-api-core/issues/654
                # Only positive non-zero timeouts are supported.
                # Revert back to the initial timeout for negative or 0 timeout values.
                if remaining_timeout < 1:
                    remaining_timeout = self._timeout

                kwargs["timeout"] = remaining_timeout

            return func(*args, **kwargs)

        return func_with_timeout

    def __str__(self):
        # Restored: the angle-bracketed repr content had been stripped by text
        # mangling, leaving a bare "".format(...) call.
        return "<TimeToDeadlineTimeout timeout={:.1f}>".format(self._timeout)
+ DEPRECATED: use ``TimeToDeadlineTimeout`` instead. + This is effectively equivalent to ``functools.partial(func, timeout=timeout)``. @@ -140,6 +211,9 @@ def _exponential_timeout_generator(initial, maximum, multiplier, deadline): class ExponentialTimeout(object): """A decorator that adds an exponentially increasing timeout argument. + DEPRECATED: the concept of incrementing timeout exponentially has been + deprecated. Use ``TimeToDeadlineTimeout`` instead. + This is useful if a function is called multiple times. Each time the function is called this decorator will calculate a new timeout parameter based on the the number of times the function has been called. @@ -156,9 +230,9 @@ class ExponentialTimeout(object): deadline (Optional[float]): The overall deadline across all invocations. This is used to prevent a very large calculated timeout from pushing the overall execution time over the deadline. - This is especially useful in conjuction with + This is especially useful in conjunction with :mod:`google.api_core.retry`. If ``None``, the timeouts will not - be adjusted to accomodate an overall deadline. + be adjusted to accommodate an overall deadline. """ def __init__( @@ -174,7 +248,7 @@ def __init__( self._deadline = deadline def with_deadline(self, deadline): - """Return a copy of this teimout with the given deadline. + """Return a copy of this timeout with the given deadline. Args: deadline (float): The overall deadline across all invocations. diff --git a/google/api_core/universe.py b/google/api_core/universe.py new file mode 100644 index 00000000..35669642 --- /dev/null +++ b/google/api_core/universe.py @@ -0,0 +1,82 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Helpers for universe domain."""

from typing import Any, Optional

# Universe used when neither the client options nor the environment supply one.
DEFAULT_UNIVERSE = "googleapis.com"


class EmptyUniverseError(ValueError):
    """Raised when the configured universe domain is an empty string."""

    def __init__(self):
        super().__init__("Universe Domain cannot be an empty string.")


class UniverseMismatchError(ValueError):
    """Raised when the client and credentials universe domains disagree."""

    def __init__(self, client_universe, credentials_universe):
        super().__init__(
            f"The configured universe domain ({client_universe}) does not match the universe domain "
            f"found in the credentials ({credentials_universe}). "
            "If you haven't configured the universe domain explicitly, "
            f"`{DEFAULT_UNIVERSE}` is the default."
        )


def determine_domain(
    client_universe_domain: Optional[str], universe_domain_env: Optional[str]
) -> str:
    """Return the universe domain used by the client.

    Precedence is: the client options value, then the environment value, then
    the default universe.

    Args:
        client_universe_domain (Optional[str]): The universe domain configured
            via the client options.
        universe_domain_env (Optional[str]): The universe domain configured via
            the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable.

    Returns:
        str: The universe domain to be used by the client.

    Raises:
        ValueError: If the universe domain is an empty string.
    """
    if client_universe_domain is not None:
        domain = client_universe_domain
    elif universe_domain_env is not None:
        domain = universe_domain_env
    else:
        domain = DEFAULT_UNIVERSE
    # A whitespace-only value is rejected the same way as an empty string.
    if not domain.strip():
        raise EmptyUniverseError
    return domain


def compare_domains(client_universe: str, credentials: Any) -> bool:
    """Returns True iff the universe domains used by the client and credentials match.

    Args:
        client_universe (str): The universe domain configured via the client options.
        credentials (Any): The credentials being used in the client.

    Returns:
        bool: True iff client_universe matches the universe in credentials.

    Raises:
        ValueError: when client_universe does not match the universe in credentials.
    """
    # Credentials that do not declare a universe are assumed to target the default.
    credentials_domain = getattr(credentials, "universe_domain", DEFAULT_UNIVERSE)
    if client_universe == credentials_domain:
        return True
    raise UniverseMismatchError(client_universe, credentials_domain)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +API_VERSION_METADATA_KEY = "x-goog-api-version" + + +def to_api_version_header(version_identifier): + """Returns data for the API Version header for the given `version_identifier`. + + Args: + version_identifier (str): The version identifier to be used in the + tuple returned. + + Returns: + Tuple(str, str): A tuple containing the API Version metadata key and + value. + """ + return (API_VERSION_METADATA_KEY, version_identifier) diff --git a/noxfile.py b/noxfile.py index 84470f5a..ac21330e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -15,31 +15,42 @@ from __future__ import absolute_import import os import pathlib +import re import shutil +import unittest # https://github.com/google/importlab/issues/25 import nox # pytype: disable=import-error -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # Black and flake8 clash on the syntax for ignoring flake8's F401 in this file. 
BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"] -DEFAULT_PYTHON_VERSION = "3.7" -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -_MINIMAL_ASYNCIO_SUPPORT_PYTHON_VERSION = [3, 6] +PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] +DEFAULT_PYTHON_VERSION = "3.10" +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -def _greater_or_equal_than_36(version_string): - tokens = version_string.split(".") - for i, token in enumerate(tokens): - try: - tokens[i] = int(token) - except ValueError: - pass - return tokens >= [3, 6] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "unit_grpc_gcp", + "unit_wo_grpc", + "unit_w_prerelease_deps", + "unit_w_async_rest_extra", + "cover", + "pytype", + "mypy", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -49,10 +60,13 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "flake8-import-order", BLACK_VERSION) + session.install("flake8", BLACK_VERSION) session.install(".") session.run( - "black", "--check", *BLACK_EXCLUDES, *BLACK_PATHS, + "black", + "--check", + *BLACK_EXCLUDES, + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +81,37 @@ def blacken(session): session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS) -def default(session): +def install_prerelease_dependencies(session, constraints_path): + with open(constraints_path, encoding="utf-8") as constraints_file: + constraints_text = constraints_file.read() + # Ignore leading whitespace and comment lines. 
+ constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + session.install(*constraints_deps) + prerel_deps = [ + "google-auth", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "proto-plus", + "protobuf", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + +def default(session, install_grpc=True, prerelease=False, install_async_rest=False): """Default unit test session. This is intended to be run **without** an interpreter set, so @@ -75,61 +119,144 @@ def default(session): Python corresponding to the ``nox`` binary the ``PATH`` can run the tests. """ - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + if prerelease and not install_grpc: + unittest.skip("The pre-release session cannot be run without grpc") + + session.install( + "dataclasses", + "mock; python_version=='3.7'", + "pytest", + "pytest-cov", + "pytest-xdist", ) - # Install all test dependencies, then install this package in-place. - session.install("mock", "pytest", "pytest-cov") - session.install("-e", ".[grpc]", "-c", constraints_path) + install_extras = [] + if install_grpc: + # Note: The extra is called `grpc` and not `grpcio`. + install_extras.append("grpc") + + constraints_dir = str(CURRENT_DIRECTORY / "testing") + if install_async_rest: + install_extras.append("async_rest") + constraints_type = "async-rest-" + else: + constraints_type = "" + + lib_with_extras = f".[{','.join(install_extras)}]" if len(install_extras) else "." + if prerelease: + install_prerelease_dependencies( + session, + f"{constraints_dir}/constraints-{constraints_type}{PYTHON_VERSIONS[0]}.txt", + ) + # This *must* be the last install command to get the package from source. 
+ session.install("-e", lib_with_extras, "--no-deps") + else: + constraints_file = ( + f"{constraints_dir}/constraints-{constraints_type}{session.python}.txt" + ) + # fall back to standard constraints file + if not pathlib.Path(constraints_file).exists(): + constraints_file = f"{constraints_dir}/constraints-{session.python}.txt" + + session.install( + "-e", + lib_with_extras, + "-c", + constraints_file, + ) + + # Print out package versions of dependencies + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + # Support for proto.version was added in v1.23.0 + # https://github.com/googleapis/proto-plus-python/releases/tag/v1.23.0 + session.run( + "python", + "-c", + """import proto; hasattr(proto, "version") and print(proto.version.__version__)""", + ) + if install_grpc: + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") pytest_args = [ "python", "-m", - "py.test", - "--quiet", - "--cov=google.api_core", - "--cov=tests.unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), + "pytest", + *( + # Helpful for running a single test or testfile. + session.posargs + or [ + "--quiet", + "--cov=google.api_core", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + # Running individual tests with parallelism enabled is usually not helpful. + "-n=auto", + os.path.join("tests", "unit"), + ] + ), ] - pytest_args.extend(session.posargs) - # Inject AsyncIO content and proto-plus, if version >= 3.6. 
- # proto-plus is needed for a field mask test in test_protobuf_helpers.py - if _greater_or_equal_than_36(session.python): - session.install("asyncmock", "pytest-asyncio", "proto-plus") + session.install("asyncmock", "pytest-asyncio") + # Having positional arguments means the user wants to run specific tests. + # Best not to add additional tests to that list. + if not session.posargs: pytest_args.append("--cov=tests.asyncio") pytest_args.append(os.path.join("tests", "asyncio")) - session.run(*pytest_args) - else: - # Run py.test against the unit tests. - session.run(*pytest_args) + session.run(*pytest_args) -@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) + +@nox.session(python=PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["3.6", "3.7", "3.8", "3.9"]) +@nox.session(python=PYTHON_VERSIONS) +def unit_w_prerelease_deps(session): + """Run the unit test suite.""" + default(session, prerelease=True) + + +@nox.session(python=PYTHON_VERSIONS) def unit_grpc_gcp(session): - """Run the unit test suite with grpcio-gcp installed.""" + """ + Run the unit test suite with grpcio-gcp installed. + `grpcio-gcp` doesn't support protobuf 4+. + Remove extra `grpcgcp` when protobuf 3.x is dropped. 
+ https://github.com/googleapis/python-api-core/issues/594 + """ constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) # Install grpcio-gcp session.install("-e", ".[grpcgcp]", "-c", constraints_path) + # Install protobuf < 4.0.0 + session.install("protobuf<4.0.0") default(session) -@nox.session(python="3.6") +@nox.session(python=PYTHON_VERSIONS) +def unit_wo_grpc(session): + """Run the unit test suite w/o grpcio installed""" + default(session, install_grpc=False) + + +@nox.session(python=PYTHON_VERSIONS) +def unit_w_async_rest_extra(session): + """Run the unit test suite with the `async_rest` extra""" + default(session, install_async_rest=True) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" @@ -137,15 +264,28 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -# No 3.7 because pytype supports up to 3.6 only. -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def pytype(session): """Run type-checking.""" - session.install(".[grpc, grpcgcp]", "pytype >= 2019.3.21") + session.install(".[grpc]", "pytype") session.run("pytype") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Run type-checking.""" + session.install(".[grpc,async_rest]", "mypy") + session.install( + "types-setuptools", + "types-requests", + "types-protobuf", + "types-dataclasses", + "types-mock; python_version=='3.7'", + ) + session.run("mypy", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. 
@@ -157,12 +297,25 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.8") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" - session.install("-e", ".[grpc, grpcgcp]") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") + session.install("-e", ".[grpc]") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -179,13 +332,24 @@ def docs(session): ) -@nox.session(python="3.8") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) diff --git a/owlbot.py b/owlbot.py index 451f7c48..58bc7517 100644 --- a/owlbot.py +++ b/owlbot.py @@ -16,6 +16,7 @@ import synthtool as s from synthtool import gcp +from synthtool.languages import python common = gcp.CommonTemplates() @@ -25,23 +26,15 @@ excludes = [ "noxfile.py", # pytype "setup.cfg", # pytype - ".flake8", # flake8-import-order, layout ".coveragerc", # layout "CONTRIBUTING.rst", # no systests + ".github/workflows/unittest.yml", # exclude unittest gh action + ".github/workflows/lint.yml", # exclude lint gh action + "README.rst", ] templated_files = common.py_library(microgenerator=True, cov_level=100) s.move(templated_files, excludes=excludes) -# Add pytype support -s.replace( - ".gitignore", - """\ -.pytest_cache -""", - """\ -.pytest_cache -.pytype -""", -) +python.configure_previous_major_version_branches() s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..da404ab3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,107 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "google-api-core" +authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }] +license = { text = "Apache 2.0" } +requires-python = ">=3.7" +readme = "README.rst" +description = "Google API client core library" +classifiers = [ + # Should be one of: + # "Development Status :: 3 - Alpha" + # "Development Status :: 4 - Beta" + # "Development Status :: 5 - Production/Stable" + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Operating System :: OS Independent", + "Topic :: Internet", +] +dependencies = [ + "googleapis-common-protos >= 1.56.2, < 2.0.0", + "protobuf >= 3.19.5, < 7.0.0, != 3.20.0, != 3.20.1, != 4.21.0, != 4.21.1, != 4.21.2, != 4.21.3, != 4.21.4, != 4.21.5", + "proto-plus >= 1.22.3, < 2.0.0", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "google-auth >= 2.14.1, < 3.0.0", + "requests >= 2.18.0, < 3.0.0", +] +dynamic = ["version"] + +[project.urls] +Documentation = "https://googleapis.dev/python/google-api-core/latest/" +Repository = "https://github.com/googleapis/python-api-core" + +[project.optional-dependencies] +async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.0"] +grpc = [ + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.49.1, < 2.0.0; python_version >= '3.11'", + "grpcio-status >= 1.33.2, < 2.0.0", + "grpcio-status >= 1.49.1, < 2.0.0; python_version >= '3.11'", +] +grpcgcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"] +grpcio-gcp = ["grpcio-gcp >= 0.2.2, 
< 1.0.0"] + +[tool.setuptools.dynamic] +version = { attr = "google.api_core.version.__version__" } + +[tool.setuptools.packages.find] +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +include = ["google*"] + +[tool.mypy] +python_version = "3.7" +namespace_packages = true +ignore_missing_imports = true + +[tool.pytest] +filterwarnings = [ + # treat all warnings as errors + "error", + # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed + "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning", + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning", + # Remove once support for python 3.7 is dropped + # This warning only appears when using python 3.7 + "ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning", + # Remove once support for grpcio-gcp is deprecated + # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45 + "ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning", + "ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning", + "ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning", + # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 + "ignore:.*pkg_resources.declare_namespace:DeprecationWarning", + "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning", + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published) + "ignore:There is no current event loop:DeprecationWarning", + # Remove after support for Python 3.7 is dropped + "ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning", +] 
diff --git a/renovate.json b/renovate.json index c0489556..c7875c46 100644 --- a/renovate.json +++ b/renovate.json @@ -1,8 +1,11 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 21f6d2a2..120b0ddc 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e9..8f5e248a 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -28,19 +28,22 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -48,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. @@ -58,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba..6f069c6c 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. 
code-block:: bash diff --git a/setup.cfg b/setup.cfg index 0be0b3ff..f7b5a3bc 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,8 +1,5 @@ -[bdist_wheel] -universal = 1 - [pytype] -python_version = 3.6 +python_version = 3.7 inputs = google/ exclude = diff --git a/setup.py b/setup.py index d98c69c5..168877fa 100644 --- a/setup.py +++ b/setup.py @@ -12,91 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import io -import os - import setuptools -# Package metadata. - -name = "google-api-core" -description = "Google API client core library" - -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" -dependencies = [ - "googleapis-common-protos >= 1.6.0, < 2.0dev", - "protobuf >= 3.12.0", - "google-auth >= 1.25.0, < 3.0dev", - "requests >= 2.18.0, < 3.0.0dev", - "setuptools >= 40.3.0", - 'futures >= 3.2.0; python_version < "3.2"', -] -extras = { - "grpc": "grpcio >= 1.29.0, < 2.0dev", - "grpcgcp": "grpcio-gcp >= 0.2.2", - "grpcio-gcp": "grpcio-gcp >= 0.2.2", -} - - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) - - -version = {} -with open(os.path.join(package_root, "google/api_core/version.py")) as fp: - exec(fp.read(), version) -version = version["__version__"] - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. 
-namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/googleapis/python-api-core", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=3.6", - include_package_data=True, - zip_safe=False, -) +setuptools.setup() diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt new file mode 100644 index 00000000..e69de29b diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index e69de29b..4ce1c899 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -0,0 +1,15 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +googleapis-common-protos==1.56.2 +protobuf==3.19.5 +google-auth==2.14.1 +requests==2.18.0 +grpcio==1.33.2 +grpcio-status==1.33.2 +grpcio-gcp==0.2.2 +proto-plus==1.22.3 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt index e69de29b..1b5bb58e 100644 --- a/testing/constraints-3.8.txt +++ b/testing/constraints-3.8.txt @@ -0,0 +1,2 @@ +googleapis-common-protos==1.56.3 +protobuf==4.21.6 \ No newline at end of file diff --git a/testing/constraints-3.6.txt b/testing/constraints-async-rest-3.7.txt similarity index 61% rename from testing/constraints-3.6.txt rename to testing/constraints-async-rest-3.7.txt index 2544e8e5..7aedeb1c 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-async-rest-3.7.txt @@ -5,12 +5,13 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -googleapis-common-protos==1.6.0 -protobuf==3.12.0 -google-auth==1.25.0 -requests==2.18.0 -setuptools==40.3.0 -packaging==14.3 -grpcio==1.29.0 -grpcio-gcp==0.2.2 +googleapis-common-protos==1.56.2 +protobuf==3.19.5 +google-auth==2.35.0 +# from google-auth[aiohttp] +aiohttp==3.6.2 +requests==2.20.0 +grpcio==1.33.2 +grpcio-status==1.33.2 grpcio-gcp==0.2.2 +proto-plus==1.22.3 diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py index 1e9ae334..659f41cf 100644 --- a/tests/asyncio/future/test_async_future.py +++ b/tests/asyncio/future/test_async_future.py @@ -13,8 +13,8 @@ # limitations under the License. 
import asyncio +from unittest import mock -import mock import pytest from google.api_core import exceptions @@ -47,7 +47,6 @@ async def test_polling_future_constructor(): @pytest.mark.asyncio async def test_set_result(): future = AsyncFuture() - callback = mock.Mock() future.set_result(1) diff --git a/tests/asyncio/gapic/test_config_async.py b/tests/asyncio/gapic/test_config_async.py index 1f6ea9e2..dbb05d5e 100644 --- a/tests/asyncio/gapic/test_config_async.py +++ b/tests/asyncio/gapic/test_config_async.py @@ -12,6 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. + +import pytest + +try: + import grpc # noqa: F401 +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core.gapic_v1 import config_async diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py index 2c6bbab9..73f67b8d 100644 --- a/tests/asyncio/gapic/test_method_async.py +++ b/tests/asyncio/gapic/test_method_async.py @@ -14,10 +14,18 @@ import datetime -from grpc.experimental import aio -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest +try: + from grpc import aio, Compression +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async @@ -88,6 +96,35 @@ async def test_wrap_method_with_custom_client_info(): assert client_info.to_grpc_metadata() in metadata +@pytest.mark.asyncio +async def test_wrap_method_with_no_compression(): + fake_call = grpc_helpers_async.FakeUnaryUnaryCall() + method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call) + + wrapped_method = gapic_v1.method_async.wrap_method(method) + + await 
wrapped_method(1, 2, meep="moop", compression=None) + + method.assert_called_once_with(1, 2, meep="moop", metadata=mock.ANY) + + +@pytest.mark.asyncio +async def test_wrap_method_with_custom_compression(): + compression = Compression.Gzip + fake_call = grpc_helpers_async.FakeUnaryUnaryCall() + method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call) + + wrapped_method = gapic_v1.method_async.wrap_method( + method, default_compression=compression + ) + + await wrapped_method(1, 2, meep="moop", compression=Compression.Deflate) + + method.assert_called_once_with( + 1, 2, meep="moop", metadata=mock.ANY, compression=Compression.Deflate + ) + + @pytest.mark.asyncio async def test_invoke_wrapped_method_with_metadata(): fake_call = grpc_helpers_async.FakeUnaryUnaryCall() @@ -122,7 +159,7 @@ async def test_invoke_wrapped_method_with_metadata_as_none(): @mock.patch("asyncio.sleep") @pytest.mark.asyncio -async def test_wrap_method_with_default_retry_and_timeout(unused_sleep): +async def test_wrap_method_with_default_retry_timeout_and_compression(unused_sleep): fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42) method = mock.Mock( spec=aio.UnaryUnaryMultiCallable, @@ -131,15 +168,18 @@ async def test_wrap_method_with_default_retry_and_timeout(unused_sleep): default_retry = retry_async.AsyncRetry() default_timeout = timeout.ConstantTimeout(60) + default_compression = Compression.Gzip wrapped_method = gapic_v1.method_async.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = await wrapped_method() assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=60, metadata=mock.ANY) + method.assert_called_with( + timeout=60, compression=default_compression, metadata=mock.ANY + ) @mock.patch("asyncio.sleep") @@ -153,22 +193,27 @@ async def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_ default_retry = retry_async.AsyncRetry() 
default_timeout = timeout.ConstantTimeout(60) + default_compression = Compression.Gzip wrapped_method = gapic_v1.method_async.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = await wrapped_method( - retry=gapic_v1.method_async.DEFAULT, timeout=gapic_v1.method_async.DEFAULT, + retry=gapic_v1.method_async.DEFAULT, + timeout=gapic_v1.method_async.DEFAULT, + compression=gapic_v1.method_async.DEFAULT, ) assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=60, metadata=mock.ANY) + method.assert_called_with( + timeout=60, compression=Compression.Gzip, metadata=mock.ANY + ) @mock.patch("asyncio.sleep") @pytest.mark.asyncio -async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep): +async def test_wrap_method_with_overriding_retry_timeout_and_compression(unused_sleep): fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42) method = mock.Mock( spec=aio.UnaryUnaryMultiCallable, @@ -177,8 +222,9 @@ async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep): default_retry = retry_async.AsyncRetry() default_timeout = timeout.ConstantTimeout(60) + default_compression = Compression.Gzip wrapped_method = gapic_v1.method_async.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = await wrapped_method( @@ -186,45 +232,13 @@ async def test_wrap_method_with_overriding_retry_and_timeout(unused_sleep): retry_async.if_exception_type(exceptions.NotFound) ), timeout=timeout.ConstantTimeout(22), + compression=Compression.Deflate, ) assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=22, metadata=mock.ANY) - - -@mock.patch("asyncio.sleep") -@mock.patch( - "google.api_core.datetime_helpers.utcnow", - side_effect=_utcnow_monotonic(), - autospec=True, -) -@pytest.mark.asyncio -async def test_wrap_method_with_overriding_retry_deadline(utcnow, 
unused_sleep): - fake_call = grpc_helpers_async.FakeUnaryUnaryCall(42) - method = mock.Mock( - spec=aio.UnaryUnaryMultiCallable, - side_effect=([exceptions.InternalServerError(None)] * 4) + [fake_call], - ) - - default_retry = retry_async.AsyncRetry() - default_timeout = timeout.ExponentialTimeout(deadline=60) - wrapped_method = gapic_v1.method_async.wrap_method( - method, default_retry, default_timeout - ) - - # Overriding only the retry's deadline should also override the timeout's - # deadline. - result = await wrapped_method(retry=default_retry.with_deadline(30)) - - assert result == 42 - timeout_args = [call[1]["timeout"] for call in method.call_args_list] - assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0] - assert utcnow.call_count == ( - 1 - + 1 # Compute wait_for timeout in retry_async - + 5 # First to set the deadline. - + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds) + method.assert_called_with( + timeout=22, compression=Compression.Deflate, metadata=mock.ANY ) @@ -242,3 +256,14 @@ async def test_wrap_method_with_overriding_timeout_as_a_number(): assert result == 42 method.assert_called_once_with(timeout=22, metadata=mock.ANY) + + +@pytest.mark.asyncio +async def test_wrap_method_without_wrap_errors(): + fake_call = mock.AsyncMock() + + wrapped_method = gapic_v1.method_async.wrap_method(fake_call, kind="rest") + with mock.patch("google.api_core.grpc_helpers_async.wrap_errors") as method: + await wrapped_method() + + method.assert_not_called() diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py index 5473e8ae..e5b20dcd 100644 --- a/tests/asyncio/operations_v1/test_operations_async_client.py +++ b/tests/asyncio/operations_v1/test_operations_async_client.py @@ -12,11 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from grpc.experimental import aio -import mock +from unittest import mock + import pytest -from google.api_core import grpc_helpers_async, operations_v1, page_iterator_async +try: + from grpc import aio, Compression +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 +from google.api_core import page_iterator_async from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 @@ -36,10 +43,13 @@ async def test_get_operation(): ) client = operations_v1.OperationsAsyncClient(mocked_channel) - response = await client.get_operation("name", metadata=[("header", "foo")]) + response = await client.get_operation( + "name", metadata=[("header", "foo")], compression=Compression.Gzip + ) assert method.call_count == 1 assert tuple(method.call_args_list[0])[0][0].name == "name" assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"] + assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][ "metadata" ] @@ -57,7 +67,9 @@ async def test_list_operations(): mocked_channel, method, fake_call = _mock_grpc_objects(list_response) client = operations_v1.OperationsAsyncClient(mocked_channel) - pager = await client.list_operations("name", "filter", metadata=[("header", "foo")]) + pager = await client.list_operations( + "name", "filter", metadata=[("header", "foo")], compression=Compression.Gzip + ) assert isinstance(pager, page_iterator_async.AsyncIterator) responses = [] @@ -68,6 +80,7 @@ async def test_list_operations(): assert method.call_count == 1 assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"] + assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][ "metadata" ] @@ -82,11 
+95,14 @@ async def test_delete_operation(): mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty()) client = operations_v1.OperationsAsyncClient(mocked_channel) - await client.delete_operation("name", metadata=[("header", "foo")]) + await client.delete_operation( + "name", metadata=[("header", "foo")], compression=Compression.Gzip + ) assert method.call_count == 1 assert tuple(method.call_args_list[0])[0][0].name == "name" assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"] + assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][ "metadata" ] @@ -97,11 +113,14 @@ async def test_cancel_operation(): mocked_channel, method, fake_call = _mock_grpc_objects(empty_pb2.Empty()) client = operations_v1.OperationsAsyncClient(mocked_channel) - await client.cancel_operation("name", metadata=[("header", "foo")]) + await client.cancel_operation( + "name", metadata=[("header", "foo")], compression=Compression.Gzip + ) assert method.call_count == 1 assert tuple(method.call_args_list[0])[0][0].name == "name" assert ("header", "foo") in tuple(method.call_args_list[0])[1]["metadata"] + assert tuple(method.call_args_list[0])[1]["compression"] == Compression.Gzip assert ("x-goog-request-params", "name=name") in tuple(method.call_args_list[0])[1][ "metadata" ] diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py new file mode 100644 index 00000000..e44f5361 --- /dev/null +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -0,0 +1,601 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio +import datetime +import re + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest + +from google.api_core import exceptions +from google.api_core import retry_async +from google.api_core.retry import retry_streaming_async + +from ...unit.retry.test_retry_base import Test_BaseRetry + + +@pytest.mark.asyncio +async def test_retry_streaming_target_bad_sleep_generator(): + from google.api_core.retry.retry_streaming_async import retry_target_stream + + with pytest.raises(ValueError, match="Sleep generator"): + await retry_target_stream(None, lambda x: True, [], None).__anext__() + + +@mock.patch("asyncio.sleep", autospec=True) +@pytest.mark.asyncio +async def test_retry_streaming_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + from functools import partial + from google.api_core.retry.retry_streaming_async import retry_target_stream + + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in push_sleep_value + sleep_values = [] + error_target = partial(TestAsyncStreamingRetry._generator_mock, error_on=0) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + await retry_target_stream( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, + 
).__anext__() + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) + + +class TestAsyncStreamingRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs) + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + timeout=120.0, + on_error=None, + ) + assert re.match( + ( + r", " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) + + @staticmethod + async def _generator_mock( + num=5, + error_on=None, + exceptions_seen=None, + sleep_time=0, + ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value + """ + try: + for i in range(num): + if sleep_time: + await asyncio.sleep(sleep_time) + if error_on is not None and i == error_on: + raise ValueError("generator mock error") + yield i + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with 
no issues + """ + from collections.abc import AsyncGenerator + + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + + num = 10 + generator = await decorated(num) + # check types + assert isinstance(generator, AsyncGenerator) + assert isinstance(self._generator_mock(num), AsyncGenerator) + # check yield contents + unpacked = [i async for i in generator] + assert len(unpacked) == num + expected = [i async for i in self._generator_mock(num)] + for a, b in zip(unpacked, expected): + assert a == b + sleep.assert_not_called() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming_async.AsyncStreamingRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + timeout=None, + ) + generator = await retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [await generator.__anext__() for i in range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 + await generator.aclose() + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.parametrize("use_deadline_arg", [True, False]) + @pytest.mark.asyncio + async def test___call___generator_retry_hitting_timeout( + self, sleep, uniform, use_deadline_arg + ): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ + import time + + timeout_val = 9.9 + # support "deadline" as an alias for "timeout" + timeout_kwarg = ( + {"timeout": timeout_val} + if not use_deadline_arg + else {"deadline": timeout_val} + ) + + on_error = mock.Mock() + retry_ = retry_streaming_async.AsyncStreamingRetry( + 
predicate=retry_async.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + **timeout_kwarg, + ) + + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = await decorated(error_on=1) + + with now_patcher as patched_now: + # Make sure that calls to fake asyncio.sleep() also advance the mocked + # time clock. + def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + + with pytest.raises(exceptions.RetryError): + [i async for i in generator] + + assert on_error.call_count == 4 + # check the delays + assert sleep.call_count == 3 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + # next wait would have put us over, so ended early + assert last_wait == 4 + assert total_wait == 7 + + @pytest.mark.asyncio + async def test___call___generator_cancellations(self): + """ + cancel calls should propagate to the generator + """ + # test without cancel as retryable + retry_ = retry_streaming_async.AsyncStreamingRetry() + utcnow = datetime.datetime.now(datetime.timezone.utc) + mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) + generator = await retry_(self._generator_mock)(sleep_time=0.2) + assert await generator.__anext__() == 0 + task = asyncio.create_task(generator.__anext__()) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + with pytest.raises(StopAsyncIteration): + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + async def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ 
= yield in_ + + retry_ = retry_streaming_async.AsyncStreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = await decorated() + result = await generator.__anext__() + # first yield should be None + assert result is None + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = await generator.asend(msg) + out_messages.append(recv) + assert in_messages == out_messages + await generator.aclose() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_send_retry(self, sleep): + """ + Send should be retried if target generator raises an error + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming_async.AsyncStreamingRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + timeout=None, + ) + generator = await retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + await generator.asend("cannot send to fresh generator") + assert exc_info.match("can't send non-None value") + await generator.aclose() + + # error thrown on 3 + # generator should contain 0, 1, 2 looping + generator = await retry_(self._generator_mock)(error_on=3) + assert await generator.__anext__() == 0 + unpacked = [await generator.asend(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + await generator.aclose() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + await generator.aclose() + + assert isinstance(exception_list[0], GeneratorExit) + with 
pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_new_generator_close(self, sleep): + """ + Close should be passed through retry into target generator, + even when it hasn't been iterated yet + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + await generator.aclose() + + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + + # The generator should not retry when it encounters a non-retryable error + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=retry_async.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + with pytest.raises(BufferError): + await generator.athrow(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + # In contrast, the generator should retry if we throw a retryable exception + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + throw_val = await generator.athrow(ValueError("test")) + assert throw_val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on generator should not raise error, because it was retried + assert await 
generator.__anext__() == 1 + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_send(self, sleep, awaitable_wrapped): + """ + Send should work like next if the wrapped iterable does not support it + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + retryable = await decorated() + # initiate the generator by calling next + result = await retryable.__anext__() + assert result == 0 + # test sending values + assert await retryable.asend("test") == 1 + assert await retryable.asend("test2") == 2 + assert await retryable.asend("test3") == 3 + await retryable.aclose() + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_close(self, sleep, awaitable_wrapped): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + await retryable.aclose() + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try closing 
new generator + new_retryable = await decorated() + await new_retryable.aclose() + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): + """ + Throw should work even if the wrapped iterable does not support it + """ + + predicate = retry_async.if_exception_type(ValueError) + retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate) + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + # should swallow errors in predicate + await retryable.athrow(ValueError("test")) + # should raise errors not in predicate + with pytest.raises(BufferError): + await retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try throwing with new generator + new_retryable = await decorated() + with pytest.raises(BufferError): + await new_retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.asyncio + async def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming_async import retry_target_stream + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), 
BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + @pytest.mark.asyncio + async def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming_async import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # 
initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/retry/test_retry_unary_async.py similarity index 62% rename from tests/asyncio/test_retry_async.py rename to tests/asyncio/retry/test_retry_unary_async.py index 9e51044b..e7fdc963 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -15,12 +15,18 @@ import datetime import re -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest from google.api_core import exceptions from google.api_core import retry_async +from ...unit.retry.test_retry_base import Test_BaseRetry + @mock.patch("asyncio.sleep", autospec=True) @mock.patch( @@ -97,140 +103,72 @@ async def test_retry_target_non_retryable_error(utcnow, sleep): @mock.patch("asyncio.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) +@mock.patch("time.monotonic", autospec=True) +@pytest.mark.parametrize("use_deadline_arg", [True, False]) @pytest.mark.asyncio -async def test_retry_target_deadline_exceeded(utcnow, sleep): +async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry_async.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. 
- utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. - datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + # call takes 6, which puts the retry over the timeout. + monotonic.side_effect = [0, 5, 11] + + timeout_val = 10 + # support "deadline" as an alias for "timeout" + timeout_kwarg = ( + {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + ) with pytest.raises(exceptions.RetryError) as exc_info: - await retry_async.retry_target(target, predicate, range(10), deadline=10) + await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg) assert exc_info.value.cause == exception - assert exc_info.match("Deadline of 10.0s exceeded") + assert exc_info.match("Timeout of 10.0s exceeded") assert exc_info.match("last exception: meep") assert target.call_count == 2 + # Ensure the exception message does not include the target fn: + # it may be a partial with user data embedded + assert str(target) not in exc_info.exconly() + @pytest.mark.asyncio async def test_retry_target_bad_sleep_generator(): with pytest.raises(ValueError, match="Sleep generator"): - await retry_async.retry_target( - mock.sentinel.target, mock.sentinel.predicate, [], None - ) - + await retry_async.retry_target(mock.sentinel.target, lambda x: True, [], None) -class TestAsyncRetry: - def test_constructor_defaults(self): - retry_ = retry_async.AsyncRetry() - assert retry_._predicate == retry_async.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - - def test_constructor_options(self): - _some_function = mock.Mock() - - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, - ) - assert retry_._predicate == 
mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - def test_with_deadline(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, +@mock.patch("asyncio.sleep", autospec=True) +@pytest.mark.asyncio +async def test_retry_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in 
push_sleep_value + sleep_values = [] + exception = ValueError("trigger retry") + error_target = mock.Mock(side_effect=exception) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + await retry_async.retry_target( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) - def test_with_delay(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error +class TestAsyncRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry_async.AsyncRetry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): @@ -243,13 +181,13 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( ( r", " - r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " r"on_error=None>" ), str(retry_), @@ -272,12 +210,10 @@ async def test___call___and_execute_success(self, sleep): target.assert_called_once_with("meep") sleep.assert_not_called() - # Make uniform return half of its maximum, 
which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___and_execute_retry(self, sleep, uniform): - on_error = mock.Mock(spec=["__call__"], side_effect=[None]) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError) @@ -298,25 +234,20 @@ async def test___call___and_execute_retry(self, sleep, uniform): sleep.assert_called_once_with(retry_._initial) assert on_error.call_count == 1 - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): - + async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=9.9, + timeout=30.9, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. 
@@ -325,11 +256,11 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform) decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake asyncio.sleep() also advance the mocked # time clock. def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time @@ -345,8 +276,17 @@ def increase_time(sleep_delay): last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 2.9 # and not 8.0, because the last delay was shortened - assert total_wait == 9.9 # the same as the deadline + assert last_wait == 8.0 + # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus + # we do not even wait for it to be scheduled (30.9 is configured timeout). + # This changes the previous logic of shortening the last attempt to fit + # in the timeout. The previous logic was removed to make Python retry + # logic consistent with the other languages and to not disrupt the + # randomized retry delays distribution by artificially increasing a + # probability of scheduling two (instead of one) last attempts with very + # short delay between them, while the second retry having very low chance + # of succeeding anyways. + assert total_wait == 15.0 @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio @@ -372,8 +312,7 @@ async def test___init___without_retry_executed(self, sleep): sleep.assert_not_called() _some_function.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. 
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___init___when_retry_is_executed(self, sleep, uniform): diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py index a511ed46..aa8d5d10 100644 --- a/tests/asyncio/test_grpc_helpers_async.py +++ b/tests/asyncio/test_grpc_helpers_async.py @@ -12,10 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. -import grpc -from grpc.experimental import aio -import mock -import pytest +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore +import pytest # noqa: I202 + +try: + import grpc + from grpc import aio +except ImportError: # pragma: NO COVER + grpc = aio = None + + +if grpc is None: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core import grpc_helpers_async @@ -33,6 +46,9 @@ def code(self): def details(self): return None + def trailing_metadata(self): + return None + @pytest.mark.asyncio async def test_wrap_unary_errors(): @@ -85,12 +101,40 @@ async def test_common_methods_in_wrapped_call(): assert mock_call.wait_for_connection.call_count == 1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "callable_type,expected_wrapper_type", + [ + (grpc.aio.UnaryStreamMultiCallable, grpc_helpers_async._WrappedUnaryStreamCall), + (grpc.aio.StreamUnaryMultiCallable, grpc_helpers_async._WrappedStreamUnaryCall), + ( + grpc.aio.StreamStreamMultiCallable, + grpc_helpers_async._WrappedStreamStreamCall, + ), + ], +) +async def test_wrap_errors_w_stream_type(callable_type, expected_wrapper_type): + class ConcreteMulticallable(callable_type): + def __call__(self, 
*args, **kwargs): + raise NotImplementedError("Should not be called") + + with mock.patch.object( + grpc_helpers_async, "_wrap_stream_errors" + ) as wrap_stream_errors: + callable_ = ConcreteMulticallable() + grpc_helpers_async.wrap_errors(callable_) + assert wrap_stream_errors.call_count == 1 + wrap_stream_errors.assert_called_once_with(callable_, expected_wrapper_type) + + @pytest.mark.asyncio async def test_wrap_stream_errors_unary_stream(): mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedUnaryStreamCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -102,7 +146,9 @@ async def test_wrap_stream_errors_stream_unary(): mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamUnaryCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -114,24 +160,15 @@ async def test_wrap_stream_errors_stream_stream(): mock_call = mock.Mock(aio.StreamStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") assert mock_call.wait_for_connection.call_count == 1 -@pytest.mark.asyncio -async def test_wrap_stream_errors_type_error(): - mock_call = mock.Mock() - multicallable = 
mock.Mock(return_value=mock_call) - - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) - - with pytest.raises(TypeError): - await wrapped_callable() - - @pytest.mark.asyncio async def test_wrap_stream_errors_raised(): grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT) @@ -139,7 +176,9 @@ async def test_wrap_stream_errors_raised(): mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error]) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) with pytest.raises(exceptions.InvalidArgument): await wrapped_callable() @@ -154,7 +193,9 @@ async def test_wrap_stream_errors_read(): mock_call.read = mock.AsyncMock(side_effect=grpc_error) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -177,7 +218,9 @@ async def test_wrap_stream_errors_aiter(): mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() with pytest.raises(exceptions.InvalidArgument) as exc_info: @@ -198,7 +241,9 @@ async def test_wrap_stream_errors_aiter_non_rpc_error(): mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + 
wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() with pytest.raises(TypeError) as exc_info: @@ -212,7 +257,9 @@ async def test_wrap_stream_errors_aiter_called_multiple_times(): mock_call = mock.Mock(aio.StreamStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() assert wrapped_call.__aiter__() == wrapped_call.__aiter__() @@ -227,7 +274,9 @@ async def test_wrap_stream_errors_write(): mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error]) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() @@ -254,6 +303,28 @@ def test_wrap_errors_non_streaming(wrap_unary_errors): wrap_unary_errors.assert_called_once_with(callable_) +def test_grpc_async_stream(): + """ + GrpcAsyncStream type should be both an AsyncIterator and a grpc.aio.Call. + """ + instance = grpc_helpers_async.GrpcAsyncStream[int]() + assert isinstance(instance, grpc.aio.Call) + # should implement __aiter__ and __anext__ + assert hasattr(instance, "__aiter__") + it = instance.__aiter__() + assert hasattr(it, "__anext__") + + +def test_awaitable_grpc_call(): + """ + AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call. 
+ """ + instance = grpc_helpers_async.AwaitableGrpcCall() + assert isinstance(instance, grpc.aio.Call) + # should implement __await__ + assert hasattr(instance, "__await__") + + @mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors") def test_wrap_errors_streaming(wrap_stream_errors): callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable) @@ -261,87 +332,151 @@ def test_wrap_errors_streaming(wrap_stream_errors): result = grpc_helpers_async.wrap_errors(callable_) assert result == wrap_stream_errors.return_value - wrap_stream_errors.assert_called_once_with(callable_) + wrap_stream_errors.assert_called_once_with( + callable_, grpc_helpers_async._WrappedUnaryStreamCall + ) -@mock.patch("grpc.composite_channel_credentials") +@pytest.mark.parametrize( + "attempt_direct_path,target,expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -@mock.patch("grpc.experimental.aio.secure_channel") -def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call): - target = "example.com:443" +@mock.patch("grpc.aio.secure_channel") +def test_create_channel_implicit( + grpc_secure_channel, + google_auth_default, + composite_creds_call, + attempt_direct_path, + target, + expected_target, +): composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel(target) + channel = grpc_helpers_async.create_channel( + target, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - 
default.assert_called_once_with(scopes=None, default_scopes=None) - grpc_secure_channel.assert_called_once_with(target, composite_creds) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, compression=None + ) +@pytest.mark.parametrize( + "attempt_direct_path,target, expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) @mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True) @mock.patch( "google.auth.transport.requests.Request", autospec=True, return_value=mock.sentinel.Request, ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_default_host( - grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin + grpc_secure_channel, + google_auth_default, + composite_creds_call, + request, + auth_metadata_plugin, + attempt_direct_path, + target, + expected_target, ): - target = "example.com:443" default_host = "example.com" composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel(target, default_host=default_host) + channel = grpc_helpers_async.create_channel( + target, default_host=default_host, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) 
+ google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) auth_metadata_plugin.assert_called_once_with( mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host ) - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, compression=None + ) +@pytest.mark.parametrize( + "attempt_direct_path", + [ + None, + False, + ], +) @mock.patch("grpc.composite_channel_credentials") @mock.patch( "google.auth.default", - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_ssl_creds( - grpc_secure_channel, default, composite_creds_call + grpc_secure_channel, default, composite_creds_call, attempt_direct_path ): target = "example.com:443" ssl_creds = grpc.ssl_channel_credentials() - grpc_helpers_async.create_channel(target, ssl_credentials=ssl_creds) + grpc_helpers_async.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path + ) default.assert_called_once_with(scopes=None, default_scopes=None) composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY) composite_creds = composite_creds_call.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true(): + target = "example.com:443" + ssl_creds = grpc.ssl_channel_credentials() + with pytest.raises( + ValueError, match="Using ssl_credentials with Direct Path is not supported" + ): + grpc_helpers_async.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=True + ) + + 
+@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_scopes( grpc_secure_channel, default, composite_creds_call ): @@ -353,16 +488,18 @@ def test_create_channel_implicit_with_scopes( assert channel is grpc_secure_channel.return_value default.assert_called_once_with(scopes=["one", "two"], default_scopes=None) - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_default_scopes( grpc_secure_channel, default, composite_creds_call ): @@ -370,13 +507,15 @@ def test_create_channel_implicit_with_default_scopes( composite_creds = composite_creds_call.return_value channel = grpc_helpers_async.create_channel( - target, default_scopes=["three", "four"] + target, default_scopes=["three", "four"], compression=grpc.Compression.Gzip ) assert channel is grpc_secure_channel.return_value default.assert_called_once_with(scopes=None, default_scopes=["three", "four"]) - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=grpc.Compression.Gzip + ) def test_create_channel_explicit_with_duplicate_credentials(): @@ -392,26 +531,28 @@ def 
test_create_channel_explicit_with_duplicate_credentials(): assert "mutually exclusive" in str(excinfo.value) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True) -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call): target = "example.com:443" composite_creds = composite_creds_call.return_value channel = grpc_helpers_async.create_channel( - target, credentials=mock.sentinel.credentials + target, credentials=mock.sentinel.credentials, compression=grpc.Compression.Gzip ) auth_creds.assert_called_once_with( mock.sentinel.credentials, scopes=None, default_scopes=None ) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=grpc.Compression.Gzip + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call): target = "example.com:443" scopes = ["1", "2"] @@ -421,16 +562,21 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal credentials.requires_scopes = True channel = grpc_helpers_async.create_channel( - target, credentials=credentials, scopes=scopes + target, + credentials=credentials, + scopes=scopes, + compression=grpc.Compression.Gzip, ) credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, 
compression=grpc.Compression.Gzip + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_default_scopes( grpc_secure_channel, composite_creds_call ): @@ -442,18 +588,23 @@ def test_create_channel_explicit_default_scopes( credentials.requires_scopes = True channel = grpc_helpers_async.create_channel( - target, credentials=credentials, default_scopes=default_scopes + target, + credentials=credentials, + default_scopes=default_scopes, + compression=grpc.Compression.Gzip, ) credentials.with_scopes.assert_called_once_with( scopes=None, default_scopes=default_scopes ) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=grpc.Compression.Gzip + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_with_quota_project( grpc_secure_channel, composite_creds_call ): @@ -470,17 +621,19 @@ def test_create_channel_explicit_with_quota_project( credentials.with_quota_project.assert_called_once_with("project-foo") assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", autospec=True, return_value=(mock.sentinel.credentials, mock.sentinel.project), ) -def 
test_create_channnel_with_credentials_file( +def test_create_channel_with_credentials_file( load_credentials_from_file, grpc_secure_channel, composite_creds_call ): target = "example.com:443" @@ -496,11 +649,13 @@ def test_create_channnel_with_credentials_file( credentials_file, scopes=None, default_scopes=None ) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", autospec=True, @@ -523,11 +678,13 @@ def test_create_channel_with_credentials_file_and_scopes( credentials_file, scopes=scopes, default_scopes=None ) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") -@mock.patch("grpc.experimental.aio.secure_channel") +@mock.patch("grpc.compute_engine_channel_credentials") +@mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", autospec=True, @@ -550,14 +707,13 @@ def test_create_channel_with_credentials_file_and_default_scopes( credentials_file, scopes=None, default_scopes=default_scopes ) assert channel is grpc_secure_channel.return_value - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@pytest.mark.skipif( - grpc_helpers_async.HAS_GRPC_GCP, reason="grpc_gcp module not available" -) -@mock.patch("grpc.experimental.aio.secure_channel") -def 
test_create_channel_without_grpc_gcp(grpc_secure_channel): +@mock.patch("grpc.aio.secure_channel") +def test_create_channel(grpc_secure_channel): target = "example.com:443" scopes = ["test_scope"] diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py index 907cda7c..9d9fb5d2 100644 --- a/tests/asyncio/test_operation_async.py +++ b/tests/asyncio/test_operation_async.py @@ -13,9 +13,19 @@ # limitations under the License. -import mock import pytest +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +try: + import grpc # noqa: F401 +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core import operation_async from google.api_core import operations_v1 @@ -74,6 +84,7 @@ async def test_constructor(): assert await future.running() +@pytest.mark.asyncio def test_metadata(): expected_metadata = struct_pb2.Struct() future, _, _ = make_operation_future( @@ -166,6 +177,7 @@ async def test_unexpected_result(unused_sleep): assert "Unexpected state" in "{!r}".format(exception) +@pytest.mark.asyncio def test_from_gapic(): operation_proto = make_operation_proto(done=True) operations_client = mock.create_autospec( diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py index 75f9e1cf..63e26d02 100644 --- a/tests/asyncio/test_page_iterator_async.py +++ b/tests/asyncio/test_page_iterator_async.py @@ -14,7 +14,11 @@ import inspect -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest from google.api_core import page_iterator_async @@ -106,6 +110,7 @@ async def test__page_aiter_increment(self): await page_aiter.__anext__() assert iterator.num_results 
== 1 + await page_aiter.aclose() @pytest.mark.asyncio async def test__page_aiter_no_increment(self): @@ -118,6 +123,7 @@ async def test__page_aiter_no_increment(self): # results should still be 0 after fetching a page. assert iterator.num_results == 0 + await page_aiter.aclose() @pytest.mark.asyncio async def test__items_aiter(self): diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py new file mode 100644 index 00000000..c9caa2b1 --- /dev/null +++ b/tests/asyncio/test_rest_streaming_async.py @@ -0,0 +1,378 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: set random.seed explicitly in each test function. +# See related issue: https://github.com/googleapis/python-api-core/issues/689. 
+ +import datetime +import logging +import random +import time +from typing import List, AsyncIterator + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest # noqa: I202 + +import proto + +try: + from google.auth.aio.transport import Response +except ImportError: + pytest.skip( + "google-api-core[async_rest] is required to test asynchronous rest streaming.", + allow_module_level=True, + ) + +from google.api_core import rest_streaming_async +from google.api import http_pb2 +from google.api import httpbody_pb2 + + +from ..helpers import Composer, Song, EchoResponse, parse_responses + + +__protobuf__ = proto.module(package=__name__) +SEED = int(time.time()) +logging.info(f"Starting async rest streaming tests with random seed: {SEED}") +random.seed(SEED) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +class ResponseMock(Response): + class _ResponseItr(AsyncIterator[bytes]): + def __init__(self, _response_bytes: bytes, random_split=False): + self._responses_bytes = _response_bytes + self._idx = 0 + self._random_split = random_split + + def __aiter__(self): + return self + + async def __anext__(self): + if self._idx >= len(self._responses_bytes): + raise StopAsyncIteration + if self._random_split: + n = random.randint(1, len(self._responses_bytes[self._idx :])) + else: + n = 1 + x = self._responses_bytes[self._idx : self._idx + n] + self._idx += n + return x + + def __init__( + self, + responses: List[proto.Message], + response_cls, + random_split=False, + ): + self._responses = responses + self._random_split = random_split + self._response_message_cls = response_cls + + def _parse_responses(self): + return parse_responses(self._response_message_cls, self._responses) + + @property + async def headers(self): + 
raise NotImplementedError() + + @property + async def status_code(self): + raise NotImplementedError() + + async def close(self): + raise NotImplementedError() + + async def content(self, chunk_size=None): + itr = self._ResponseItr( + self._parse_responses(), random_split=self._random_split + ) + async for chunk in itr: + yield chunk + + async def read(self): + raise NotImplementedError() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [(False, True), (False, False)], +) +async def test_next_simple(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = EchoResponse + responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")] + else: + response_type = httpbody_pb2.HttpBody + responses = [ + httpbody_pb2.HttpBody(content_type="hello world"), + httpbody_pb2.HttpBody(content_type="yes"), + ] + + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_nested(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="some song", composer=Composer(given_name="some name")), + Song(title="another song", date_added=datetime.datetime(2021, 12, 17)), + ] + else: + # Although `http_pb2.HttpRule`` is used in the response, any response message + # can be used which meets this criteria for the test of having a nested field. 
+ response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="some selector", + custom=http_pb2.CustomHttpPattern(kind="some kind"), + ), + http_pb2.HttpRule( + selector="another selector", + custom=http_pb2.CustomHttpPattern(path="some path"), + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == len(responses) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_stress(random_split, resp_message_is_proto_plus): + n = 50 + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i)) + for i in range(n) + ] + else: + response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="selector_%d" % i, + custom=http_pb2.CustomHttpPattern(path="path_%d" % i), + ) + for i in range(n) + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == n + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_escaped_characters_in_string( + random_split, resp_message_is_proto_plus +): + if resp_message_is_proto_plus: + response_type = Song + composer_with_relateds = Composer() + relateds = ["Artist A", "Artist B"] + composer_with_relateds.relateds = relateds + + responses = [ + Song( + title='ti"tle\nfoo\tbar{}', 
composer=Composer(given_name="name\n\n\n") + ), + Song( + title='{"this is weird": "totally"}', + composer=Composer(given_name="\\{}\\"), + ), + Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds), + ] + else: + response_type = http_pb2.Http + responses = [ + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='ti"tle\nfoo\tbar{}', + custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='{"this is weird": "totally"}', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='\\{"key": ["value",]}\\', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == len(responses) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +async def test_next_not_array(response_type): + + data = '{"hello": 0}' + with mock.patch.object( + ResponseMock, "content", return_value=mock_async_gen(data) + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + with pytest.raises(ValueError): + await itr.__anext__() + mock_method.assert_called_once() + + +@pytest.mark.asyncio +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +async def test_cancel(response_type): + with mock.patch.object( + ResponseMock, "close", new_callable=mock.AsyncMock + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + await itr.cancel() + mock_method.assert_called_once() + + 
+@pytest.mark.asyncio +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +async def test_iterator_as_context_manager(response_type): + with mock.patch.object( + ResponseMock, "close", new_callable=mock.AsyncMock + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + async with rest_streaming_async.AsyncResponseIterator(resp, response_type): + pass + mock_method.assert_called_once() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "response_type,return_value", + [ + (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")), + (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")), + ], +) +async def test_check_buffer(response_type, return_value): + with mock.patch.object( + ResponseMock, + "_parse_responses", + return_value=return_value, + ): + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + with pytest.raises(ValueError): + await itr.__anext__() + await itr.__anext__() + + +@pytest.mark.asyncio +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +async def test_next_html(response_type): + + data = "" + with mock.patch.object( + ResponseMock, "content", return_value=mock_async_gen(data) + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + with pytest.raises(ValueError): + await itr.__anext__() + mock_method.assert_called_once() + + +@pytest.mark.asyncio +async def test_invalid_response_class(): + class SomeClass: + pass + + resp = ResponseMock(responses=[], response_cls=SomeClass) + with pytest.raises( + ValueError, + match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message", + ): + rest_streaming_async.AsyncResponseIterator(resp, SomeClass) diff --git a/tests/helpers.py b/tests/helpers.py new file mode 100644 index 
00000000..3429d511 --- /dev/null +++ b/tests/helpers.py @@ -0,0 +1,71 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for tests""" + +import logging +from typing import List + +import proto + +from google.protobuf import duration_pb2 +from google.protobuf import timestamp_pb2 +from google.protobuf.json_format import MessageToJson + + +class Genre(proto.Enum): + GENRE_UNSPECIFIED = 0 + CLASSICAL = 1 + JAZZ = 2 + ROCK = 3 + + +class Composer(proto.Message): + given_name = proto.Field(proto.STRING, number=1) + family_name = proto.Field(proto.STRING, number=2) + relateds = proto.RepeatedField(proto.STRING, number=3) + indices = proto.MapField(proto.STRING, proto.STRING, number=4) + + +class Song(proto.Message): + composer = proto.Field(Composer, number=1) + title = proto.Field(proto.STRING, number=2) + lyrics = proto.Field(proto.STRING, number=3) + year = proto.Field(proto.INT32, number=4) + genre = proto.Field(Genre, number=5) + is_five_mins_longer = proto.Field(proto.BOOL, number=6) + score = proto.Field(proto.DOUBLE, number=7) + likes = proto.Field(proto.INT64, number=8) + duration = proto.Field(duration_pb2.Duration, number=9) + date_added = proto.Field(timestamp_pb2.Timestamp, number=10) + + +class EchoResponse(proto.Message): + content = proto.Field(proto.STRING, number=1) + + +def parse_responses(response_message_cls, all_responses: List[proto.Message]) -> bytes: + # json.dumps returns a string surrounded with quotes 
that need to be stripped + # in order to be an actual JSON. + json_responses = [ + ( + response_message_cls.to_json(response).strip('"') + if issubclass(response_message_cls, proto.Message) + else MessageToJson(response).strip('"') + ) + for response in all_responses + ] + logging.info(f"Sending JSON stream: {json_responses}") + ret_val = "[{}]".format(",".join(json_responses)) + return bytes(ret_val, "utf-8") diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py index 98afc599..a37efdd4 100644 --- a/tests/unit/future/test__helpers.py +++ b/tests/unit/future/test__helpers.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock +from unittest import mock from google.api_core.future import _helpers diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py index 2381d036..2f66f230 100644 --- a/tests/unit/future/test_polling.py +++ b/tests/unit/future/test_polling.py @@ -15,8 +15,8 @@ import concurrent.futures import threading import time +from unittest import mock -import mock import pytest from google.api_core import exceptions, retry @@ -24,7 +24,7 @@ class PollingFutureImpl(polling.PollingFuture): - def done(self): + def done(self, retry=None): return False def cancel(self): @@ -33,9 +33,6 @@ def cancel(self): def cancelled(self): return False - def running(self): - return True - def test_polling_future_constructor(): future = PollingFutureImpl() @@ -84,20 +81,23 @@ def test_invoke_callback_exception(): class PollingFutureImplWithPoll(PollingFutureImpl): - def __init__(self): + def __init__(self, max_poll_count=1): super(PollingFutureImplWithPoll, self).__init__() self.poll_count = 0 self.event = threading.Event() + self.max_poll_count = max_poll_count - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): self.poll_count += 1 + if self.max_poll_count > self.poll_count: + return False self.event.wait() 
self.set_result(42) return True -def test_result_with_polling(): - future = PollingFutureImplWithPoll() +def test_result_with_one_polling(): + future = PollingFutureImplWithPoll(max_poll_count=1) future.event.set() result = future.result() @@ -109,8 +109,34 @@ def test_result_with_polling(): assert future.poll_count == 1 +def test_result_with_two_pollings(): + future = PollingFutureImplWithPoll(max_poll_count=2) + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 2 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 2 + + +def test_result_with_two_pollings_custom_retry(): + future = PollingFutureImplWithPoll(max_poll_count=2) + + future.event.set() + result = future.result() + + assert result == 42 + assert future.poll_count == 2 + # Repeated calls should not cause additional polling + assert future.result() == result + assert future.poll_count == 2 + + class PollingFutureImplTimeout(PollingFutureImplWithPoll): - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): time.sleep(1) return False @@ -132,11 +158,11 @@ def __init__(self, errors): super(PollingFutureImplTransient, self).__init__() self._errors = errors - def done(self, retry=polling.DEFAULT_RETRY): + def done(self, retry=None): + self.poll_count += 1 if self._errors: error, self._errors = self._errors[0], self._errors[1:] raise error("testing") - self.poll_count += 1 self.set_result(42) return True @@ -144,17 +170,17 @@ def done(self, retry=polling.DEFAULT_RETRY): def test_result_transient_error(): future = PollingFutureImplTransient( ( - exceptions.TooManyRequests, - exceptions.InternalServerError, - exceptions.BadGateway, + polling._OperationNotComplete, + polling._OperationNotComplete, + polling._OperationNotComplete, ) ) result = future.result() assert result == 42 - assert future.poll_count == 1 + assert future.poll_count == 4 # Repeated calls should not cause 
additional polling assert future.result() == result - assert future.poll_count == 1 + assert future.poll_count == 4 def test_callback_background_thread(): @@ -197,23 +223,23 @@ def test_double_callback_background_thread(): class PollingFutureImplWithoutRetry(PollingFutureImpl): - def done(self): + def done(self, retry=None): return True - def result(self): + def result(self, timeout=None, retry=None, polling=None): return super(PollingFutureImplWithoutRetry, self).result() - def _blocking_poll(self, timeout): + def _blocking_poll(self, timeout=None, retry=None, polling=None): return super(PollingFutureImplWithoutRetry, self)._blocking_poll( timeout=timeout ) class PollingFutureImplWith_done_or_raise(PollingFutureImpl): - def done(self): + def done(self, retry=None): return True - def _done_or_raise(self): + def _done_or_raise(self, retry=None): return super(PollingFutureImplWith_done_or_raise, self)._done_or_raise() @@ -223,12 +249,12 @@ def test_polling_future_without_retry(): ) future = PollingFutureImplWithoutRetry() assert future.done() - assert future.running() + assert not future.running() assert future.result() is None with mock.patch.object(future, "done") as done_mock: future._done_or_raise() - done_mock.assert_called_once_with() + done_mock.assert_called_once_with(retry=None) with mock.patch.object(future, "done") as done_mock: future._done_or_raise(retry=custom_retry) @@ -238,5 +264,5 @@ def test_polling_future_without_retry(): def test_polling_future_with__done_or_raise(): future = PollingFutureImplWith_done_or_raise() assert future.done() - assert future.running() + assert not future.running() assert future.result() is None diff --git a/tests/unit/gapic/test_client_info.py b/tests/unit/gapic/test_client_info.py index 64080ffd..2ca5c404 100644 --- a/tests/unit/gapic/test_client_info.py +++ b/tests/unit/gapic/test_client_info.py @@ -12,6 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import pytest + +try: + import grpc # noqa: F401 +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core.gapic_v1 import client_info diff --git a/tests/unit/gapic/test_config.py b/tests/unit/gapic/test_config.py index 1c15261d..5e42fde8 100644 --- a/tests/unit/gapic/test_config.py +++ b/tests/unit/gapic/test_config.py @@ -12,6 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + +try: + import grpc # noqa: F401 +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core.gapic_v1 import config diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py index e0ea57ac..87aa6390 100644 --- a/tests/unit/gapic/test_method.py +++ b/tests/unit/gapic/test_method.py @@ -13,8 +13,15 @@ # limitations under the License. import datetime +from unittest import mock + +import pytest + +try: + import grpc # noqa: F401 +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) -import mock from google.api_core import exceptions from google.api_core import retry @@ -32,27 +39,6 @@ def _utcnow_monotonic(): curr_value += delta -def test__determine_timeout(): - # Check _determine_timeout always returns a Timeout object. 
- timeout_type_timeout = timeout.ConstantTimeout(600.0) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - 600.0, 600.0, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - 600.0, timeout_type_timeout, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - timeout_type_timeout, 600.0, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - returned_timeout = google.api_core.gapic_v1.method._determine_timeout( - timeout_type_timeout, timeout_type_timeout, None - ) - assert isinstance(returned_timeout, timeout.ConstantTimeout) - - def test_wrap_method_basic(): method = mock.Mock(spec=["__call__"], return_value=42) @@ -135,91 +121,71 @@ def test_invoke_wrapped_method_with_metadata_as_none(): @mock.patch("time.sleep") -def test_wrap_method_with_default_retry_and_timeout(unusued_sleep): +def test_wrap_method_with_default_retry_and_timeout_and_compression(unused_sleep): method = mock.Mock( spec=["__call__"], side_effect=[exceptions.InternalServerError(None), 42] ) default_retry = retry.Retry() default_timeout = timeout.ConstantTimeout(60) + default_compression = grpc.Compression.Gzip wrapped_method = google.api_core.gapic_v1.method.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = wrapped_method() assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=60, metadata=mock.ANY) + method.assert_called_with( + timeout=60, compression=default_compression, metadata=mock.ANY + ) @mock.patch("time.sleep") -def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unusued_sleep): +def test_wrap_method_with_default_retry_and_timeout_using_sentinel(unused_sleep): method = mock.Mock( spec=["__call__"], 
side_effect=[exceptions.InternalServerError(None), 42] ) default_retry = retry.Retry() default_timeout = timeout.ConstantTimeout(60) + default_compression = grpc.Compression.Gzip wrapped_method = google.api_core.gapic_v1.method.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = wrapped_method( retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, + compression=google.api_core.gapic_v1.method.DEFAULT, ) assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=60, metadata=mock.ANY) + method.assert_called_with( + timeout=60, compression=default_compression, metadata=mock.ANY + ) @mock.patch("time.sleep") -def test_wrap_method_with_overriding_retry_and_timeout(unusued_sleep): +def test_wrap_method_with_overriding_retry_timeout_compression(unused_sleep): method = mock.Mock(spec=["__call__"], side_effect=[exceptions.NotFound(None), 42]) default_retry = retry.Retry() default_timeout = timeout.ConstantTimeout(60) + default_compression = grpc.Compression.Gzip wrapped_method = google.api_core.gapic_v1.method.wrap_method( - method, default_retry, default_timeout + method, default_retry, default_timeout, default_compression ) result = wrapped_method( retry=retry.Retry(retry.if_exception_type(exceptions.NotFound)), timeout=timeout.ConstantTimeout(22), + compression=grpc.Compression.Deflate, ) assert result == 42 assert method.call_count == 2 - method.assert_called_with(timeout=22, metadata=mock.ANY) - - -@mock.patch("time.sleep") -@mock.patch( - "google.api_core.datetime_helpers.utcnow", - side_effect=_utcnow_monotonic(), - autospec=True, -) -def test_wrap_method_with_overriding_retry_deadline(utcnow, unused_sleep): - method = mock.Mock( - spec=["__call__"], - side_effect=([exceptions.InternalServerError(None)] * 4) + [42], - ) - default_retry = retry.Retry() - default_timeout = timeout.ExponentialTimeout(deadline=60) - 
wrapped_method = google.api_core.gapic_v1.method.wrap_method( - method, default_retry, default_timeout - ) - - # Overriding only the retry's deadline should also override the timeout's - # deadline. - result = wrapped_method(retry=default_retry.with_deadline(30)) - - assert result == 42 - timeout_args = [call[1]["timeout"] for call in method.call_args_list] - assert timeout_args == [5.0, 10.0, 20.0, 26.0, 25.0] - assert utcnow.call_count == ( - 1 - + 5 # First to set the deadline. - + 5 # One for each min(timeout, maximum, (DEADLINE - NOW).seconds) + method.assert_called_with( + timeout=22, compression=grpc.Compression.Deflate, metadata=mock.ANY ) @@ -235,3 +201,24 @@ def test_wrap_method_with_overriding_timeout_as_a_number(): assert result == 42 method.assert_called_once_with(timeout=22, metadata=mock.ANY) + + +def test_wrap_method_with_call(): + method = mock.Mock() + mock_call = mock.Mock() + method.with_call.return_value = 42, mock_call + + wrapped_method = google.api_core.gapic_v1.method.wrap_method(method, with_call=True) + result = wrapped_method() + assert len(result) == 2 + assert result[0] == 42 + assert result[1] == mock_call + + +def test_wrap_method_with_call_not_supported(): + """Raises an error if wrapped callable doesn't have with_call method.""" + method = lambda: None # noqa: E731 + + with pytest.raises(ValueError) as exc_info: + google.api_core.gapic_v1.method.wrap_method(method, with_call=True) + assert "with_call=True is only supported for unary calls" in str(exc_info.value) diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py index 77300e87..2c8c7546 100644 --- a/tests/unit/gapic/test_routing_header.py +++ b/tests/unit/gapic/test_routing_header.py @@ -12,6 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from enum import Enum + +import pytest + +try: + import grpc # noqa: F401 +except ImportError: + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core.gapic_v1 import routing_header @@ -28,7 +37,67 @@ def test_to_routing_header_with_slashes(): assert value == "name=me/ep&book.read=1%262" +def test_enum_fully_qualified(): + class Message: + class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + params = [("color", Message.Color.RED)] + value = routing_header.to_routing_header(params) + assert value == "color=Color.RED" + value = routing_header.to_routing_header(params, qualified_enums=True) + assert value == "color=Color.RED" + + +def test_enum_nonqualified(): + class Message: + class Color(Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + params = [("color", Message.Color.RED), ("num", 5)] + value = routing_header.to_routing_header(params, qualified_enums=False) + assert value == "color=RED&num=5" + params = {"color": Message.Color.RED, "num": 5} + value = routing_header.to_routing_header(params, qualified_enums=False) + assert value == "color=RED&num=5" + + def test_to_grpc_metadata(): params = [("name", "meep"), ("book.read", "1")] metadata = routing_header.to_grpc_metadata(params) assert metadata == (routing_header.ROUTING_METADATA_KEY, "name=meep&book.read=1") + + +@pytest.mark.parametrize( + "key,value,expected", + [ + ("book.read", "1", "book.read=1"), + ("name", "me/ep", "name=me/ep"), + ("\\", "=", "%5C=%3D"), + (b"hello", "world", "hello=world"), + ("✔️", "✌️", "%E2%9C%94%EF%B8%8F=%E2%9C%8C%EF%B8%8F"), + ], +) +def test__urlencode_param(key, value, expected): + result = routing_header._urlencode_param(key, value) + assert result == expected + + +def test__urlencode_param_caching_performance(): + import time + + key = "key" * 100 + value = "value" * 100 + # time with empty cache + start_time = time.perf_counter() + routing_header._urlencode_param(key, value) + duration = time.perf_counter() - start_time + second_start_time = time.perf_counter() 
+ routing_header._urlencode_param(key, value) + second_duration = time.perf_counter() - second_start_time + # second call should be approximately 10 times faster + assert second_duration < duration / 10 diff --git a/tests/unit/operations_v1/test_operations_client.py b/tests/unit/operations_v1/test_operations_client.py index 001b8fea..fb4b14f1 100644 --- a/tests/unit/operations_v1/test_operations_client.py +++ b/tests/unit/operations_v1/test_operations_client.py @@ -12,9 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +import pytest + +try: + import grpc # noqa: F401 +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import grpc_helpers from google.api_core import operations_v1 from google.api_core import page_iterator +from google.api_core.operations_v1 import operations_client_config from google.longrunning import operations_pb2 from google.protobuf import empty_pb2 @@ -89,3 +97,7 @@ def test_cancel_operation(): ].metadata assert len(channel.CancelOperation.requests) == 1 assert channel.CancelOperation.requests[0].name == "name" + + +def test_operations_client_config(): + assert operations_client_config.config["interfaces"] diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py new file mode 100644 index 00000000..d1f6e0eb --- /dev/null +++ b/tests/unit/operations_v1/test_operations_rest_client.py @@ -0,0 +1,1401 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest +from typing import Any, List + +try: + import grpc # noqa: F401 +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) +from requests import Response # noqa I201 +from google.auth.transport.requests import AuthorizedSession + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core.operations_v1 import AbstractOperationsClient + +import google.auth +from google.api_core.operations_v1 import pagers +from google.api_core.operations_v1 import pagers_async +from google.api_core.operations_v1 import transports +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 +from google.oauth2 import service_account +from google.protobuf import json_format # type: ignore +from google.rpc import status_pb2 # type: ignore + +try: + import aiohttp # noqa: F401 + import google.auth.aio.transport + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + from google.auth.aio import credentials as ga_credentials_async + + GOOGLE_AUTH_AIO_INSTALLED = True +except ImportError: + GOOGLE_AUTH_AIO_INSTALLED = False + +HTTP_OPTIONS = { + 
"google.longrunning.Operations.CancelOperation": [ + {"method": "post", "uri": "/v3/{name=operations/*}:cancel", "body": "*"}, + ], + "google.longrunning.Operations.DeleteOperation": [ + {"method": "delete", "uri": "/v3/{name=operations/*}"}, + ], + "google.longrunning.Operations.GetOperation": [ + {"method": "get", "uri": "/v3/{name=operations/*}"}, + ], + "google.longrunning.Operations.ListOperations": [ + {"method": "get", "uri": "/v3/{name=operations}"}, + ], +} + +PYPARAM_CLIENT: List[Any] = [ + AbstractOperationsClient, +] +PYPARAM_CLIENT_TRANSPORT_NAME = [ + [AbstractOperationsClient, transports.OperationsRestTransport, "rest"], +] +PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [ + [ + AbstractOperationsClient, + transports.OperationsRestTransport, + ga_credentials.AnonymousCredentials(), + ], +] + +if GOOGLE_AUTH_AIO_INSTALLED: + PYPARAM_CLIENT.append(AsyncOperationsRestClient) + PYPARAM_CLIENT_TRANSPORT_NAME.append( + [ + AsyncOperationsRestClient, + transports.AsyncOperationsRestTransport, + "rest_asyncio", + ] + ) + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append( + [ + AsyncOperationsRestClient, + transports.AsyncOperationsRestTransport, + ga_credentials_async.AnonymousCredentials(), + ] + ) + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +def _get_session_type(is_async: bool): + return ( + AsyncAuthorizedSession + if is_async and GOOGLE_AUTH_AIO_INSTALLED + else AuthorizedSession + ) + + +def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS): + if is_async and GOOGLE_AUTH_AIO_INSTALLED: + async_transport = transports.rest_asyncio.AsyncOperationsRestTransport( + credentials=ga_credentials_async.AnonymousCredentials(), + http_options=http_options, + ) + return AsyncOperationsRestClient(transport=async_transport) + else: + sync_transport = transports.rest.OperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), http_options=http_options + ) + return 
AbstractOperationsClient(transport=sync_transport) + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# TODO: Add support for mtls in async rest +@pytest.mark.parametrize( + "client_class", + [ + AbstractOperationsClient, + ], +) +def test__get_default_mtls_endpoint(client_class): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert client_class._get_default_mtls_endpoint(None) is None + assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + ) + assert ( + client_class._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_operations_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + if "async" in str(client_class): + # TODO(): Add support for service account info to async REST transport. 
+ with pytest.raises(NotImplementedError): + info = {"valid": True} + client_class.from_service_account_info(info) + else: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "https://longrunning.googleapis.com" + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.OperationsRestTransport, + # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for + # service account credentials in transports.AsyncOperationsRestTransport + ], +) +def test_operations_client_service_account_always_use_jwt(transport_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_operations_client_from_service_account_file(client_class): + + if "async" in str(client_class): + # TODO(): Add support for service account creds to async REST transport. 
+ with pytest.raises(NotImplementedError): + client_class.from_service_account_file("dummy/file/path.json") + else: + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "https://longrunning.googleapis.com" + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + PYPARAM_CLIENT_TRANSPORT_NAME, +) +def test_operations_client_get_transport_class( + client_class, transport_class, transport_name +): + transport = client_class.get_transport_class() + available_transports = [ + transports.OperationsRestTransport, + ] + if GOOGLE_AUTH_AIO_INSTALLED: + available_transports.append(transports.AsyncOperationsRestTransport) + assert transport in available_transports + + transport = client_class.get_transport_class(transport_name) + assert transport == transport_class + + +# TODO(): Update this test case to include async REST once we have support for MTLS. +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")], +) +@mock.patch.object( + AbstractOperationsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AbstractOperationsClient), +) +def test_operations_client_client_options( + client_class, transport_class, transport_name +): + # # Check that if channel is provided we won't create a new one. 
+ # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc: + # client = client_class(transport=transport_class()) + # gtc.assert_not_called() + + # # Check that if channel is provided via str we will create a new one. + # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc: + # client = client_class(transport=transport_name) + # gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +# TODO: Add support for mtls in async REST +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "true"), + (AbstractOperationsClient, transports.OperationsRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AbstractOperationsClient, + "DEFAULT_ENDPOINT", + 
modify_default_endpoint(AbstractOperationsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_operations_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + + def fake_init(client_cert_source_for_mtls=None, **kwargs): + """Invoke client_cert source if provided.""" + + if client_cert_source_for_mtls: + client_cert_source_for_mtls() + return None + + with mock.patch.object(transport_class, "__init__") as patched: + patched.side_effect = fake_init + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + PYPARAM_CLIENT_TRANSPORT_NAME, +) +def test_operations_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + if "async" in str(client_class): + # TODO(): Add support for scopes to async REST transport. + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError): + client_class(client_options=options, transport=transport_name) + else: + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + PYPARAM_CLIENT_TRANSPORT_NAME, +) +def test_operations_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + if "async" in str(client_class): + # TODO(): Add support for credentials file to async REST transport. + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError): + client_class(client_options=options, transport=transport_name) + else: + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_list_operations_rest(): + client = _get_operations_client(is_async=False) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(_get_session_type(is_async=False), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_operations( + name="operations", filter_="my_filter", page_size=10, page_token="abc" + ) + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations" + assert actual_args.kwargs["params"] == [ + ("filter", "my_filter"), + ("pageSize", 10), + ("pageToken", "abc"), + ] + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListOperationsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_operations_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + + client = _get_operations_client(is_async=True) + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.ListOperationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + response = await client.list_operations( + name="operations", filter_="my_filter", page_size=10, page_token="abc" + ) + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations" + assert actual_args.kwargs["params"] == [ + ("filter", "my_filter"), + ("pageSize", 10), + ("pageToken", "abc"), + ] + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers_async.ListOperationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_operations_rest_failure(): + client = _get_operations_client(is_async=False, http_options=None) + + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + response_value = Response() + response_value.status_code = 400 + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = "https://longrunning.googleapis.com:443/v1/operations" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + client.list_operations(name="operations") + + +@pytest.mark.asyncio +async def test_list_operations_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = 
mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = "https://longrunning.googleapis.com:443/v1/operations" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.list_operations(name="operations") + + +def test_list_operations_rest_pager(): + client = _get_operations_client(is_async=False, http_options=None) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + operations_pb2.ListOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token="abc", + ), + operations_pb2.ListOperationsResponse( + operations=[], + next_page_token="def", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation()], + next_page_token="ghi", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation(), operations_pb2.Operation()], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(json_format.MessageToJson(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + pager = client.list_operations(name="operations") + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + + pages = list(client.list_operations(name="operations").pages) + for page_, 
token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_operations_rest_pager_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + operations_pb2.ListOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token="abc", + ), + operations_pb2.ListOperationsResponse( + operations=[], + next_page_token="def", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation()], + next_page_token="ghi", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation(), operations_pb2.Operation()], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(json_format.MessageToJson(x) for x in response) + return_values = tuple(mock.Mock() for i in response) + for return_val, response_val in zip(return_values, response): + return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8")) + return_val.status_code = 200 + req.side_effect = return_values + + pager = await client.list_operations(name="operations") + + responses = [] + async for response in pager: + responses.append(response) + + results = list(responses) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + pager = await client.list_operations(name="operations") + + responses = [] + async for 
response in pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + + pages = [] + + async for page in pager.pages: + pages.append(page) + for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]): + assert page_.next_page_token == token + + +def test_get_operation_rest(): + client = _get_operations_client(is_async=False) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation( + name="operations/sample1", + done=True, + error=status_pb2.Status(code=411), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_operation("operations/sample1") + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + assert response.name == "operations/sample1" + assert response.done is True + + +@pytest.mark.asyncio +async def test_get_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation( + name="operations/sample1", + done=True, + error=status_pb2.Status(code=411), + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value) + req.return_value = response_value + response = await client.get_operation("operations/sample1") + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + assert response.name == "operations/sample1" + assert response.done is True + + +def test_get_operation_rest_failure(): + client = _get_operations_client(is_async=False, http_options=None) + + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + response_value = Response() + response_value.status_code = 400 + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + client.get_operation("sample0/operations/sample1") + + +@pytest.mark.asyncio +async def test_get_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = 
"https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.get_operation("sample0/operations/sample1") + + +def test_delete_operation_rest(): + client = _get_operations_client(is_async=False) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + client.delete_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "DELETE" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + +@pytest.mark.asyncio +async def test_delete_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + await client.delete_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "DELETE" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + +def test_delete_operation_rest_failure(): + client = _get_operations_client(is_async=False, http_options=None) + + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + response_value = Response() + response_value.status_code = 400 + mock_request = mock.MagicMock() + mock_request.method = "DELETE" + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + client.delete_operation(name="sample0/operations/sample1") + + +@pytest.mark.asyncio +async def test_delete_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "DELETE" + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await 
client.delete_operation(name="sample0/operations/sample1") + + +def test_cancel_operation_rest(): + client = _get_operations_client(is_async=False) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + client.cancel_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "POST" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1:cancel" + ) + + +@pytest.mark.asyncio +async def test_cancel_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + await client.cancel_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "POST" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1:cancel" + ) + + +def test_cancel_operation_rest_failure(): + client = _get_operations_client(is_async=False, http_options=None) + + with mock.patch.object(_get_session_type(is_async=False), "request") as req: + response_value = Response() + response_value.status_code = 400 + mock_request = mock.MagicMock() + mock_request.method = "POST" + mock_request.url = ( + "https://longrunning.googleapis.com/v1/operations/sample1:cancel" + ) + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + client.cancel_operation(name="sample0/operations/sample1") + + +@pytest.mark.asyncio +async def test_cancel_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "POST" + mock_request.url = ( + "https://longrunning.googleapis.com/v1/operations/sample1:cancel" + ) + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await 
client.cancel_operation(name="sample0/operations/sample1") + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_credentials_transport_error(client_class, transport_class, credentials): + + # It is an error to provide credentials and a transport instance. + transport = transport_class(credentials=credentials) + with pytest.raises(ValueError): + client_class( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transport_class(credentials=credentials) + with pytest.raises(ValueError): + client_class( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transport_class(credentials=credentials) + with pytest.raises(ValueError): + client_class( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_transport_instance(client_class, transport_class, credentials): + # A client may be instantiated with a custom transport instance. + transport = transport_class( + credentials=credentials, + ) + client = client_class(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_transport_adc(client_class, transport_class, credentials): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (credentials, None) + transport_class() + adc.assert_called_once() + + +def test_operations_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transports.OperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_operations_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.api_core.operations_v1.transports.OperationsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.OperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "list_operations", + "get_operation", + "delete_operation", + "cancel_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + +def test_operations_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transports.OperationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=(), + quota_project_id="octopus", + ) + + +def test_operations_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.api_core.operations_v1.transports.OperationsTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transports.OperationsTransport() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_operations_auth_adc(client_class): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + + if "async" in str(client_class).lower(): + # TODO(): Add support for adc to async REST transport. + # NOTE: Ideally, the logic for adc shouldn't be called if transport + # is set to async REST. If the user does not configure credentials + # of type `google.auth.aio.credentials.Credentials`, + # we should raise an exception to avoid the adc workflow. + with pytest.raises(google.auth.exceptions.InvalidType): + client_class() + else: + client_class() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) + + +# TODO(https://github.com/googleapis/python-api-core/issues/705): Add +# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported +# in `google.auth.aio.transport`. 
+@pytest.mark.parametrize( + "transport_class", + [ + transports.OperationsRestTransport, + ], +) +def test_operations_http_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transport_class( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_operations_host_no_port(client_class, transport_class, credentials): + client = client_class( + credentials=credentials, + client_options=client_options.ClientOptions( + api_endpoint="longrunning.googleapis.com" + ), + ) + assert client.transport._host == "https://longrunning.googleapis.com" + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_operations_host_with_port(client_class, transport_class, credentials): + client = client_class( + credentials=credentials, + client_options=client_options.ClientOptions( + api_endpoint="longrunning.googleapis.com:8000" + ), + ) + assert client.transport._host == "https://longrunning.googleapis.com:8000" + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_billing_account_path(client_class): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = client_class.common_billing_account_path(billing_account) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_billing_account_path(client_class): + expected = { + "billing_account": "clam", + } + path = client_class.common_billing_account_path(**expected) + + # Check that 
the path construction is reversible. + actual = client_class.parse_common_billing_account_path(path) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_folder_path(client_class): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = client_class.common_folder_path(folder) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_folder_path(client_class): + expected = { + "folder": "octopus", + } + path = client_class.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = client_class.parse_common_folder_path(path) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_organization_path(client_class): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = client_class.common_organization_path(organization) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_organization_path(client_class): + expected = { + "organization": "nudibranch", + } + path = client_class.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = client_class.parse_common_organization_path(path) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_project_path(client_class): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = client_class.common_project_path(project) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_project_path(client_class): + expected = { + "project": "mussel", + } + path = client_class.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = client_class.parse_common_project_path(path) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_location_path(client_class): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = client_class.common_location_path(project, location) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_location_path(client_class): + expected = { + "project": "scallop", + "location": "abalone", + } + path = client_class.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = client_class.parse_common_location_path(path) + assert expected == actual + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials): + client_info = gapic_v1.client_info.ClientInfo() + with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep: + client_class( + credentials=credentials, + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep: + transport_class( + credentials=credentials, + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py new file mode 100644 index 00000000..212c4293 --- /dev/null +++ b/tests/unit/retry/test_retry_base.py @@ -0,0 +1,293 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import itertools
+import re
+from unittest import mock
+
+import pytest
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.auth import exceptions as auth_exceptions
+
+
+def test_if_exception_type():
+    predicate = retry.if_exception_type(ValueError)
+
+    assert predicate(ValueError())
+    assert not predicate(TypeError())
+
+
+def test_if_exception_type_multiple():
+    predicate = retry.if_exception_type(ValueError, TypeError)
+
+    assert predicate(ValueError())
+    assert predicate(TypeError())
+    assert not predicate(RuntimeError())
+
+
+def test_if_transient_error():
+    assert retry.if_transient_error(exceptions.InternalServerError(""))
+    assert retry.if_transient_error(exceptions.TooManyRequests(""))
+    assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
+    assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
+    assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
+    assert retry.if_transient_error(auth_exceptions.TransportError(""))
+    assert not retry.if_transient_error(exceptions.InvalidArgument(""))
+
+
+# Make uniform return its maximum, which will be the calculated
+# sleep time.
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) +def test_exponential_sleep_generator_base_2(uniform): + gen = retry.exponential_sleep_generator(1, 60, multiplier=2) + + result = list(itertools.islice(gen, 8)) + assert result == [1, 2, 4, 8, 16, 32, 60, 60] + + +def test_build_retry_error_empty_list(): + """ + attempt to build a retry error with no errors encountered + should return a generic RetryError + """ + from google.api_core.retry import build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.NON_RETRYABLE_ERROR + src, cause = build_retry_error([], reason, 10) + assert isinstance(src, exceptions.RetryError) + assert cause is None + assert src.message == "Unknown error" + + +def test_build_retry_error_timeout_message(): + """ + should provide helpful error message when timeout is reached + """ + from google.api_core.retry import build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + cause = RuntimeError("timeout") + src, found_cause = build_retry_error([ValueError(), cause], reason, 10) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout of 10.0s exceeded" + # should attach appropriate cause + assert found_cause is cause + + +def test_build_retry_error_empty_timeout(): + """ + attempt to build a retry error when timeout is None + should return a generic timeout error message + """ + from google.api_core.retry import build_retry_error + from google.api_core.retry import RetryFailureReason + + reason = RetryFailureReason.TIMEOUT + src, _ = build_retry_error([], reason, None) + assert isinstance(src, exceptions.RetryError) + assert src.message == "Timeout exceeded" + + +class Test_BaseRetry(object): + def _make_one(self, *args, **kwargs): + return retry.retry_base._BaseRetry(*args, **kwargs) + + def test_constructor_defaults(self): + retry_ = self._make_one() + assert retry_._predicate == 
retry.if_transient_error + assert retry_._initial == 1 + assert retry_._maximum == 60 + assert retry_._multiplier == 2 + assert retry_._timeout == 120 + assert retry_._on_error is None + assert retry_.timeout == 120 + assert retry_.timeout == 120 + + def test_constructor_options(self): + _some_function = mock.Mock() + + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + timeout=4, + on_error=_some_function, + ) + assert retry_._predicate == mock.sentinel.predicate + assert retry_._initial == 1 + assert retry_._maximum == 2 + assert retry_._multiplier == 3 + assert retry_._timeout == 4 + assert retry_._on_error is _some_function + + @pytest.mark.parametrize("use_deadline", [True, False]) + @pytest.mark.parametrize("value", [None, 0, 1, 4, 42, 5.5]) + def test_with_timeout(self, use_deadline, value): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + timeout=4, + on_error=mock.sentinel.on_error, + ) + new_retry = ( + retry_.with_timeout(value) + if not use_deadline + else retry_.with_deadline(value) + ) + assert retry_ is not new_retry + assert new_retry._timeout == value + assert ( + new_retry.timeout == value + if not use_deadline + else new_retry.deadline == value + ) + + # the rest of the attributes should remain the same + assert new_retry._predicate is retry_._predicate + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_predicate(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + timeout=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_predicate(mock.sentinel.predicate) + assert retry_ is not new_retry + assert new_retry._predicate == mock.sentinel.predicate + + # the rest of the attributes should remain the 
same + assert new_retry._timeout == retry_._timeout + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + assert new_retry._on_error is retry_._on_error + + def test_with_delay_noop(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + timeout=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay() + assert retry_ is not new_retry + assert new_retry._initial == retry_._initial + assert new_retry._maximum == retry_._maximum + assert new_retry._multiplier == retry_._multiplier + + @pytest.mark.parametrize( + "originals,updated,expected", + [ + [(1, 2, 3), (4, 5, 6), (4, 5, 6)], + [(1, 2, 3), (0, 0, 0), (0, 0, 0)], + [(1, 2, 3), (None, None, None), (1, 2, 3)], + [(0, 0, 0), (None, None, None), (0, 0, 0)], + [(1, 2, 3), (None, 0.5, None), (1, 0.5, 3)], + [(1, 2, 3), (None, 0.5, 4), (1, 0.5, 4)], + [(1, 2, 3), (9, None, None), (9, 2, 3)], + ], + ) + def test_with_delay(self, originals, updated, expected): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=originals[0], + maximum=originals[1], + multiplier=originals[2], + timeout=14, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay( + initial=updated[0], maximum=updated[1], multiplier=updated[2] + ) + assert retry_ is not new_retry + assert new_retry._initial == expected[0] + assert new_retry._maximum == expected[1] + assert new_retry._multiplier == expected[2] + + # the rest of the attributes should remain the same + assert new_retry._timeout == retry_._timeout + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test_with_delay_partial_options(self): + retry_ = self._make_one( + predicate=mock.sentinel.predicate, + initial=1, + maximum=2, + multiplier=3, + timeout=4, + on_error=mock.sentinel.on_error, + ) + new_retry = retry_.with_delay(initial=4) 
+ assert retry_ is not new_retry + assert new_retry._initial == 4 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(maximum=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 4 + assert new_retry._multiplier == 3 + + new_retry = retry_.with_delay(multiplier=4) + assert retry_ is not new_retry + assert new_retry._initial == 1 + assert new_retry._maximum == 2 + assert new_retry._multiplier == 4 + + # the rest of the attributes should remain the same + assert new_retry._timeout == retry_._timeout + assert new_retry._predicate is retry_._predicate + assert new_retry._on_error is retry_._on_error + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. + retry_ = self._make_one( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + timeout=120.0, + on_error=None, + ) + assert re.match( + ( + r"<_BaseRetry predicate=, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) diff --git a/tests/unit/retry/test_retry_imports.py b/tests/unit/retry/test_retry_imports.py new file mode 100644 index 00000000..597909fc --- /dev/null +++ b/tests/unit/retry/test_retry_imports.py @@ -0,0 +1,33 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_legacy_imports_retry_unary_sync():
+    # TODO: Delete this test when we revert these imports on the
+    # next major version release
+    # (https://github.com/googleapis/python-api-core/issues/576)
+    from google.api_core.retry import datetime_helpers  # noqa: F401
+    from google.api_core.retry import exceptions  # noqa: F401
+    from google.api_core.retry import auth_exceptions  # noqa: F401
+
+
+def test_legacy_imports_retry_unary_async():
+    # TODO: Delete this test when we revert these imports on the
+    # next major version release
+    # (https://github.com/googleapis/python-api-core/issues/576)
+    from google.api_core import retry_async  # noqa: F401
+
+    # See https://github.com/googleapis/python-api-core/issues/586
+    # for context on why we need to test this import explicitly.
+    from google.api_core.retry_async import AsyncRetry  # noqa: F401
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
new file mode 100644
index 00000000..2499b2ae
--- /dev/null
+++ b/tests/unit/retry/test_retry_streaming.py
@@ -0,0 +1,505 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +import re + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest + +from google.api_core import exceptions +from google.api_core import retry +from google.api_core.retry import retry_streaming + +from .test_retry_base import Test_BaseRetry + + +def test_retry_streaming_target_bad_sleep_generator(): + with pytest.raises( + ValueError, match="Sleep generator stopped yielding sleep values" + ): + next(retry_streaming.retry_target_stream(None, lambda x: True, [], None)) + + +@mock.patch("time.sleep", autospec=True) +def test_retry_streaming_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + from functools import partial + + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in push_sleep_value + sleep_values = [] + error_target = partial(TestStreamingRetry._generator_mock, error_on=0) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + next( + retry_streaming.retry_target_stream( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, + ) + ) + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) + + +class TestStreamingRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry_streaming.StreamingRetry(*args, **kwargs) + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. 
+ retry_ = retry_streaming.StreamingRetry( + predicate=if_exception_type, + initial=1.0, + maximum=60.0, + multiplier=2.0, + timeout=120.0, + on_error=None, + ) + assert re.match( + ( + r", " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " + r"on_error=None>" + ), + str(retry_), + ) + + @staticmethod + def _generator_mock( + num=5, + error_on=None, + return_val=None, + exceptions_seen=None, + ): + """ + Helper to create a mock generator that yields a number of values + Generator can optionally raise an exception on a specific iteration + + Args: + - num (int): the number of values to yield. After this, the generator will return `return_val` + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - return_val (any): if given, the generator will return this value after yielding num values + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + """ + try: + for i in range(num): + if error_on is not None and i == error_on: + raise ValueError("generator mock error") + yield i + return return_val + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("time.sleep", autospec=True) + def test___call___success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ + import types + import collections + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + num = 10 + result = decorated(num) + # check types + assert isinstance(decorated(num), collections.abc.Iterable) + assert isinstance(decorated(num), types.GeneratorType) + assert isinstance(self._generator_mock(num), collections.abc.Iterable) + assert isinstance(self._generator_mock(num), types.GeneratorType) + # check yield contents + unpacked = [i for i in 
result] + assert len(unpacked) == num + for a, b in zip(unpacked, self._generator_mock(num)): + assert a == b + sleep.assert_not_called() + + @mock.patch("time.sleep", autospec=True) + def test___call___retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [next(result) for i in range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + @pytest.mark.parametrize("use_deadline_arg", [True, False]) + def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ + import time + + timeout_val = 30.9 + # support "deadline" as an alias for "timeout" + timeout_kwarg = ( + {"timeout": timeout_val} + if not use_deadline_arg + else {"deadline": timeout_val} + ) + + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + **timeout_kwarg, + ) + + timenow = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=timenow, + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = decorated(error_on=1) + with now_patcher as patched_now: + # Make sure that calls to fake time.sleep() also advance the mocked + # time clock. 
+ def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + with pytest.raises(exceptions.RetryError): + [i for i in generator] + + assert on_error.call_count == 5 + # check the delays + assert sleep.call_count == 4 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + assert last_wait == 8.0 + assert total_wait == 15.0 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = decorated() + result = next(generator) + # first yield should be None + assert result is None + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = generator.send(msg) + out_messages.append(recv) + assert in_messages == out_messages + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send_retry(self, sleep): + """ + Send should support retries like next + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + # calling first send with non-None input should raise a TypeError + result.send("can not send to fresh generator") + assert exc_info.match("can't send non-None value") + # initiate iteration with None + result = retry_(self._generator_mock)(error_on=3) + assert result.send(None) == 0 + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [result.send(i) for i in range(10)] + assert 
unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_send(self, sleep): + """ + send should raise attribute error if wrapped iterator does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + generator = decorated(5) + # initialize + next(generator) + # call send + with pytest.raises(AttributeError): + generator.send("test") + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_close(self, sleep): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = decorated(10) + assert next(retryable) == 0 + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + # try closing a new generator + retryable = decorated(10) + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_throw(self, sleep): + """ + Throw should work even if the wrapped iterable does not support it + """ + predicate = retry.if_exception_type(ValueError) + retry_ = retry_streaming.StreamingRetry(predicate=predicate) + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = decorated(10) + assert next(retryable) == 0 + # should swallow errors in predicate + retryable.throw(ValueError) + assert next(retryable) == 1 + # should raise on other errors + with pytest.raises(TypeError): + retryable.throw(TypeError) + with pytest.raises(StopIteration): + next(retryable) + + # try throwing with a new generator + retryable = decorated(10) + with pytest.raises(ValueError): + 
retryable.throw(ValueError) + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_return(self, sleep): + """ + Generator return value should be passed through retry decorator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + expected_value = "done" + generator = decorated(5, return_val=expected_value) + found_value = None + try: + while True: + next(generator) + except StopIteration as e: + found_value = e.value + assert found_value == expected_value + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + generator.close() + assert isinstance(exception_list[0], GeneratorExit) + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + with pytest.raises(BufferError): + generator.throw(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + # should retry if throw retryable exception + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + val 
= generator.throw(ValueError("test")) + assert val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on closed generator should not raise error + assert next(generator) == 1 + + def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming import retry_target_stream + + timeout = None + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize generator + next(generator) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), 
ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # initialize generator + next(generator) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/unit/test_retry.py b/tests/unit/retry/test_retry_unary.py similarity index 51% rename from tests/unit/test_retry.py rename to tests/unit/retry/test_retry_unary.py index 199ca559..f5bbcff7 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/retry/test_retry_unary.py @@ -13,51 +13,19 @@ # limitations under the License. 
import datetime -import itertools +import pytest import re -import mock -import pytest -import requests.exceptions +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore from google.api_core import exceptions from google.api_core import retry -from google.auth import exceptions as auth_exceptions - - -def test_if_exception_type(): - predicate = retry.if_exception_type(ValueError) - - assert predicate(ValueError()) - assert not predicate(TypeError()) - - -def test_if_exception_type_multiple(): - predicate = retry.if_exception_type(ValueError, TypeError) - - assert predicate(ValueError()) - assert predicate(TypeError()) - assert not predicate(RuntimeError()) - - -def test_if_transient_error(): - assert retry.if_transient_error(exceptions.InternalServerError("")) - assert retry.if_transient_error(exceptions.TooManyRequests("")) - assert retry.if_transient_error(exceptions.ServiceUnavailable("")) - assert retry.if_transient_error(requests.exceptions.ConnectionError("")) - assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError("")) - assert retry.if_transient_error(auth_exceptions.TransportError("")) - assert not retry.if_transient_error(exceptions.InvalidArgument("")) - -# Make uniform return half of its maximum, which will be the calculated -# sleep time. 
-@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) -def test_exponential_sleep_generator_base_2(uniform): - gen = retry.exponential_sleep_generator(1, 60, multiplier=2) - - result = list(itertools.islice(gen, 8)) - assert result == [1, 2, 4, 8, 16, 32, 60, 60] +from .test_retry_base import Test_BaseRetry @mock.patch("time.sleep", autospec=True) @@ -129,170 +97,87 @@ def test_retry_target_non_retryable_error(utcnow, sleep): sleep.assert_not_called() +@mock.patch("asyncio.sleep", autospec=True) +@mock.patch( + "google.api_core.datetime_helpers.utcnow", + return_value=datetime.datetime.min, + autospec=True, +) +@pytest.mark.asyncio +async def test_retry_target_warning_for_retry(utcnow, sleep): + predicate = retry.if_exception_type(ValueError) + target = mock.AsyncMock(spec=["__call__"]) + + with pytest.warns(Warning) as exc_info: + # Note: predicate is just a filler and doesn't affect the test + retry.retry_target(target, predicate, range(10), None) + + assert len(exc_info) == 2 + assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING + sleep.assert_not_called() + + @mock.patch("time.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) -def test_retry_target_deadline_exceeded(utcnow, sleep): +@mock.patch("time.monotonic", autospec=True) +@pytest.mark.parametrize("use_deadline_arg", [True, False]) +def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. - utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. 
- datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + # call takes 6, which puts the retry over the timeout. + monotonic.side_effect = [0, 5, 11] + + # support "deadline" as an alias for "timeout" + kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10} with pytest.raises(exceptions.RetryError) as exc_info: - retry.retry_target(target, predicate, range(10), deadline=10) + retry.retry_target(target, predicate, range(10), **kwargs) assert exc_info.value.cause == exception - assert exc_info.match("Deadline of 10.0s exceeded") + assert exc_info.match("Timeout of 10.0s exceeded") assert exc_info.match("last exception: meep") assert target.call_count == 2 + # Ensure the exception message does not include the target fn: + # it may be a partial with user data embedded + assert str(target) not in exc_info.exconly() + def test_retry_target_bad_sleep_generator(): with pytest.raises(ValueError, match="Sleep generator"): - retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None) - + retry.retry_target(mock.sentinel.target, lambda x: True, [], None) -class TestRetry(object): - def test_constructor_defaults(self): - retry_ = retry.Retry() - assert retry_._predicate == retry.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - assert retry_.deadline == 120 - - def test_constructor_options(self): - _some_function = mock.Mock() - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, +@mock.patch("time.sleep", autospec=True) +def test_retry_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep 
generator; values are added after exception in push_sleep_value + sleep_values = [] + exception = ValueError("trigger retry") + error_target = mock.Mock(side_effect=exception) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + retry.retry_target( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) - def test_with_deadline(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def 
test_with_delay_noop(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - def test_with_delay(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) - assert retry_ is not new_retry - assert new_retry._initial == 5 - assert new_retry._maximum == 6 - assert new_retry._multiplier == 7 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error - - def test_with_delay_partial_options(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=4) - assert retry_ is not new_retry - assert new_retry._initial == 4 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(maximum=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 4 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(multiplier=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 4 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error +class TestRetry(Test_BaseRetry): + def _make_one(self, 
*args, **kwargs): + return retry.Retry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): @@ -305,13 +190,13 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( ( r", " - r"initial=1.0, maximum=60.0, multiplier=2.0, deadline=120.0, " + r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, " r"on_error=None>" ), str(retry_), @@ -333,11 +218,9 @@ def test___call___and_execute_success(self, sleep): target.assert_called_once_with("meep") sleep.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___and_execute_retry(self, sleep, uniform): - on_error = mock.Mock(spec=["__call__"], side_effect=[None]) retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) @@ -356,24 +239,19 @@ def test___call___and_execute_retry(self, sleep, uniform): sleep.assert_called_once_with(retry_._initial) assert on_error.call_count == 1 - # Make uniform return half of its maximum, which is the calculated sleep time. 
- @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) - def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): - + def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=9.9, + timeout=30.9, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. @@ -382,11 +260,11 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake time.sleep() also advance the mocked # time clock. def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time @@ -402,8 +280,17 @@ def increase_time(sleep_delay): last_wait = sleep.call_args.args[0] total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) - assert last_wait == 2.9 # and not 8.0, because the last delay was shortened - assert total_wait == 9.9 # the same as the deadline + assert last_wait == 8.0 + # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus + # we do not even wait for it to be scheduled (30.9 is configured timeout). + # This changes the previous logic of shortening the last attempt to fit + # in the timeout. 
The previous logic was removed to make Python retry + # logic consistent with the other languages and to not disrupt the + # randomized retry delays distribution by artificially increasing a + # probability of scheduling two (instead of one) last attempts with very + # short delay between them, while the second retry having very low chance + # of succeeding anyways. + assert total_wait == 15.0 @mock.patch("time.sleep", autospec=True) def test___init___without_retry_executed(self, sleep): @@ -428,8 +315,7 @@ def test___init___without_retry_executed(self, sleep): sleep.assert_not_called() _some_function.assert_not_called() - # Make uniform return half of its maximum, which is the calculated sleep time. - @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n / 2.0) + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___init___when_retry_is_executed(self, sleep, uniform): _some_function = mock.Mock() diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py index b876d9ad..0e7b018c 100644 --- a/tests/unit/test_bidi.py +++ b/tests/unit/test_bidi.py @@ -16,11 +16,21 @@ import logging import queue import threading +import time + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore -import grpc -import mock import pytest +try: + import grpc +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import bidi from google.api_core import exceptions @@ -287,6 +297,9 @@ def test_close(self): # ensure the request queue was signaled to stop. 
assert bidi_rpc.pending_requests == 1 assert bidi_rpc._request_queue.get() is None + # ensure request and callbacks are cleaned up + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks def test_close_no_rpc(self): bidi_rpc = bidi.BidiRpc(None) @@ -614,6 +627,8 @@ def cancel_side_effect(): assert bidi_rpc.pending_requests == 1 assert bidi_rpc._request_queue.get() is None assert bidi_rpc._finalized + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks def test_reopen_failure_on_rpc_restart(self): error1 = ValueError("1") @@ -768,6 +783,7 @@ def on_response(response): consumer.stop() assert consumer.is_active is False + assert consumer._on_response is None def test_wake_on_error(self): should_continue = threading.Event() @@ -800,6 +816,21 @@ def test_wake_on_error(self): while consumer.is_active: pass + def test_rpc_callback_fires_when_consumer_start_fails(self): + expected_exception = exceptions.InvalidArgument( + "test", response=grpc.StatusCode.INVALID_ARGUMENT + ) + callback = mock.Mock(spec=["__call__"]) + + rpc, _ = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + bidi_rpc.add_done_callback(callback) + bidi_rpc._start_rpc.side_effect = expected_exception + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response=None) + consumer.start() + assert callback.call_args.args[0] == grpc.StatusCode.INVALID_ARGUMENT + def test_consumer_expected_error(self, caplog): caplog.set_level(logging.DEBUG) @@ -836,7 +867,7 @@ def test_consumer_unexpected_error(self, caplog): # Wait for the consumer's thread to exit. while consumer.is_active: - pass + pass # pragma: NO COVER (race condition) on_response.assert_not_called() bidi_rpc.recv.assert_called_once() @@ -860,6 +891,30 @@ def close_side_effect(): consumer.stop() assert consumer.is_active is False + assert consumer._on_response is None # calling stop twice should not result in an error. 
consumer.stop() + + def test_stop_error_logs(self, caplog): + """ + Closing the client should result in no internal error logs + + https://github.com/googleapis/python-api-core/issues/788 + """ + caplog.set_level(logging.DEBUG) + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + on_response = mock.Mock(spec=["__call__"]) + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + consumer.stop() + # let the background thread run for a while before exiting + time.sleep(0.1) + bidi_rpc.is_active = False + # running thread should not result in error logs + error_logs = [r.message for r in caplog.records if r.levelname == "ERROR"] + assert not error_logs, f"Found unexpected ERROR logs: {error_logs}" + bidi_rpc.is_active = False diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py index f2274ec2..3361fef6 100644 --- a/tests/unit/test_client_info.py +++ b/tests/unit/test_client_info.py @@ -13,6 +13,11 @@ # limitations under the License. 
+try: + import grpc +except ImportError: # pragma: NO COVER + grpc = None + from google.api_core import client_info @@ -20,7 +25,12 @@ def test_constructor_defaults(): info = client_info.ClientInfo() assert info.python_version is not None - assert info.grpc_version is not None + + if grpc is not None: # pragma: NO COVER + assert info.grpc_version is not None + else: # pragma: NO COVER + assert info.grpc_version is None + assert info.api_core_version is not None assert info.gapic_version is None assert info.client_library_version is None diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py new file mode 100644 index 00000000..b3b0b5c8 --- /dev/null +++ b/tests/unit/test_client_logging.py @@ -0,0 +1,140 @@ +import json +import logging +from unittest import mock + +from google.api_core.client_logging import ( + setup_logging, + initialize_logging, + StructuredLogFormatter, +) + + +def reset_logger(scope): + logger = logging.getLogger(scope) + logger.handlers = [] + logger.setLevel(logging.NOTSET) + logger.propagate = True + + +def test_setup_logging_w_no_scopes(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging() + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + reset_logger("foogle") + + +def test_setup_logging_w_base_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("foogle") + base_logger = logging.getLogger("foogle") + assert isinstance(base_logger.handlers[0], logging.StreamHandler) + assert not base_logger.propagate + assert base_logger.level == logging.DEBUG + + reset_logger("foogle") + + +def test_setup_logging_w_configured_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + base_logger = logging.getLogger("foogle") + base_logger.propagate = False + setup_logging("foogle") + 
assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + reset_logger("foogle") + + +def test_setup_logging_w_module_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("foogle.bar") + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + module_logger = logging.getLogger("foogle.bar") + assert isinstance(module_logger.handlers[0], logging.StreamHandler) + assert not module_logger.propagate + assert module_logger.level == logging.DEBUG + + reset_logger("foogle") + reset_logger("foogle.bar") + + +def test_setup_logging_w_incorrect_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("abc") + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope. 
+ logger = logging.getLogger("abc") + assert isinstance(logger.handlers[0], logging.StreamHandler) + assert not logger.propagate + assert logger.level == logging.DEBUG + + reset_logger("foogle") + reset_logger("abc") + + +def test_initialize_logging(): + + with mock.patch("os.getenv", return_value="foogle.bar"): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + initialize_logging() + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + module_logger = logging.getLogger("foogle.bar") + assert isinstance(module_logger.handlers[0], logging.StreamHandler) + assert not module_logger.propagate + assert module_logger.level == logging.DEBUG + + # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified: + base_logger.propagate = True + module_logger.propagate = True + + initialize_logging() + + assert base_logger.propagate + assert module_logger.propagate + + reset_logger("foogle") + reset_logger("foogle.bar") + + +def test_structured_log_formatter(): + # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented. + record = logging.LogRecord( + name="Appelation", + level=logging.DEBUG, + msg="This is a test message.", + pathname="some/path", + lineno=25, + args=None, + exc_info=None, + ) + + # Extra fields: + record.rpcName = "bar" + + formatted_msg = StructuredLogFormatter().format(record) + parsed_msg = json.loads(formatted_msg) + + assert parsed_msg["name"] == "Appelation" + assert parsed_msg["severity"] == "DEBUG" + assert parsed_msg["message"] == "This is a test message." 
+ assert parsed_msg["rpcName"] == "bar" diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py index 38b9ad0a..396d6627 100644 --- a/tests/unit/test_client_options.py +++ b/tests/unit/test_client_options.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +from re import match import pytest from google.api_core import client_options @@ -36,6 +37,8 @@ def test_constructor(): "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/cloud-platform.read-only", ], + api_audience="foo2.googleapis.com", + universe_domain="googleapis.com", ) assert options.api_endpoint == "foo.googleapis.com" @@ -46,6 +49,8 @@ def test_constructor(): "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/cloud-platform.read-only", ] + assert options.api_audience == "foo2.googleapis.com" + assert options.universe_domain == "googleapis.com" def test_constructor_with_encrypted_cert_source(): @@ -72,10 +77,42 @@ def test_constructor_with_both_cert_sources(): ) +def test_constructor_with_api_key(): + + options = client_options.ClientOptions( + api_endpoint="foo.googleapis.com", + client_cert_source=get_client_cert, + quota_project_id="quote-proj", + api_key="api-key", + scopes=[ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ], + ) + + assert options.api_endpoint == "foo.googleapis.com" + assert options.client_cert_source() == (b"cert", b"key") + assert options.quota_project_id == "quote-proj" + assert options.api_key == "api-key" + assert options.scopes == [ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ] + + +def test_constructor_with_both_api_key_and_credentials_file(): + with pytest.raises(ValueError): + client_options.ClientOptions( + api_key="api-key", + credentials_file="path/to/credentials.json", + 
) + + def test_from_dict(): options = client_options.from_dict( { "api_endpoint": "foo.googleapis.com", + "universe_domain": "googleapis.com", "client_cert_source": get_client_cert, "quota_project_id": "quote-proj", "credentials_file": "path/to/credentials.json", @@ -83,10 +120,12 @@ def test_from_dict(): "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/cloud-platform.read-only", ], + "api_audience": "foo2.googleapis.com", } ) assert options.api_endpoint == "foo.googleapis.com" + assert options.universe_domain == "googleapis.com" assert options.client_cert_source() == (b"cert", b"key") assert options.quota_project_id == "quote-proj" assert options.credentials_file == "path/to/credentials.json" @@ -94,6 +133,8 @@ def test_from_dict(): "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/cloud-platform.read-only", ] + assert options.api_key is None + assert options.api_audience == "foo2.googleapis.com" def test_from_dict_bad_argument(): @@ -108,10 +149,22 @@ def test_from_dict_bad_argument(): def test_repr(): - options = client_options.ClientOptions(api_endpoint="foo.googleapis.com") - - assert ( - repr(options) - == "ClientOptions: {'api_endpoint': 'foo.googleapis.com', 'client_cert_source': None, 'client_encrypted_cert_source': None}" - or "ClientOptions: {'client_encrypted_cert_source': None, 'client_cert_source': None, 'api_endpoint': 'foo.googleapis.com'}" + expected_keys = set( + [ + "api_endpoint", + "universe_domain", + "client_cert_source", + "client_encrypted_cert_source", + "quota_project_id", + "credentials_file", + "scopes", + "api_key", + "api_audience", + ] ) + options = client_options.ClientOptions(api_endpoint="foo.googleapis.com") + options_repr = repr(options) + options_keys = vars(options).keys() + assert match(r"ClientOptions:", options_repr) + assert match(r".*'api_endpoint': 'foo.googleapis.com'.*", options_repr) + assert options_keys == expected_keys diff --git 
a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 10599457..e3f8f909 100644 --- a/tests/unit/test_exceptions.py +++ b/tests/unit/test_exceptions.py @@ -14,12 +14,20 @@ import http.client import json +from unittest import mock -import grpc -import mock +import pytest import requests +try: + import grpc + from grpc_status import rpc_status +except ImportError: # pragma: NO COVER + grpc = rpc_status = None + from google.api_core import exceptions +from google.protobuf import any_pb2, json_format +from google.rpc import error_details_pb2, status_pb2 def test_create_google_cloud_error(): @@ -33,11 +41,8 @@ def test_create_google_cloud_error(): def test_create_google_cloud_error_with_args(): error = { - "domain": "global", - "location": "test", - "locationType": "testing", + "code": 600, "message": "Testing", - "reason": "test", } response = mock.sentinel.response exception = exceptions.GoogleAPICallError("Testing", [error], response=response) @@ -102,6 +107,7 @@ def test_from_http_response_no_content(): def test_from_http_response_text_content(): response = make_response(b"message") + response.encoding = "UTF8" # suppress charset_normalizer warning exception = exceptions.from_http_response(response) @@ -150,6 +156,7 @@ def test_from_http_response_json_unicode_content(): assert exception.errors == ["1", "2"] +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_status(): message = "message" exception = exceptions.from_grpc_status(grpc.StatusCode.OUT_OF_RANGE, message) @@ -161,6 +168,7 @@ def test_from_grpc_status(): assert exception.errors == [] +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_status_as_int(): message = "message" exception = exceptions.from_grpc_status(11, message) @@ -172,6 +180,7 @@ def test_from_grpc_status_as_int(): assert exception.errors == [] +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_status_with_errors_and_response(): message = "message" response = 
mock.sentinel.response @@ -186,6 +195,7 @@ def test_from_grpc_status_with_errors_and_response(): assert exception.response == response +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_status_unknown_code(): message = "message" exception = exceptions.from_grpc_status(grpc.StatusCode.OK, message) @@ -193,6 +203,7 @@ def test_from_grpc_status_unknown_code(): assert exception.message == message +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_error(): message = "message" error = mock.create_autospec(grpc.Call, instance=True) @@ -210,6 +221,7 @@ def test_from_grpc_error(): assert exception.response == error +@pytest.mark.skipif(grpc is None, reason="No grpc") def test_from_grpc_error_non_call(): message = "message" error = mock.create_autospec(grpc.RpcError, instance=True) @@ -223,3 +235,161 @@ def test_from_grpc_error_non_call(): assert exception.message == message assert exception.errors == [error] assert exception.response == error + + +@pytest.mark.skipif(grpc is None, reason="No grpc") +def test_from_grpc_error_bare_call(): + message = "Testing" + + class TestingError(grpc.Call, grpc.RpcError): + def __init__(self, exception): + self.exception = exception + + def code(self): + return self.exception.grpc_status_code + + def details(self): + return message + + nested_message = "message" + error = TestingError(exceptions.GoogleAPICallError(nested_message)) + + exception = exceptions.from_grpc_error(error) + + assert isinstance(exception, exceptions.GoogleAPICallError) + assert exception.code is None + assert exception.grpc_status_code is None + assert exception.message == message + assert exception.errors == [error] + assert exception.response == error + assert exception.details == [] + + +def create_bad_request_details(): + bad_request_details = error_details_pb2.BadRequest() + field_violation = bad_request_details.field_violations.add() + field_violation.field = "document.content" + field_violation.description = "Must 
have some text content to annotate." + status_detail = any_pb2.Any() + status_detail.Pack(bad_request_details) + return status_detail + + +def create_error_info_details(): + info = error_details_pb2.ErrorInfo( + reason="SERVICE_DISABLED", + domain="googleapis.com", + metadata={ + "consumer": "projects/455411330361", + "service": "translate.googleapis.com", + }, + ) + status_detail = any_pb2.Any() + status_detail.Pack(info) + return status_detail + + +def test_error_details_from_rest_response(): + bad_request_detail = create_bad_request_details() + error_info_detail = create_error_info_details() + status = status_pb2.Status() + status.code = 3 + status.message = ( + "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set." + ) + status.details.append(bad_request_detail) + status.details.append(error_info_detail) + + # See JSON schema in https://cloud.google.com/apis/design/errors#http_mapping + http_response = make_response( + json.dumps( + {"error": json.loads(json_format.MessageToJson(status, sort_keys=True))} + ).encode("utf-8") + ) + exception = exceptions.from_http_response(http_response) + want_error_details = [ + json.loads(json_format.MessageToJson(bad_request_detail)), + json.loads(json_format.MessageToJson(error_info_detail)), + ] + assert want_error_details == exception.details + + # 404 POST comes from make_response. + assert str(exception) == ( + "404 POST https://example.com/: 3 INVALID_ARGUMENT:" + " One of content, or gcs_content_uri must be set." 
+ " [{'@type': 'type.googleapis.com/google.rpc.BadRequest'," + " 'fieldViolations': [{'description': 'Must have some text content to annotate.'," + " 'field': 'document.content'}]}," + " {'@type': 'type.googleapis.com/google.rpc.ErrorInfo'," + " 'domain': 'googleapis.com'," + " 'metadata': {'consumer': 'projects/455411330361'," + " 'service': 'translate.googleapis.com'}," + " 'reason': 'SERVICE_DISABLED'}]" + ) + + +def test_error_details_from_v1_rest_response(): + response = make_response( + json.dumps( + {"error": {"message": "\u2019 message", "errors": ["1", "2"]}} + ).encode("utf-8") + ) + exception = exceptions.from_http_response(response) + assert exception.details == [] + assert ( + exception.reason is None + and exception.domain is None + and exception.metadata is None + ) + + +@pytest.mark.skipif(grpc is None, reason="gRPC not importable") +def test_error_details_from_grpc_response(): + status = rpc_status.status_pb2.Status() + status.code = 3 + status.message = ( + "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set." + ) + status_br_detail = create_bad_request_details() + status_ei_detail = create_error_info_details() + status.details.append(status_br_detail) + status.details.append(status_ei_detail) + + # The actual error doesn't matter as long as its grpc.Call, + # because from_call is mocked. 
+ error = mock.create_autospec(grpc.Call, instance=True) + with mock.patch("grpc_status.rpc_status.from_call") as m: + m.return_value = status + exception = exceptions.from_grpc_error(error) + + bad_request_detail = error_details_pb2.BadRequest() + error_info_detail = error_details_pb2.ErrorInfo() + status_br_detail.Unpack(bad_request_detail) + status_ei_detail.Unpack(error_info_detail) + assert exception.details == [bad_request_detail, error_info_detail] + assert exception.reason == error_info_detail.reason + assert exception.domain == error_info_detail.domain + assert exception.metadata == error_info_detail.metadata + + +@pytest.mark.skipif(grpc is None, reason="gRPC not importable") +def test_error_details_from_grpc_response_unknown_error(): + status_detail = any_pb2.Any() + + status = rpc_status.status_pb2.Status() + status.code = 3 + status.message = ( + "3 INVALID_ARGUMENT: One of content, or gcs_content_uri must be set." + ) + status.details.append(status_detail) + + error = mock.create_autospec(grpc.Call, instance=True) + with mock.patch("grpc_status.rpc_status.from_call") as m: + m.return_value = status + exception = exceptions.from_grpc_error(error) + assert exception.details == [status_detail] + assert ( + exception.reason is None + and exception.domain is None + and exception.metadata is None + ) diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py new file mode 100644 index 00000000..ab550662 --- /dev/null +++ b/tests/unit/test_extended_operation.py @@ -0,0 +1,246 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +import enum +import typing +from unittest import mock + +import pytest + +from google.api_core import exceptions +from google.api_core import extended_operation +from google.api_core import retry + +TEST_OPERATION_NAME = "test/extended_operation" + + +@dataclasses.dataclass(frozen=True) +class CustomOperation: + class StatusCode(enum.Enum): + UNKNOWN = 0 + DONE = 1 + PENDING = 2 + + class LROCustomErrors: + class LROCustomError: + def __init__(self, code: str = "", message: str = ""): + self.code = code + self.message = message + + def __init__(self, errors: typing.List[LROCustomError] = []): + self.errors = errors + + name: str + status: StatusCode + error_code: typing.Optional[int] = None + error_message: typing.Optional[str] = None + armor_class: typing.Optional[int] = None + # Note: `error` can be removed once proposal A from + # b/284179390 is implemented. + error: typing.Optional[LROCustomErrors] = None + + # Note: in generated clients, this property must be generated for each + # extended operation message type. + # The status may be an enum, a string, or a bool. If it's a string or enum, + # its text is compared to the string "DONE". 
+ @property + def done(self): + return self.status.name == "DONE" + + +def make_extended_operation(responses=None): + client_operations_responses = responses or [ + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING + ) + ] + + refresh = mock.Mock(spec=["__call__"], side_effect=client_operations_responses) + refresh.responses = client_operations_responses + cancel = mock.Mock(spec=["__call__"]) + extended_operation_future = extended_operation.ExtendedOperation.make( + refresh, + cancel, + client_operations_responses[0], + ) + + return extended_operation_future, refresh, cancel + + +def test_constructor(): + ex_op, refresh, _ = make_extended_operation() + assert ex_op._extended_operation == refresh.responses[0] + assert not ex_op.cancelled() + assert not ex_op.done() + assert ex_op.name == TEST_OPERATION_NAME + assert ex_op.status == CustomOperation.StatusCode.PENDING + assert ex_op.error_code is None + assert ex_op.error_message is None + + +def test_done(): + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING + ), + # Second response indicates that the operation has finished. + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE + ), + # Bumper to make sure we stop polling on DONE. + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.DONE, + error_message="Gone too far!", + ), + ] + ex_op, refresh, _ = make_extended_operation(responses) + + # Start out not done. + assert not ex_op.done() + assert refresh.call_count == 1 + + # Refresh brings us to the done state. + assert ex_op.done() + assert refresh.call_count == 2 + assert not ex_op.error_message + + # Make sure that subsequent checks are no-ops. 
+ assert ex_op.done() + assert refresh.call_count == 2 + assert not ex_op.error_message + + +def test_cancellation(): + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING + ), + # Second response indicates that the operation was cancelled. + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE + ), + ] + ex_op, _, cancel = make_extended_operation(responses) + + assert not ex_op.cancelled() + + assert ex_op.cancel() + assert ex_op.cancelled() + cancel.assert_called_once_with() + + # Cancelling twice should have no effect. + assert not ex_op.cancel() + cancel.assert_called_once_with() + + +def test_done_w_retry(): + # Not sure what's going on here with the coverage, so just ignore it. + test_retry = retry.Retry(predicate=lambda x: True) # pragma: NO COVER + + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.PENDING + ), + CustomOperation( + name=TEST_OPERATION_NAME, status=CustomOperation.StatusCode.DONE + ), + ] + + ex_op, refresh, _ = make_extended_operation(responses) + + ex_op.done(retry=test_retry) + + refresh.assert_called_once_with(retry=test_retry) + + +def test_error(): + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.DONE, + error_code=400, + error_message="Bad request", + ), + ] + + ex_op, _, _ = make_extended_operation(responses) + + # Defaults to CallError when grpc is not installed + with pytest.raises(exceptions.BadRequest): + ex_op.result() + + # Test GCE custom LRO Error. See b/284179390 + # Note: This test case can be removed once proposal A from + # b/284179390 is implemented. 
+ _EXCEPTION_CODE = "INCOMPATIBLE_BACKEND_SERVICES" + _EXCEPTION_MESSAGE = "Validation failed for instance group" + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.DONE, + error_code=400, + error_message="Bad request", + error=CustomOperation.LROCustomErrors( + errors=[ + CustomOperation.LROCustomErrors.LROCustomError( + code=_EXCEPTION_CODE, message=_EXCEPTION_MESSAGE + ) + ] + ), + ), + ] + + ex_op, _, _ = make_extended_operation(responses) + + # Defaults to CallError when grpc is not installed + with pytest.raises( + exceptions.BadRequest, match=f"{_EXCEPTION_CODE}: {_EXCEPTION_MESSAGE}" + ): + ex_op.result() + + # Inconsistent result + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.DONE, + error_code=2112, + ), + ] + + ex_op, _, _ = make_extended_operation(responses) + + with pytest.raises(exceptions.GoogleAPICallError): + ex_op.result() + + +def test_pass_through(): + responses = [ + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.PENDING, + armor_class=10, + ), + CustomOperation( + name=TEST_OPERATION_NAME, + status=CustomOperation.StatusCode.DONE, + armor_class=20, + ), + ] + ex_op, _, _ = make_extended_operation(responses) + + assert ex_op.armor_class == 10 + ex_op.result() + assert ex_op.armor_class == 20 diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py index 12bf1849..8de9d8c0 100644 --- a/tests/unit/test_grpc_helpers.py +++ b/tests/unit/test_grpc_helpers.py @@ -12,10 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import grpc -import mock +from unittest import mock + import pytest +try: + import grpc +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) + from google.api_core import exceptions from google.api_core import grpc_helpers import google.auth.credentials @@ -52,6 +57,9 @@ def code(self): def details(self): return None + def trailing_metadata(self): + return None + def test_wrap_unary_errors(): grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT) @@ -66,6 +74,145 @@ def test_wrap_unary_errors(): assert exc_info.value.response == grpc_error +class Test_StreamingResponseIterator: + @staticmethod + def _make_wrapped(*items): + return iter(items) + + @staticmethod + def _make_one(wrapped, **kw): + return grpc_helpers._StreamingResponseIterator(wrapped, **kw) + + def test_ctor_defaults(self): + wrapped = self._make_wrapped("a", "b", "c") + iterator = self._make_one(wrapped) + assert iterator._stored_first_result == "a" + assert list(wrapped) == ["b", "c"] + + def test_ctor_explicit(self): + wrapped = self._make_wrapped("a", "b", "c") + iterator = self._make_one(wrapped, prefetch_first_result=False) + assert getattr(iterator, "_stored_first_result", self) is self + assert list(wrapped) == ["a", "b", "c"] + + def test_ctor_w_rpc_error_on_prefetch(self): + wrapped = mock.MagicMock() + wrapped.__next__.side_effect = grpc.RpcError() + + with pytest.raises(grpc.RpcError): + self._make_one(wrapped) + + def test___iter__(self): + wrapped = self._make_wrapped("a", "b", "c") + iterator = self._make_one(wrapped) + assert iter(iterator) is iterator + + def test___next___w_cached_first_result(self): + wrapped = self._make_wrapped("a", "b", "c") + iterator = self._make_one(wrapped) + assert next(iterator) == "a" + iterator = self._make_one(wrapped, prefetch_first_result=False) + assert next(iterator) == "b" + assert next(iterator) == "c" + + def test___next___wo_cached_first_result(self): + wrapped = self._make_wrapped("a", "b", "c") + 
iterator = self._make_one(wrapped, prefetch_first_result=False) + assert next(iterator) == "a" + assert next(iterator) == "b" + assert next(iterator) == "c" + + def test___next___w_rpc_error(self): + wrapped = mock.MagicMock() + wrapped.__next__.side_effect = grpc.RpcError() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + with pytest.raises(exceptions.GoogleAPICallError): + next(iterator) + + def test_add_callback(self): + wrapped = mock.MagicMock() + callback = mock.Mock(spec={}) + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.add_callback(callback) is wrapped.add_callback.return_value + + wrapped.add_callback.assert_called_once_with(callback) + + def test_cancel(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.cancel() is wrapped.cancel.return_value + + wrapped.cancel.assert_called_once_with() + + def test_code(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.code() is wrapped.code.return_value + + wrapped.code.assert_called_once_with() + + def test_details(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.details() is wrapped.details.return_value + + wrapped.details.assert_called_once_with() + + def test_initial_metadata(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.initial_metadata() is wrapped.initial_metadata.return_value + + wrapped.initial_metadata.assert_called_once_with() + + def test_is_active(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.is_active() is wrapped.is_active.return_value + + wrapped.is_active.assert_called_once_with() + + def test_time_remaining(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, 
prefetch_first_result=False) + + assert iterator.time_remaining() is wrapped.time_remaining.return_value + + wrapped.time_remaining.assert_called_once_with() + + def test_trailing_metadata(self): + wrapped = mock.MagicMock() + iterator = self._make_one(wrapped, prefetch_first_result=False) + + assert iterator.trailing_metadata() is wrapped.trailing_metadata.return_value + + wrapped.trailing_metadata.assert_called_once_with() + + +class TestGrpcStream(Test_StreamingResponseIterator): + @staticmethod + def _make_one(wrapped, **kw): + return grpc_helpers.GrpcStream(wrapped, **kw) + + def test_grpc_stream_attributes(self): + """ + Should be both a grpc.Call and an iterable + """ + call = self._make_one(None) + assert isinstance(call, grpc.Call) + # should implement __iter__ + assert hasattr(call, "__iter__") + it = call.__iter__() + assert hasattr(it, "__next__") + + def test_wrap_stream_okay(): expected_responses = [1, 2, 3] callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses)) @@ -93,7 +240,7 @@ def test_wrap_stream_prefetch_disabled(): callable_.assert_called_once_with(1, 2, three="four") -def test_wrap_stream_iterable_iterface(): +def test_wrap_stream_iterable_interface(): response_iter = mock.create_autospec(grpc.Call, instance=True) callable_ = mock.Mock(spec=["__call__"], return_value=response_iter) @@ -219,36 +366,72 @@ def test_wrap_errors_streaming(wrap_stream_errors): wrap_stream_errors.assert_called_once_with(callable_) -@mock.patch("grpc.composite_channel_credentials") +@pytest.mark.parametrize( + "attempt_direct_path,target,expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", 
autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.secure_channel") -def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call): - target = "example.com:443" +def test_create_channel_implicit( + grpc_secure_channel, + google_auth_default, + composite_creds_call, + attempt_direct_path, + target, + expected_target, +): composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel(target) + channel = grpc_helpers.create_channel( + target, + compression=grpc.Compression.Gzip, + attempt_direct_path=attempt_direct_path, + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) - if grpc_helpers.HAS_GRPC_GCP: - grpc_secure_channel.assert_called_once_with(target, composite_creds, None) + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER + # The original target is the expected target + expected_target = target + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, None + ) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, compression=grpc.Compression.Gzip + ) +@pytest.mark.parametrize( + "attempt_direct_path,target, expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) @mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True) @mock.patch( "google.auth.transport.requests.Request", autospec=True, return_value=mock.sentinel.Request, ) 
-@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -256,58 +439,95 @@ def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_c ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_default_host( - grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin + grpc_secure_channel, + google_auth_default, + composite_creds_call, + request, + auth_metadata_plugin, + attempt_direct_path, + target, + expected_target, ): - target = "example.com:443" default_host = "example.com" composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel(target, default_host=default_host) + channel = grpc_helpers.create_channel( + target, default_host=default_host, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) auth_metadata_plugin.assert_called_once_with( mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host ) - if grpc_helpers.HAS_GRPC_GCP: - grpc_secure_channel.assert_called_once_with(target, composite_creds, None) + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER + # The original target is the expected target + expected_target = target + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, None + ) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, compression=None + ) +@pytest.mark.parametrize( + "attempt_direct_path", + [ + None, + False, + ], +) @mock.patch("grpc.composite_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + 
return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_ssl_creds( - grpc_secure_channel, default, composite_creds_call + grpc_secure_channel, default, composite_creds_call, attempt_direct_path ): target = "example.com:443" ssl_creds = grpc.ssl_channel_credentials() - grpc_helpers.create_channel(target, ssl_credentials=ssl_creds) + grpc_helpers.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path + ) default.assert_called_once_with(scopes=None, default_scopes=None) composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY) composite_creds = composite_creds_call.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true(): + target = "example.com:443" + ssl_creds = grpc.ssl_channel_credentials() + with pytest.raises( + ValueError, match="Using ssl_credentials with Direct Path is not supported" + ): + grpc_helpers.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=True + ) + + +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_scopes( @@ -322,17 +542,19 @@ def test_create_channel_implicit_with_scopes( default.assert_called_once_with(scopes=["one", "two"], default_scopes=None) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO 
COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, - return_value=(mock.sentinel.credentials, mock.sentinel.projet), + return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_default_scopes( @@ -347,10 +569,12 @@ def test_create_channel_implicit_with_default_scopes( default.assert_called_once_with(scopes=None, default_scopes=["three", "four"]) - if grpc_helpers.HAS_GRPC_GCP: + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) def test_create_channel_explicit_with_duplicate_credentials(): @@ -364,7 +588,7 @@ def test_create_channel_explicit_with_duplicate_credentials(): ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True) @mock.patch("grpc.secure_channel") def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call): @@ -378,13 +602,16 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred ) assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + 
grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call): target = "example.com:443" @@ -401,13 +628,16 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal credentials.with_scopes.assert_called_once_with(scopes, default_scopes=None) assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_default_scopes( grpc_secure_channel, composite_creds_call @@ -428,13 +658,16 @@ def test_create_channel_explicit_default_scopes( ) assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_with_quota_project( grpc_secure_channel, composite_creds_call @@ -453,13 +686,16 @@ def test_create_channel_explicit_with_quota_project( credentials.with_quota_project.assert_called_once_with("project-foo") assert channel is 
grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -481,13 +717,16 @@ def test_create_channel_with_credentials_file( ) assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -512,13 +751,16 @@ def test_create_channel_with_credentials_file_and_scopes( ) assert channel is grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -543,17 +785,20 @@ def test_create_channel_with_credentials_file_and_default_scopes( ) assert channel is 
grpc_secure_channel.return_value - if grpc_helpers.HAS_GRPC_GCP: + + if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER grpc_secure_channel.assert_called_once_with(target, composite_creds, None) else: - grpc_secure_channel.assert_called_once_with(target, composite_creds) + grpc_secure_channel.assert_called_once_with( + target, composite_creds, compression=None + ) @pytest.mark.skipif( not grpc_helpers.HAS_GRPC_GCP, reason="grpc_gcp module not available" ) @mock.patch("grpc_gcp.secure_channel") -def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): +def test_create_channel_with_grpc_gcp(grpc_gcp_secure_channel): # pragma: NO COVER target = "example.com:443" scopes = ["test_scope"] @@ -676,6 +921,7 @@ def test_call_info(self): stub = operations_pb2.OperationsStub(channel) expected_request = operations_pb2.GetOperationRequest(name="meep") expected_response = operations_pb2.Operation(name="moop") + expected_compression = grpc.Compression.NoCompression expected_metadata = [("red", "blue"), ("two", "shoe")] expected_credentials = mock.sentinel.credentials channel.GetOperation.response = expected_response @@ -683,6 +929,7 @@ def test_call_info(self): response = stub.GetOperation( expected_request, timeout=42, + compression=expected_compression, metadata=expected_metadata, credentials=expected_credentials, ) @@ -690,7 +937,13 @@ def test_call_info(self): assert response == expected_response assert channel.requests == [("GetOperation", expected_request)] assert channel.GetOperation.calls == [ - (expected_request, 42, expected_metadata, expected_credentials) + ( + expected_request, + 42, + expected_metadata, + expected_credentials, + expected_compression, + ) ] def test_unary_unary(self): diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py index fbd242e5..3de15288 100644 --- a/tests/unit/test_iam.py +++ b/tests/unit/test_iam.py @@ -167,14 +167,15 @@ def test_owners_getter(self): assert policy.owners == expected def test_owners_setter(self): - import warnings 
from google.api_core.iam import OWNER_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'owners' is deprecated." + ) as warned: policy.owners = [MEMBER] (warning,) = warned @@ -191,14 +192,15 @@ def test_editors_getter(self): assert policy.editors == expected def test_editors_setter(self): - import warnings from google.api_core.iam import EDITOR_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'editors' is deprecated." + ) as warned: policy.editors = [MEMBER] (warning,) = warned @@ -215,14 +217,15 @@ def test_viewers_getter(self): assert policy.viewers == expected def test_viewers_setter(self): - import warnings from google.api_core.iam import VIEWER_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'viewers' is deprecated." + ) as warned: policy.viewers = [MEMBER] (warning,) = warned @@ -337,12 +340,13 @@ def test_to_api_repr_binding_wo_members(self): assert policy.to_api_repr() == {} def test_to_api_repr_binding_w_duplicates(self): - import warnings from google.api_core.iam import OWNER_ROLE OWNER = "group:cloud-logs@google.com" policy = self._make_one() - with warnings.catch_warnings(record=True): + with pytest.warns( + DeprecationWarning, match="Assigning to 'owners' is deprecated." 
+ ): policy.owners = [OWNER, OWNER] assert policy.to_api_repr() == { "bindings": [{"role": OWNER_ROLE, "members": [OWNER]}] diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py index 28fbfe27..80680720 100644 --- a/tests/unit/test_operation.py +++ b/tests/unit/test_operation.py @@ -13,7 +13,14 @@ # limitations under the License. -import mock +from unittest import mock + +import pytest + +try: + import grpc # noqa: F401 +except ImportError: # pragma: NO COVER + pytest.skip("No GRPC", allow_module_level=True) from google.api_core import exceptions from google.api_core import operation diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py new file mode 100644 index 00000000..8100a496 --- /dev/null +++ b/tests/unit/test_packaging.py @@ -0,0 +1,28 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-api-core``. 
+ google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py index a44e998b..560722c5 100644 --- a/tests/unit/test_page_iterator.py +++ b/tests/unit/test_page_iterator.py @@ -14,8 +14,8 @@ import math import types +from unittest import mock -import mock import pytest from google.api_core import page_iterator @@ -505,7 +505,8 @@ def api_request(*args, **kw): assert list(next(items_iter)) == [ dict(name=str(i)) for i in range( - ipage * page_size, min((ipage + 1) * page_size, n_results), + ipage * page_size, + min((ipage + 1) * page_size, n_results), ) ] else: diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py index 4c8a7c5e..c34dd0f3 100644 --- a/tests/unit/test_path_template.py +++ b/tests/unit/test_path_template.py @@ -13,10 +13,11 @@ # limitations under the License. 
from __future__ import unicode_literals +from unittest import mock -import mock import pytest +from google.api import auth_pb2 from google.api_core import path_template @@ -84,6 +85,61 @@ def test_expanded_failure(tmpl, args, kwargs, exc_match): path_template.expand(tmpl, *args, **kwargs) +@pytest.mark.parametrize( + "request_obj, field, expected_result", + [ + [{"field": "stringValue"}, "field", "stringValue"], + [{"field": "stringValue"}, "nosuchfield", None], + [{"field": "stringValue"}, "field.subfield", None], + [{"field": {"subfield": "stringValue"}}, "field", None], + [{"field": {"subfield": "stringValue"}}, "field.subfield", "stringValue"], + [{"field": {"subfield": [1, 2, 3]}}, "field.subfield", [1, 2, 3]], + [{"field": {"subfield": "stringValue"}}, "field", None], + [{"field": {"subfield": "stringValue"}}, "field.nosuchfield", None], + [ + {"field": {"subfield": {"subsubfield": "stringValue"}}}, + "field.subfield.subsubfield", + "stringValue", + ], + ["string", "field", None], + ], +) +def test_get_field(request_obj, field, expected_result): + result = path_template.get_field(request_obj, field) + assert result == expected_result + + +@pytest.mark.parametrize( + "request_obj, field, expected_result", + [ + [{"field": "stringValue"}, "field", {}], + [{"field": "stringValue"}, "nosuchfield", {"field": "stringValue"}], + [{"field": "stringValue"}, "field.subfield", {"field": "stringValue"}], + [{"field": {"subfield": "stringValue"}}, "field.subfield", {"field": {}}], + [ + {"field": {"subfield": "stringValue", "q": "w"}, "e": "f"}, + "field.subfield", + {"field": {"q": "w"}, "e": "f"}, + ], + [ + {"field": {"subfield": "stringValue"}}, + "field.nosuchfield", + {"field": {"subfield": "stringValue"}}, + ], + [ + {"field": {"subfield": {"subsubfield": "stringValue", "q": "w"}}}, + "field.subfield.subsubfield", + {"field": {"subfield": {"q": "w"}}}, + ], + ["string", "field", "string"], + ["string", "field.subfield", "string"], + ], +) +def 
test_delete_field(request_obj, field, expected_result): + path_template.delete_field(request_obj, field) + assert request_obj == expected_result + + @pytest.mark.parametrize( "tmpl, path", [ @@ -113,3 +169,484 @@ def test__replace_variable_with_pattern(): match.group.return_value = None with pytest.raises(ValueError, match="Unknown"): path_template._replace_variable_with_pattern(match) + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs, expected_result", + [ + [ + [["get", "/v1/no/template", ""]], + None, + {"foo": "bar"}, + ["get", "/v1/no/template", {}, {"foo": "bar"}], + ], + [ + [["get", "/v1/no/template", ""]], + auth_pb2.AuthenticationRule(selector="bar"), + {}, + [ + "get", + "/v1/no/template", + None, + auth_pb2.AuthenticationRule(selector="bar"), + ], + ], + # Single templates + [ + [["get", "/v1/{field}", ""]], + None, + {"field": "parent"}, + ["get", "/v1/parent", {}, {}], + ], + [ + [["get", "/v1/{selector}", ""]], + auth_pb2.AuthenticationRule(selector="parent"), + {}, + ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()], + ], + [ + [["get", "/v1/{field.sub}", ""]], + None, + {"field": {"sub": "parent"}, "foo": "bar"}, + ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}], + ], + [ + [["get", "/v1/{oauth.canonical_scopes}", ""]], + auth_pb2.AuthenticationRule( + selector="bar", + oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"), + ), + {}, + [ + "get", + "/v1/parent", + None, + auth_pb2.AuthenticationRule( + selector="bar", oauth=auth_pb2.OAuthRequirements() + ), + ], + ], + ], +) +def test_transcode_base_case(http_options, message, request_kwargs, expected_result): + http_options, expected_result = helper_test_transcode(http_options, expected_result) + result = path_template.transcode(http_options, message, **request_kwargs) + assert result == expected_result + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs, expected_result", + [ + [ + [["get", "/v1/{field.subfield}", ""]], + None, + 
{"field": {"subfield": "parent"}, "foo": "bar"}, + ["get", "/v1/parent", {}, {"field": {}, "foo": "bar"}], + ], + [ + [["get", "/v1/{oauth.canonical_scopes}", ""]], + auth_pb2.AuthenticationRule( + selector="bar", + oauth=auth_pb2.OAuthRequirements(canonical_scopes="parent"), + ), + {}, + [ + "get", + "/v1/parent", + None, + auth_pb2.AuthenticationRule( + selector="bar", oauth=auth_pb2.OAuthRequirements() + ), + ], + ], + [ + [["get", "/v1/{field.subfield.subsubfield}", ""]], + None, + {"field": {"subfield": {"subsubfield": "parent"}}, "foo": "bar"}, + ["get", "/v1/parent", {}, {"field": {"subfield": {}}, "foo": "bar"}], + ], + [ + [["get", "/v1/{field.subfield1}/{field.subfield2}", ""]], + None, + {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"}, + ["get", "/v1/parent/child", {}, {"field": {}, "foo": "bar"}], + ], + [ + [["get", "/v1/{selector}/{oauth.canonical_scopes}", ""]], + auth_pb2.AuthenticationRule( + selector="parent", + oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"), + ), + {"field": {"subfield1": "parent", "subfield2": "child"}, "foo": "bar"}, + [ + "get", + "/v1/parent/child", + None, + auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()), + ], + ], + ], +) +def test_transcode_subfields(http_options, message, request_kwargs, expected_result): + http_options, expected_result = helper_test_transcode(http_options, expected_result) + result = path_template.transcode(http_options, message, **request_kwargs) + assert result == expected_result + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs, expected_result", + [ + # Single segment wildcard + [ + [["get", "/v1/{field=*}", ""]], + None, + {"field": "parent"}, + ["get", "/v1/parent", {}, {}], + ], + [ + [["get", "/v1/{selector=*}", ""]], + auth_pb2.AuthenticationRule(selector="parent"), + {}, + ["get", "/v1/parent", None, auth_pb2.AuthenticationRule()], + ], + [ + [["get", "/v1/{field=a/*/b/*}", ""]], + None, + {"field": "a/parent/b/child", 
"foo": "bar"}, + ["get", "/v1/a/parent/b/child", {}, {"foo": "bar"}], + ], + [ + [["get", "/v1/{selector=a/*/b/*}", ""]], + auth_pb2.AuthenticationRule( + selector="a/parent/b/child", allow_without_credential=True + ), + {}, + [ + "get", + "/v1/a/parent/b/child", + None, + auth_pb2.AuthenticationRule(allow_without_credential=True), + ], + ], + # Double segment wildcard + [ + [["get", "/v1/{field=**}", ""]], + None, + {"field": "parent/p1"}, + ["get", "/v1/parent/p1", {}, {}], + ], + [ + [["get", "/v1/{selector=**}", ""]], + auth_pb2.AuthenticationRule(selector="parent/p1"), + {}, + ["get", "/v1/parent/p1", None, auth_pb2.AuthenticationRule()], + ], + [ + [["get", "/v1/{field=a/**/b/**}", ""]], + None, + {"field": "a/parent/p1/b/child/c1", "foo": "bar"}, + ["get", "/v1/a/parent/p1/b/child/c1", {}, {"foo": "bar"}], + ], + [ + [["get", "/v1/{selector=a/**/b/**}", ""]], + auth_pb2.AuthenticationRule( + selector="a/parent/p1/b/child/c1", allow_without_credential=True + ), + {}, + [ + "get", + "/v1/a/parent/p1/b/child/c1", + None, + auth_pb2.AuthenticationRule(allow_without_credential=True), + ], + ], + # Combined single and double segment wildcard + [ + [["get", "/v1/{field=a/*/b/**}", ""]], + None, + {"field": "a/parent/b/child/c1"}, + ["get", "/v1/a/parent/b/child/c1", {}, {}], + ], + [ + [["get", "/v1/{selector=a/*/b/**}", ""]], + auth_pb2.AuthenticationRule(selector="a/parent/b/child/c1"), + {}, + ["get", "/v1/a/parent/b/child/c1", None, auth_pb2.AuthenticationRule()], + ], + [ + [["get", "/v1/{field=a/**/b/*}/v2/{name}", ""]], + None, + {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"}, + ["get", "/v1/a/parent/p1/b/child/v2/first", {}, {"foo": "bar"}], + ], + [ + [["get", "/v1/{selector=a/**/b/*}/v2/{oauth.canonical_scopes}", ""]], + auth_pb2.AuthenticationRule( + selector="a/parent/p1/b/child", + oauth=auth_pb2.OAuthRequirements(canonical_scopes="first"), + ), + {"field": "a/parent/p1/b/child", "name": "first", "foo": "bar"}, + [ + "get", + 
"/v1/a/parent/p1/b/child/v2/first", + None, + auth_pb2.AuthenticationRule(oauth=auth_pb2.OAuthRequirements()), + ], + ], + ], +) +def test_transcode_with_wildcard( + http_options, message, request_kwargs, expected_result +): + http_options, expected_result = helper_test_transcode(http_options, expected_result) + result = path_template.transcode(http_options, message, **request_kwargs) + assert result == expected_result + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs, expected_result", + [ + # Single field body + [ + [["post", "/v1/no/template", "data"]], + None, + {"data": {"id": 1, "info": "some info"}, "foo": "bar"}, + ["post", "/v1/no/template", {"id": 1, "info": "some info"}, {"foo": "bar"}], + ], + [ + [["post", "/v1/no/template", "oauth"]], + auth_pb2.AuthenticationRule( + selector="bar", + oauth=auth_pb2.OAuthRequirements(canonical_scopes="child"), + ), + {}, + [ + "post", + "/v1/no/template", + auth_pb2.OAuthRequirements(canonical_scopes="child"), + auth_pb2.AuthenticationRule(selector="bar"), + ], + ], + [ + [["post", "/v1/{field=a/*}/b/{name=**}", "data"]], + None, + { + "field": "a/parent", + "name": "first/last", + "data": {"id": 1, "info": "some info"}, + "foo": "bar", + }, + [ + "post", + "/v1/a/parent/b/first/last", + {"id": 1, "info": "some info"}, + {"foo": "bar"}, + ], + ], + [ + [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "oauth"]], + auth_pb2.AuthenticationRule( + selector="a/parent", + allow_without_credential=True, + requirements=[auth_pb2.AuthRequirement(provider_id="p")], + oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"), + ), + {}, + [ + "post", + "/v1/a/parent/b/first/last", + auth_pb2.OAuthRequirements(), + auth_pb2.AuthenticationRule( + requirements=[auth_pb2.AuthRequirement(provider_id="p")], + allow_without_credential=True, + ), + ], + ], + # Wildcard body + [ + [["post", "/v1/{field=a/*}/b/{name=**}", "*"]], + None, + { + "field": "a/parent", + "name": "first/last", + "data": 
{"id": 1, "info": "some info"}, + "foo": "bar", + }, + [ + "post", + "/v1/a/parent/b/first/last", + {"data": {"id": 1, "info": "some info"}, "foo": "bar"}, + {}, + ], + ], + [ + [["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"]], + auth_pb2.AuthenticationRule( + selector="a/parent", + allow_without_credential=True, + oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"), + ), + { + "field": "a/parent", + "name": "first/last", + "data": {"id": 1, "info": "some info"}, + "foo": "bar", + }, + [ + "post", + "/v1/a/parent/b/first/last", + auth_pb2.AuthenticationRule( + allow_without_credential=True, oauth=auth_pb2.OAuthRequirements() + ), + auth_pb2.AuthenticationRule(), + ], + ], + ], +) +def test_transcode_with_body(http_options, message, request_kwargs, expected_result): + http_options, expected_result = helper_test_transcode(http_options, expected_result) + result = path_template.transcode(http_options, message, **request_kwargs) + assert result == expected_result + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs, expected_result", + [ + # Additional bindings + [ + [ + ["post", "/v1/{field=a/*}/b/{name=**}", "extra_data"], + ["post", "/v1/{field=a/*}/b/{name=**}", "*"], + ], + None, + { + "field": "a/parent", + "name": "first/last", + "data": {"id": 1, "info": "some info"}, + "foo": "bar", + }, + [ + "post", + "/v1/a/parent/b/first/last", + {"data": {"id": 1, "info": "some info"}, "foo": "bar"}, + {}, + ], + ], + [ + [ + [ + "post", + "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", + "extra_data", + ], + ["post", "/v1/{selector=a/*}/b/{oauth.canonical_scopes=**}", "*"], + ], + auth_pb2.AuthenticationRule( + selector="a/parent", + allow_without_credential=True, + oauth=auth_pb2.OAuthRequirements(canonical_scopes="first/last"), + ), + {}, + [ + "post", + "/v1/a/parent/b/first/last", + auth_pb2.AuthenticationRule( + allow_without_credential=True, oauth=auth_pb2.OAuthRequirements() + ), + 
auth_pb2.AuthenticationRule(), + ], + ], + [ + [ + ["get", "/v1/{field=a/*}/b/{name=**}", ""], + ["get", "/v1/{field=a/*}/b/first/last", ""], + ], + None, + {"field": "a/parent", "foo": "bar"}, + ["get", "/v1/a/parent/b/first/last", {}, {"foo": "bar"}], + ], + [ + [ + ["get", "/v1/{selector=a/*}/b/{oauth.allow_without_credential=**}", ""], + ["get", "/v1/{selector=a/*}/b/first/last", ""], + ], + auth_pb2.AuthenticationRule( + selector="a/parent", + allow_without_credential=True, + oauth=auth_pb2.OAuthRequirements(), + ), + {}, + [ + "get", + "/v1/a/parent/b/first/last", + None, + auth_pb2.AuthenticationRule( + allow_without_credential=True, oauth=auth_pb2.OAuthRequirements() + ), + ], + ], + ], +) +def test_transcode_with_additional_bindings( + http_options, message, request_kwargs, expected_result +): + http_options, expected_result = helper_test_transcode(http_options, expected_result) + result = path_template.transcode(http_options, message, **request_kwargs) + assert result == expected_result + + +@pytest.mark.parametrize( + "http_options, message, request_kwargs", + [ + [[["get", "/v1/{name}", ""]], None, {"foo": "bar"}], + [[["get", "/v1/{selector}", ""]], auth_pb2.AuthenticationRule(), {}], + [[["get", "/v1/{name}", ""]], auth_pb2.AuthenticationRule(), {}], + [[["get", "/v1/{name}", ""]], None, {"name": "first/last"}], + [ + [["get", "/v1/{selector}", ""]], + auth_pb2.AuthenticationRule(selector="first/last"), + {}, + ], + [[["get", "/v1/{name=mr/*/*}", ""]], None, {"name": "first/last"}], + [ + [["get", "/v1/{selector=mr/*/*}", ""]], + auth_pb2.AuthenticationRule(selector="first/last"), + {}, + ], + [[["post", "/v1/{name}", "data"]], None, {"name": "first/last"}], + [ + [["post", "/v1/{selector}", "data"]], + auth_pb2.AuthenticationRule(selector="first"), + {}, + ], + [[["post", "/v1/{first_name}", "data"]], None, {"last_name": "last"}], + [ + [["post", "/v1/{first_name}", ""]], + auth_pb2.AuthenticationRule(selector="first"), + {}, + ], + ], +) +def 
test_transcode_fails(http_options, message, request_kwargs): + http_options, _ = helper_test_transcode(http_options, range(4)) + with pytest.raises(ValueError) as exc_info: + path_template.transcode(http_options, message, **request_kwargs) + assert str(exc_info.value).count("URI") == len(http_options) + + +def helper_test_transcode(http_options_list, expected_result_list): + http_options = [] + for opt_list in http_options_list: + http_option = {"method": opt_list[0], "uri": opt_list[1]} + if opt_list[2]: + http_option["body"] = opt_list[2] + http_options.append(http_option) + + expected_result = { + "method": expected_result_list[0], + "uri": expected_result_list[1], + "query_params": expected_result_list[3], + } + if expected_result_list[2]: + expected_result["body"] = expected_result_list[2] + return (http_options, expected_result) diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py index 3df45df5..5678d3bc 100644 --- a/tests/unit/test_protobuf_helpers.py +++ b/tests/unit/test_protobuf_helpers.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys - import pytest +import re from google.api import http_pb2 from google.api_core import protobuf_helpers @@ -67,7 +66,12 @@ def test_from_any_pb_failure(): in_message = any_pb2.Any() in_message.Pack(date_pb2.Date(year=1990)) - with pytest.raises(TypeError): + with pytest.raises( + TypeError, + match=re.escape( + "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`" + ), + ): protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message) @@ -476,11 +480,6 @@ def test_field_mask_different_level_diffs(): ] -@pytest.mark.skipif( - sys.version_info.major == 2, - reason="Field names with trailing underscores can only be created" - "through proto-plus, which is Python 3 only.", -) def test_field_mask_ignore_trailing_underscore(): import proto @@ -496,11 +495,6 @@ class Foo(proto.Message): ] -@pytest.mark.skipif( - sys.version_info.major == 2, - reason="Field names with trailing underscores can only be created" - "through proto-plus, which is Python 3 only.", -) def test_field_mask_ignore_trailing_underscore_with_nesting(): import proto diff --git a/tests/unit/test_rest_helpers.py b/tests/unit/test_rest_helpers.py new file mode 100644 index 00000000..ff1a43f0 --- /dev/null +++ b/tests/unit/test_rest_helpers.py @@ -0,0 +1,94 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from google.api_core import rest_helpers + + +def test_flatten_simple_value(): + with pytest.raises(TypeError): + rest_helpers.flatten_query_params("abc") + + +def test_flatten_list(): + with pytest.raises(TypeError): + rest_helpers.flatten_query_params(["abc", "def"]) + + +def test_flatten_none(): + assert rest_helpers.flatten_query_params(None) == [] + + +def test_flatten_empty_dict(): + assert rest_helpers.flatten_query_params({}) == [] + + +def test_flatten_simple_dict(): + obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76} + assert rest_helpers.flatten_query_params(obj) == [ + ("a", "abc"), + ("b", "def"), + ("c", True), + ("d", False), + ("e", 10), + ("f", -3.76), + ] + + +def test_flatten_simple_dict_strict(): + obj = {"a": "abc", "b": "def", "c": True, "d": False, "e": 10, "f": -3.76} + assert rest_helpers.flatten_query_params(obj, strict=True) == [ + ("a", "abc"), + ("b", "def"), + ("c", "true"), + ("d", "false"), + ("e", "10"), + ("f", "-3.76"), + ] + + +def test_flatten_repeated_field(): + assert rest_helpers.flatten_query_params({"a": ["x", "y", "z", None]}) == [ + ("a", "x"), + ("a", "y"), + ("a", "z"), + ] + + +def test_flatten_nested_dict(): + obj = {"a": {"b": {"c": ["x", "y", "z"]}}, "d": {"e": "uvw"}} + expected_result = [("a.b.c", "x"), ("a.b.c", "y"), ("a.b.c", "z"), ("d.e", "uvw")] + + assert rest_helpers.flatten_query_params(obj) == expected_result + + +def test_flatten_repeated_dict(): + obj = { + "a": {"b": {"c": [{"v": 1}, {"v": 2}]}}, + "d": "uvw", + } + + with pytest.raises(ValueError): + rest_helpers.flatten_query_params(obj) + + +def test_flatten_repeated_list(): + obj = { + "a": {"b": {"c": [["e", "f"], ["g", "h"]]}}, + "d": "uvw", + } + + with pytest.raises(ValueError): + rest_helpers.flatten_query_params(obj) diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py new file mode 100644 index 00000000..0f998dfe --- /dev/null +++ b/tests/unit/test_rest_streaming.py 
@@ -0,0 +1,296 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import datetime +import logging +import random +import time +from typing import List +from unittest.mock import patch + +import proto +import pytest +import requests + +from google.api_core import rest_streaming +from google.api import http_pb2 +from google.api import httpbody_pb2 + +from ..helpers import Composer, Song, EchoResponse, parse_responses + + +__protobuf__ = proto.module(package=__name__) +SEED = int(time.time()) +logging.info(f"Starting sync rest streaming tests with random seed: {SEED}") +random.seed(SEED) + + +class ResponseMock(requests.Response): + class _ResponseItr: + def __init__(self, _response_bytes: bytes, random_split=False): + self._responses_bytes = _response_bytes + self._i = 0 + self._random_split = random_split + + def __next__(self): + if self._i == len(self._responses_bytes): + raise StopIteration + if self._random_split: + n = random.randint(1, len(self._responses_bytes[self._i :])) + else: + n = 1 + x = self._responses_bytes[self._i : self._i + n] + self._i += n + return x.decode("utf-8") + + def __init__( + self, + responses: List[proto.Message], + response_cls, + random_split=False, + ): + super().__init__() + self._responses = responses + self._random_split = random_split + self._response_message_cls = response_cls + + def _parse_responses(self): + return parse_responses(self._response_message_cls, self._responses) + + def close(self): + 
raise NotImplementedError() + + def iter_content(self, *args, **kwargs): + return self._ResponseItr( + self._parse_responses(), + random_split=self._random_split, + ) + + +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [(False, True), (False, False)], +) +def test_next_simple(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = EchoResponse + responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")] + else: + response_type = httpbody_pb2.HttpBody + responses = [ + httpbody_pb2.HttpBody(content_type="hello world"), + httpbody_pb2.HttpBody(content_type="yes"), + ] + + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming.ResponseIterator(resp, response_type) + assert list(itr) == responses + + +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +def test_next_nested(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="some song", composer=Composer(given_name="some name")), + Song(title="another song", date_added=datetime.datetime(2021, 12, 17)), + ] + else: + # Although `http_pb2.HttpRule`` is used in the response, any response message + # can be used which meets this criteria for the test of having a nested field. 
+ response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="some selector", + custom=http_pb2.CustomHttpPattern(kind="some kind"), + ), + http_pb2.HttpRule( + selector="another selector", + custom=http_pb2.CustomHttpPattern(path="some path"), + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming.ResponseIterator(resp, response_type) + assert list(itr) == responses + + +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +def test_next_stress(random_split, resp_message_is_proto_plus): + n = 50 + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i)) + for i in range(n) + ] + else: + response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="selector_%d" % i, + custom=http_pb2.CustomHttpPattern(path="path_%d" % i), + ) + for i in range(n) + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming.ResponseIterator(resp, response_type) + assert list(itr) == responses + + +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +def test_next_escaped_characters_in_string(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = Song + composer_with_relateds = Composer() + relateds = ["Artist A", "Artist B"] + composer_with_relateds.relateds = relateds + + responses = [ + Song( + title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n") + ), + Song( + title='{"this is weird": "totally"}', + composer=Composer(given_name="\\{}\\"), + ), + Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds), + ] + else: + response_type = 
http_pb2.Http + responses = [ + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='ti"tle\nfoo\tbar{}', + custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='{"this is weird": "totally"}', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='\\{"key": ["value",]}\\', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming.ResponseIterator(resp, response_type) + assert list(itr) == responses + + +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +def test_next_not_array(response_type): + with patch.object( + ResponseMock, "iter_content", return_value=iter('{"hello": 0}') + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming.ResponseIterator(resp, response_type) + with pytest.raises(ValueError): + next(itr) + mock_method.assert_called_once() + + +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +def test_cancel(response_type): + with patch.object(ResponseMock, "close", return_value=None) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming.ResponseIterator(resp, response_type) + itr.cancel() + mock_method.assert_called_once() + + +@pytest.mark.parametrize( + "response_type,return_value", + [ + (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")), + (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")), + ], +) +def test_check_buffer(response_type, return_value): + with patch.object( + ResponseMock, + "_parse_responses", + return_value=return_value, + ): + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming.ResponseIterator(resp, response_type) + with 
pytest.raises(ValueError): + next(itr) + next(itr) + + +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +def test_next_html(response_type): + with patch.object( + ResponseMock, "iter_content", return_value=iter("") + ) as mock_method: + resp = ResponseMock(responses=[], response_cls=response_type) + itr = rest_streaming.ResponseIterator(resp, response_type) + with pytest.raises(ValueError): + next(itr) + mock_method.assert_called_once() + + +def test_invalid_response_class(): + class SomeClass: + pass + + resp = ResponseMock(responses=[], response_cls=SomeClass) + with pytest.raises( + ValueError, + match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message", + ): + rest_streaming.ResponseIterator(resp, SomeClass) diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py index 30d624e2..2c20202b 100644 --- a/tests/unit/test_timeout.py +++ b/tests/unit/test_timeout.py @@ -14,14 +14,13 @@ import datetime import itertools +from unittest import mock -import mock - -from google.api_core import timeout +from google.api_core import timeout as timeouts def test__exponential_timeout_generator_base_2(): - gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None) + gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=None) result = list(itertools.islice(gen, 8)) assert result == [1, 2, 4, 8, 16, 32, 60, 60] @@ -34,7 +33,7 @@ def test__exponential_timeout_generator_base_deadline(utcnow): datetime.datetime.min + datetime.timedelta(seconds=n) for n in range(15) ] - gen = timeout._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0) + gen = timeouts._exponential_timeout_generator(1.0, 60.0, 2.0, deadline=30.0) result = list(itertools.islice(gen, 14)) # Should grow until the cumulative time is > 30s, then start decreasing as @@ -42,22 +41,105 @@ def test__exponential_timeout_generator_base_deadline(utcnow): assert result == [1, 2, 4, 8, 16, 24, 23, 22, 
21, 20, 19, 18, 17, 16] +class TestTimeToDeadlineTimeout(object): + def test_constructor(self): + timeout_ = timeouts.TimeToDeadlineTimeout() + assert timeout_._timeout is None + + def test_constructor_args(self): + timeout_ = timeouts.TimeToDeadlineTimeout(42.0) + assert timeout_._timeout == 42.0 + + def test___str__(self): + timeout_ = timeouts.TimeToDeadlineTimeout(1) + assert str(timeout_) == "" + + def test_apply(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + + datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(seconds=1) + + now = datetime.datetime.now(tz=datetime.timezone.utc) + + times = [ + now, + now + datetime.timedelta(seconds=0.0009), + now + datetime.timedelta(seconds=1), + now + datetime.timedelta(seconds=39), + now + datetime.timedelta(seconds=42), + now + datetime.timedelta(seconds=43), + ] + + def _clock(): + return times.pop(0) + + timeout_ = timeouts.TimeToDeadlineTimeout(42.0, _clock) + wrapped = timeout_(target) + + wrapped() + target.assert_called_with(timeout=42.0) + wrapped() + target.assert_called_with(timeout=41.0) + wrapped() + target.assert_called_with(timeout=3.0) + wrapped() + target.assert_called_with(timeout=42.0) + wrapped() + target.assert_called_with(timeout=42.0) + + def test_apply_no_timeout(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + + datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(seconds=1) + + now = datetime.datetime.now(tz=datetime.timezone.utc) + + times = [ + now, + now + datetime.timedelta(seconds=0.0009), + now + datetime.timedelta(seconds=1), + now + datetime.timedelta(seconds=2), + ] + + def _clock(): + return times.pop(0) + + timeout_ = timeouts.TimeToDeadlineTimeout(clock=_clock) + wrapped = timeout_(target) + + wrapped() + target.assert_called_with() + wrapped() + target.assert_called_with() + + def test_apply_passthrough(self): + target = mock.Mock(spec=["__call__", "__name__"], __name__="target") + 
timeout_ = timeouts.TimeToDeadlineTimeout(42.0) + wrapped = timeout_(target) + + wrapped(1, 2, meep="moop") + + target.assert_called_once_with(1, 2, meep="moop", timeout=42.0) + + class TestConstantTimeout(object): def test_constructor(self): - timeout_ = timeout.ConstantTimeout() + timeout_ = timeouts.ConstantTimeout() assert timeout_._timeout is None def test_constructor_args(self): - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) assert timeout_._timeout == 42.0 def test___str__(self): - timeout_ = timeout.ConstantTimeout(1) + timeout_ = timeouts.ConstantTimeout(1) assert str(timeout_) == "" def test_apply(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) wrapped = timeout_(target) wrapped() @@ -66,7 +148,7 @@ def test_apply(self): def test_apply_passthrough(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ConstantTimeout(42.0) + timeout_ = timeouts.ConstantTimeout(42.0) wrapped = timeout_(target) wrapped(1, 2, meep="moop") @@ -76,30 +158,30 @@ def test_apply_passthrough(self): class TestExponentialTimeout(object): def test_constructor(self): - timeout_ = timeout.ExponentialTimeout() - assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT - assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT - assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER - assert timeout_._deadline == timeout._DEFAULT_DEADLINE + timeout_ = timeouts.ExponentialTimeout() + assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT + assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT + assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER + assert timeout_._deadline == timeouts._DEFAULT_DEADLINE def test_constructor_args(self): - timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4) + timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4) assert 
timeout_._initial == 1 assert timeout_._maximum == 2 assert timeout_._multiplier == 3 assert timeout_._deadline == 4 def test_with_timeout(self): - original_timeout = timeout.ExponentialTimeout() + original_timeout = timeouts.ExponentialTimeout() timeout_ = original_timeout.with_deadline(42) assert original_timeout is not timeout_ - assert timeout_._initial == timeout._DEFAULT_INITIAL_TIMEOUT - assert timeout_._maximum == timeout._DEFAULT_MAXIMUM_TIMEOUT - assert timeout_._multiplier == timeout._DEFAULT_TIMEOUT_MULTIPLIER + assert timeout_._initial == timeouts._DEFAULT_INITIAL_TIMEOUT + assert timeout_._maximum == timeouts._DEFAULT_MAXIMUM_TIMEOUT + assert timeout_._multiplier == timeouts._DEFAULT_TIMEOUT_MULTIPLIER assert timeout_._deadline == 42 def test___str__(self): - timeout_ = timeout.ExponentialTimeout(1, 2, 3, 4) + timeout_ = timeouts.ExponentialTimeout(1, 2, 3, 4) assert str(timeout_) == ( "" @@ -107,7 +189,7 @@ def test___str__(self): def test_apply(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ExponentialTimeout(1, 10, 2) + timeout_ = timeouts.ExponentialTimeout(1, 10, 2) wrapped = timeout_(target) wrapped() @@ -121,7 +203,7 @@ def test_apply(self): def test_apply_passthrough(self): target = mock.Mock(spec=["__call__", "__name__"], __name__="target") - timeout_ = timeout.ExponentialTimeout(42.0, 100, 2) + timeout_ = timeouts.ExponentialTimeout(42.0, 100, 2) wrapped = timeout_(target) wrapped(1, 2, meep="moop") diff --git a/tests/unit/test_universe.py b/tests/unit/test_universe.py new file mode 100644 index 00000000..214e00ac --- /dev/null +++ b/tests/unit/test_universe.py @@ -0,0 +1,63 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from google.api_core import universe + + +class _Fake_Credentials: + def __init__(self, universe_domain=None): + if universe_domain: + self.universe_domain = universe_domain + + +def test_determine_domain(): + domain_client = "foo.com" + domain_env = "bar.com" + + assert universe.determine_domain(domain_client, domain_env) == domain_client + assert universe.determine_domain(None, domain_env) == domain_env + assert universe.determine_domain(domain_client, None) == domain_client + assert universe.determine_domain(None, None) == universe.DEFAULT_UNIVERSE + + with pytest.raises(universe.EmptyUniverseError): + universe.determine_domain("", None) + + with pytest.raises(universe.EmptyUniverseError): + universe.determine_domain(None, "") + + +def test_compare_domains(): + fake_domain = "foo.com" + another_fake_domain = "bar.com" + + assert universe.compare_domains(universe.DEFAULT_UNIVERSE, _Fake_Credentials()) + assert universe.compare_domains(fake_domain, _Fake_Credentials(fake_domain)) + + with pytest.raises(universe.UniverseMismatchError) as excinfo: + universe.compare_domains( + universe.DEFAULT_UNIVERSE, _Fake_Credentials(fake_domain) + ) + assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0 + assert str(excinfo.value).find(fake_domain) >= 0 + + with pytest.raises(universe.UniverseMismatchError) as excinfo: + universe.compare_domains(fake_domain, _Fake_Credentials()) + assert str(excinfo.value).find(fake_domain) >= 0 + assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0 + + with 
pytest.raises(universe.UniverseMismatchError) as excinfo: + universe.compare_domains(fake_domain, _Fake_Credentials(another_fake_domain)) + assert str(excinfo.value).find(fake_domain) >= 0 + assert str(excinfo.value).find(another_fake_domain) >= 0 diff --git a/tests/unit/test_version_header.py b/tests/unit/test_version_header.py new file mode 100644 index 00000000..ea7028e2 --- /dev/null +++ b/tests/unit/test_version_header.py @@ -0,0 +1,23 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.api_core import version_header + + +@pytest.mark.parametrize("version_identifier", ["some_value", ""]) +def test_to_api_version_header(version_identifier): + value = version_header.to_api_version_header(version_identifier) + assert value == (version_header.API_VERSION_METADATA_KEY, version_identifier)